From c25887ab99fa39701f59c0bc139e9c1dc6a58359 Mon Sep 17 00:00:00 2001 From: Matt Jesuele Date: Fri, 3 Apr 2020 19:00:55 -0700 Subject: [PATCH 01/60] go/libraries/doltcore/env/dolt_docs.go: Add newline to end of initial LICENSE/README text --- go/libraries/doltcore/env/dolt_docs.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/go/libraries/doltcore/env/dolt_docs.go b/go/libraries/doltcore/env/dolt_docs.go index 830e96b031..1fe73e430f 100644 --- a/go/libraries/doltcore/env/dolt_docs.go +++ b/go/libraries/doltcore/env/dolt_docs.go @@ -21,8 +21,8 @@ import ( "github.com/liquidata-inc/dolt/go/store/types" ) -var initialReadme = "This is a repository level README. Either edit it, add it, and commit it, or remove the file." -var initialLicense = "This is a repository level LICENSE. Either edit it, add it, and commit it, or remove the file." +var initialReadme = "This is a repository level README. Either edit it, add it, and commit it, or remove the file.\n" +var initialLicense = "This is a repository level LICENSE. Either edit it, add it, and commit it, or remove the file.\n" type Docs []doltdb.DocDetails From 8428754ba34ce0d5b69cfd9626fa10aa20e905d7 Mon Sep 17 00:00:00 2001 From: Matt Jesuele Date: Mon, 6 Apr 2020 23:02:30 -0700 Subject: [PATCH 02/60] go/cmd/dolt/commands: Handle some verrs which were previously being discarded --- go/cmd/dolt/commands/cnfcmds/cat.go | 58 ++++++++++++++----------- go/cmd/dolt/commands/cnfcmds/resolve.go | 54 +++++++++++------------ go/cmd/dolt/commands/tblcmds/rm.go | 23 +++++----- 3 files changed, 72 insertions(+), 63 deletions(-) diff --git a/go/cmd/dolt/commands/cnfcmds/cat.go b/go/cmd/dolt/commands/cnfcmds/cat.go index f2d45fd3ab..c5a8e7faac 100644 --- a/go/cmd/dolt/commands/cnfcmds/cat.go +++ b/go/cmd/dolt/commands/cnfcmds/cat.go @@ -89,40 +89,48 @@ func (cmd CatCmd) Exec(ctx context.Context, commandStr string, args []string, dE } root, verr := commands.GetWorkingWithVErr(dEnv) - - if verr == nil { - var cm *doltdb.Commit - cm, verr = commands.MaybeGetCommitWithVErr(dEnv, args[0]) - - if verr == nil { - if cm != nil { - args = args[1:] - - var err error - root, err = cm.GetRootValue() - - if err != nil { - verr = errhand.BuildDError("unable to get the root value").AddCause(err).Build() - } - } - - if len(args) == 0 { - usage() - return 1 - } - - verr = printConflicts(ctx, root, args) - } + if verr != nil { + return exitWithVerr(verr) } + cm, verr := commands.MaybeGetCommitWithVErr(dEnv, args[0]) if verr != nil { - cli.PrintErrln(verr.Verbose()) + return exitWithVerr(verr) + } + + // If no commit was resolved from the first argument, assume the args are all table names and print the conflicts + if cm == nil { + if verr := printConflicts(ctx, root, args); verr != nil { + return exitWithVerr(verr) + } + + return 0 + } + + tblNames := args[1:] + if len(tblNames) == 0 { + cli.Println("No tables specified") + usage() return 1 } + root, err := cm.GetRootValue() + if err != nil { + return exitWithVerr(errhand.BuildDError("unable to get the root value").AddCause(err).Build()) + } + + if verr = printConflicts(ctx, root, tblNames); verr != nil { + return exitWithVerr(verr) + } + return 0 } +func exitWithVerr(verr errhand.VerboseError) int { + cli.PrintErrln(verr.Verbose()) + return 1 +} + func printConflicts(ctx context.Context, root *doltdb.RootValue, tblNames []string) errhand.VerboseError { if len(tblNames) == 1 && tblNames[0] == "." { var err error diff --git a/go/cmd/dolt/commands/cnfcmds/resolve.go b/go/cmd/dolt/commands/cnfcmds/resolve.go index 74b7aa6ab4..1904028183 100644 --- a/go/cmd/dolt/commands/cnfcmds/resolve.go +++ b/go/cmd/dolt/commands/cnfcmds/resolve.go @@ -187,40 +187,38 @@ func manualResolve(ctx context.Context, apr *argparser.ArgParseResults, dEnv *en invalid, notFound, updatedTbl, err := tbl.ResolveConflicts(ctx, keysToResolve) if err != nil { - verr = errhand.BuildDError("fatal: Failed to resolve conflicts").AddCause(err).Build() - } else { - for _, key := range invalid { - cli.Println(key, "is not a valid key") - } + return errhand.BuildDError("fatal: Failed to resolve conflicts").AddCause(err).Build() + } - for _, key := range notFound { - cli.Println(key, "is not the primary key of a conflicting row") - } + for _, key := range invalid { + cli.Println(key, "is not a valid key") + } - updatedHash, err := updatedTbl.HashOf() + for _, key := range notFound { + cli.Println(key, "is not the primary key of a conflicting row") + } + + updatedHash, err := updatedTbl.HashOf() + + if err != nil { + return errhand.BuildDError("error: failed to get table hash").AddCause(err).Build() + } + + hash, err := tbl.HashOf() + + if err != nil { + return errhand.BuildDError("error: failed to get table hash").AddCause(err).Build() + } + + if hash == updatedHash { + root, err := root.PutTable(ctx, tblName, updatedTbl) if err != nil { - return errhand.BuildDError("error: failed to get table hash").AddCause(err).Build() + return errhand.BuildDError("").AddCause(err).Build() } - hash, err := tbl.HashOf() - - if err != nil { - return errhand.BuildDError("error: failed to get table hash").AddCause(err).Build() - } - - if hash == updatedHash { - root, err := root.PutTable(ctx, tblName, updatedTbl) - - if err != nil { - return errhand.BuildDError("").AddCause(err).Build() - } - - verr = commands.UpdateWorkingWithVErr(dEnv, root) - - if verr != nil { - return verr - } + if verr := commands.UpdateWorkingWithVErr(dEnv, root); verr != nil { + return verr } } diff --git a/go/cmd/dolt/commands/tblcmds/rm.go b/go/cmd/dolt/commands/tblcmds/rm.go index 9d6329abd4..b4240c51dd 100644 --- a/go/cmd/dolt/commands/tblcmds/rm.go +++ b/go/cmd/dolt/commands/tblcmds/rm.go @@ -84,23 +84,26 @@ func (cmd RmCmd) Exec(ctx context.Context, commandStr string, args []string, dEn } working, verr := commands.GetWorkingWithVErr(dEnv) - - if verr == nil { - verr := commands.ValidateTablesWithVErr(apr.Args(), working) - - if verr == nil { - verr = removeTables(ctx, dEnv, apr.Args(), working) - } + if verr != nil { + return exitWithVerr(verr) } - if verr != nil { - cli.PrintErrln(verr.Verbose()) - return 1 + if verr := commands.ValidateTablesWithVErr(apr.Args(), working); verr != nil { + return exitWithVerr(verr) + } + + if verr := removeTables(ctx, dEnv, apr.Args(), working); verr != nil { + return exitWithVerr(verr) } return 0 } +func exitWithVerr(verr errhand.VerboseError) int { + cli.PrintErrln(verr.Verbose()) + return 1 +} + func removeTables(ctx context.Context, dEnv *env.DoltEnv, tables []string, working *doltdb.RootValue) errhand.VerboseError { working, err := working.RemoveTables(ctx, tables...) From 7ead0abbf63ff6cacb7db3ec19021f8a541751c7 Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Fri, 10 Apr 2020 11:52:19 -0700 Subject: [PATCH 03/60] Added json output format for SQL, and fixed null printing bug in CSV output Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/cnfcmds/cat.go | 2 +- go/cmd/dolt/commands/diff.go | 2 +- go/cmd/dolt/commands/sql.go | 17 ++++++++++++++--- go/libraries/doltcore/diff/sql_diff.go | 2 +- .../table/untyped/nullprinter/nullprinter.go | 15 +++++++++++---- 5 files changed, 28 insertions(+), 10 deletions(-) diff --git a/go/cmd/dolt/commands/cnfcmds/cat.go b/go/cmd/dolt/commands/cnfcmds/cat.go index f2d45fd3ab..1df96e5617 100644 --- a/go/cmd/dolt/commands/cnfcmds/cat.go +++ b/go/cmd/dolt/commands/cnfcmds/cat.go @@ -167,7 +167,7 @@ func printConflicts(ctx context.Context, root *doltdb.RootValue, tblNames []stri nullPrinter := nullprinter.NewNullPrinter(cnfRd.GetSchema()) fwtTr := fwt.NewAutoSizingFWTTransformer(cnfRd.GetSchema(), fwt.HashFillWhenTooLong, 1000) transforms := pipeline.NewTransformCollection( - pipeline.NewNamedTransform(nullprinter.NULL_PRINTING_STAGE, nullPrinter.ProcessRow), + pipeline.NewNamedTransform(nullprinter.NullPrintingStage, nullPrinter.ProcessRow), pipeline.NamedTransform{Name: "fwt", Func: fwtTr.TransformToFWT}, ) diff --git a/go/cmd/dolt/commands/diff.go b/go/cmd/dolt/commands/diff.go index e9fa3377d1..fd58501cec 100644 --- a/go/cmd/dolt/commands/diff.go +++ b/go/cmd/dolt/commands/diff.go @@ -710,7 +710,7 @@ func buildPipeline(dArgs *diffArgs, joiner *rowconv.Joiner, ds *diff.DiffSplitte nullPrinter := nullprinter.NewNullPrinter(untypedUnionSch) fwtTr := fwt.NewAutoSizingFWTTransformer(untypedUnionSch, fwt.HashFillWhenTooLong, 1000) transforms.AppendTransforms( - pipeline.NewNamedTransform(nullprinter.NULL_PRINTING_STAGE, nullPrinter.ProcessRow), + pipeline.NewNamedTransform(nullprinter.NullPrintingStage, nullPrinter.ProcessRow), pipeline.NamedTransform{Name: fwtStageName, Func: fwtTr.TransformToFWT}, ) } diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index 80fe349676..9d39cfae10 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -19,6 +19,7 @@ import ( "bytes" "context" "fmt" + "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed/json" "io" "os" "path/filepath" @@ -452,6 +453,8 @@ func getFormat(format string) (resultFormat, errhand.VerboseError) { return formatTabular, nil case "csv": return formatCsv, nil + case "json": + return formatJson, nil default: return formatTabular, errhand.BuildDError("Invalid argument for --result-format. Valid values are tabular,csv").Build() } @@ -1051,6 +1054,7 @@ type resultFormat byte const ( formatTabular resultFormat = iota formatCsv + formatJson ) type sqlEngine struct { @@ -1180,10 +1184,15 @@ func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema, } }() - nullPrinter := nullprinter.NewNullPrinter(untypedSch) - p.AddStage(pipeline.NewNamedTransform(nullprinter.NULL_PRINTING_STAGE, nullPrinter.ProcessRow)) + // Parts of the pipeline depend on the output format, such as how we print null values and whether we pad strings. + switch se.resultFormat { + case formatCsv: + nullPrinter := nullprinter.NewNullPrinterWithNullString(untypedSch, "") + p.AddStage(pipeline.NewNamedTransform(nullprinter.NullPrintingStage, nullPrinter.ProcessRow)) - if se.resultFormat == formatTabular { + case formatTabular: + nullPrinter := nullprinter.NewNullPrinter(untypedSch) + p.AddStage(pipeline.NewNamedTransform(nullprinter.NullPrintingStage, nullPrinter.ProcessRow)) autoSizeTransform := fwt.NewAutoSizingFWTTransformer(untypedSch, fwt.PrintAllWhenTooLong, 10000) p.AddStage(pipeline.NamedTransform{Name: fwtStageName, Func: autoSizeTransform.TransformToFWT}) } @@ -1198,6 +1207,8 @@ func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema, wr, err = tabular.NewTextTableWriter(cliWr, untypedSch) case formatCsv: wr, err = csv.NewCSVWriter(cliWr, untypedSch, csv.NewCSVInfo()) + case formatJson: + wr, err = json.NewJSONWriter(cliWr, untypedSch) default: panic("unimplemented output format type") } diff --git a/go/libraries/doltcore/diff/sql_diff.go b/go/libraries/doltcore/diff/sql_diff.go index cb344e0a31..32de4fb451 100644 --- a/go/libraries/doltcore/diff/sql_diff.go +++ b/go/libraries/doltcore/diff/sql_diff.go @@ -188,7 +188,7 @@ func PrintSqlTableDiffs(ctx context.Context, r1, r2 *doltdb.RootValue, wr io.Wri transforms := pipeline.NewTransformCollection() nullPrinter := nullprinter.NewNullPrinter(sch) transforms.AppendTransforms( - pipeline.NewNamedTransform(nullprinter.NULL_PRINTING_STAGE, nullPrinter.ProcessRow), + pipeline.NewNamedTransform(nullprinter.NullPrintingStage, nullPrinter.ProcessRow), ) sink, err := NewSQLDiffSink(wr, sch, tblName) if err != nil { diff --git a/go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go b/go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go index 2f3f15496b..7395cb3428 100644 --- a/go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go +++ b/go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go @@ -21,18 +21,25 @@ import ( "github.com/liquidata-inc/dolt/go/store/types" ) -const PRINTED_NULL = "" +const PrintedNull = "" -const NULL_PRINTING_STAGE = "null printing" +const NullPrintingStage = "null printing" // NullPrinter is a utility to convert nil values in rows to a string representation. type NullPrinter struct { Sch schema.Schema + nullStr string } // NewNullPrinter returns a new null printer for the schema given, which must be string-typed (untyped). func NewNullPrinter(sch schema.Schema) *NullPrinter { - return &NullPrinter{Sch: sch} + return &NullPrinter{Sch: sch, nullStr: PrintedNull} +} + +// NewNullPrinterWithNullString returns a new null printer for the schema given, which must be string-typed, using the +// string given as the value to print for nulls. +func NewNullPrinterWithNullString(sch schema.Schema, nullStr string) *NullPrinter { + return &NullPrinter{Sch: sch, nullStr: nullStr} } // Function to convert any nil values for a row with the schema given to a string representation. Used as the transform @@ -44,7 +51,7 @@ func (np *NullPrinter) ProcessRow(inRow row.Row, props pipeline.ReadableMap) (ro if !types.IsNull(val) { taggedVals[tag] = val } else { - taggedVals[tag] = types.String(PRINTED_NULL) + taggedVals[tag] = types.String(np.nullStr) } return false, nil From ce1328247cc21ae67c344cab4354f50ea0b8265f Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Fri, 10 Apr 2020 15:10:52 -0700 Subject: [PATCH 04/60] Added test for json SQL output. Numeric output is wrong. Signed-off-by: Zach Musgrave --- bats/1pk5col-ints.bats | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/bats/1pk5col-ints.bats b/bats/1pk5col-ints.bats index 968bda1452..227469cdad 100755 --- a/bats/1pk5col-ints.bats +++ b/bats/1pk5col-ints.bats @@ -250,8 +250,31 @@ if rows[2] != "9,8,7,6,5,4".split(","): [ "$status" -eq 0 ] [[ "$output" =~ 'column1' ]] || false [ "${#lines[@]}" -eq 2 ] + + # Test that null values are properly output + dolt sql -q "insert into test (pk,c1) values (40,1)" + run dolt sql -q "select c1 as column1, c2 as column2, c3 as column3 from test where pk = 40" -r csv + [ "$status" -eq 0 ] + [[ "$output" =~ "column1,column2,column3" ]] || false + [[ "$output" =~ "1,," ]] || false } +@test "dolt sql select json output" { + dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)" + run dolt sql -q "select c1 as column1, c2 as column2 from test" -r json + [ "$status" -eq 0 ] + [ "$output" == '{"rows": [{"column1":"1","column2":"2"},{"column1":"11","column2":"12"},{"column1":"21","column2":"22"}]}' ] + + run dolt sql -q "select c1 as column1 from test where c1=1" -r json + [ "$status" -eq 0 ] + [ "$output" == '{"rows": [{"column1":"1"}]}' ] + + # Test that null values are properly handled + dolt sql -q "insert into test (pk,c1) values (40,1)" + run dolt sql -q "select c1 as column1, c2 as column2, c3 as column3 from test where pk = 40" -r json + [ "$status" -eq 0 ] + [ "$output" == '{"rows": [{"column1":"1"}]}' ] +} @test "dolt sql select with inverted where clause" { dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)" From 14489197bcf15fc6f27299fe875dd6afa5e84cc0 Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Fri, 10 Apr 2020 15:56:11 -0700 Subject: [PATCH 05/60] Fixed number quoting problems for JSON SQL format. Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sql.go | 65 +++++++++++++++++++++++-------------- 1 file changed, 41 insertions(+), 24 deletions(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index 9d39cfae10..f712028c95 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -1149,7 +1149,8 @@ func (se *sqlEngine) query(ctx *sql.Context, query string) (sql.Schema, sql.RowI // Pretty prints the output of the new SQL engine func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema, rowIter sql.RowIter) error { - var chanErr error + nbf := types.Format_Default + doltSch, err := dsqle.SqlSchemaToDoltResultSchema(sqlSch) if err != nil { return err @@ -1163,27 +1164,6 @@ func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema, rowChannel := make(chan row.Row) p := pipeline.NewPartialPipeline(pipeline.InFuncForChannel(rowChannel)) - nbf := types.Format_Default - go func() { - defer close(rowChannel) - var sqlRow sql.Row - for sqlRow, chanErr = rowIter.Next(); chanErr == nil; sqlRow, chanErr = rowIter.Next() { - taggedVals := make(row.TaggedValues) - for i, col := range sqlRow { - if col != nil { - taggedVals[uint64(i)] = types.String(fmt.Sprintf("%v", col)) - } - } - - var r row.Row - r, chanErr = row.New(nbf, untypedSch, taggedVals) - - if chanErr == nil { - rowChannel <- r - } - } - }() - // Parts of the pipeline depend on the output format, such as how we print null values and whether we pad strings. switch se.resultFormat { case formatCsv: @@ -1244,13 +1224,50 @@ func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema, p.InjectRow(fwtStageName, r) } + + // For some output formats, we want to convert everything to strings to be processed by the pipeline. For others, + // we want to leave types alone and let the writer figure out how to format it for output. + var rowFn func(r sql.Row) (row.Row, error) + switch se.resultFormat { + case formatJson: + rowFn = func(r sql.Row) (r2 row.Row, err error) { + return dsqle.SqlRowToDoltRow(nbf, r, doltSch) + } + default: + rowFn = func(r sql.Row) (r2 row.Row, err error) { + taggedVals := make(row.TaggedValues) + for i, col := range r { + if col != nil { + taggedVals[uint64(i)] = types.String(fmt.Sprintf("%v", col)) + } + } + return row.New(nbf, untypedSch, taggedVals) + } + } + + var iterErr error + + // Read rows off the row iter and pass them to the pipeline channel + go func() { + defer close(rowChannel) + var sqlRow sql.Row + for sqlRow, iterErr = rowIter.Next(); iterErr == nil; sqlRow, iterErr = rowIter.Next() { + var r row.Row + r, iterErr = rowFn(sqlRow) + + if iterErr == nil { + rowChannel <- r + } + } + }() + p.Start() if err := p.Wait(); err != nil { return fmt.Errorf("error processing results: %v", err) } - if chanErr != io.EOF { - return fmt.Errorf("error processing results: %v", chanErr) + if iterErr != io.EOF { + return fmt.Errorf("error processing results: %v", iterErr) } return nil From d003e8445ea5bcdaa9c9232e05db1654d0da8d84 Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Fri, 10 Apr 2020 15:56:58 -0700 Subject: [PATCH 06/60] Small naming change Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sql.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index f712028c95..dfdd8ee04c 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -1234,7 +1234,7 @@ func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema, return dsqle.SqlRowToDoltRow(nbf, r, doltSch) } default: - rowFn = func(r sql.Row) (r2 row.Row, err error) { + rowFn = func(r sql.Row) (row.Row, error) { taggedVals := make(row.TaggedValues) for i, col := range r { if col != nil { From 975f48f225ce7b7c16de563790a4f83a139f6b89 Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Fri, 10 Apr 2020 16:11:11 -0700 Subject: [PATCH 07/60] Fixed bats test for json numeric output Signed-off-by: Zach Musgrave --- bats/1pk5col-ints.bats | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bats/1pk5col-ints.bats b/bats/1pk5col-ints.bats index 227469cdad..4edd476661 100755 --- a/bats/1pk5col-ints.bats +++ b/bats/1pk5col-ints.bats @@ -263,17 +263,17 @@ if rows[2] != "9,8,7,6,5,4".split(","): dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)" run dolt sql -q "select c1 as column1, c2 as column2 from test" -r json [ "$status" -eq 0 ] - [ "$output" == '{"rows": [{"column1":"1","column2":"2"},{"column1":"11","column2":"12"},{"column1":"21","column2":"22"}]}' ] + [ "$output" == '{"rows": [{"column1":1,"column2":2},{"column1":11,"column2":12},{"column1":21,"column2":22}]}' ] run dolt sql -q "select c1 as column1 from test where c1=1" -r json [ "$status" -eq 0 ] - [ "$output" == '{"rows": [{"column1":"1"}]}' ] + [ "$output" == '{"rows": [{"column1":1}]}' ] # Test that null values are properly handled dolt sql -q "insert into test (pk,c1) values (40,1)" run dolt sql -q "select c1 as column1, c2 as column2, c3 as column3 from test where pk = 40" -r json [ "$status" -eq 0 ] - [ "$output" == '{"rows": [{"column1":"1"}]}' ] + [ "$output" == '{"rows": [{"column1":1}]}' ] } @test "dolt sql select with inverted where clause" { From 68d3df00947309c2365bb7fe76818a4bf2ec5c72 Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Fri, 10 Apr 2020 16:31:51 -0700 Subject: [PATCH 08/60] Added bats test for csv and json SQL output formats Signed-off-by: Zach Musgrave --- bats/sql.bats | 34 +++++++++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/bats/sql.bats b/bats/sql.bats index a6a8128a8c..0d70b4f9a0 100755 --- a/bats/sql.bats +++ b/bats/sql.bats @@ -160,6 +160,38 @@ teardown() { [[ "$output" =~ "not found" ]] || false } +@test "sql output formats" { + dolt sql < Date: Fri, 10 Apr 2020 17:00:34 -0700 Subject: [PATCH 09/60] Use the new view definition, and fixed spacing in SQL prompts Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sql.go | 11 ++++++++--- go/libraries/doltcore/sqle/database.go | 2 +- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index dfdd8ee04c..6af61b91ea 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -633,8 +633,11 @@ func runShell(ctx *sql.Context, se *sqlEngine, mrEnv env.MultiRepoEnv) error { // start the doltsql shell historyFile := filepath.Join(".sqlhistory") // history file written to working dir + initialPrompt := fmt.Sprintf("%s> ", ctx.GetCurrentDatabase()) + initialMultilinePrompt := fmt.Sprintf(fmt.Sprintf("%%%ds", len(initialPrompt)), "-> ") + rlConf := readline.Config{ - Prompt: fmt.Sprintf("%s>", ctx.GetCurrentDatabase()), + Prompt: initialPrompt, Stdout: cli.CliOut, Stderr: cli.CliOut, HistoryFile: historyFile, @@ -651,7 +654,7 @@ func runShell(ctx *sql.Context, se *sqlEngine, mrEnv env.MultiRepoEnv) error { } shell := ishell.NewUninterpreted(&shellConf) - shell.SetMultiPrompt(" -> ") + shell.SetMultiPrompt(initialMultilinePrompt) // TODO: update completer on create / drop / alter statements completer, err := newCompleter(ctx, currEnv) if err != nil { @@ -699,7 +702,9 @@ func runShell(ctx *sql.Context, se *sqlEngine, mrEnv env.MultiRepoEnv) error { shell.Println(color.RedString(err.Error())) } - shell.SetPrompt(fmt.Sprintf("%s>", ctx.GetCurrentDatabase())) + currPrompt := fmt.Sprintf("%s> ", ctx.GetCurrentDatabase()) + shell.SetPrompt(currPrompt) + shell.SetMultiPrompt(fmt.Sprintf(fmt.Sprintf("%%%ds", len(currPrompt)), "-> ")) }) shell.Run() diff --git a/go/libraries/doltcore/sqle/database.go b/go/libraries/doltcore/sqle/database.go index de5b26d155..c5854dba57 100644 --- a/go/libraries/doltcore/sqle/database.go +++ b/go/libraries/doltcore/sqle/database.go @@ -715,7 +715,7 @@ func RegisterSchemaFragments(ctx *sql.Context, db Database, root *doltdb.RootVal if err != nil { parseErrors = append(parseErrors, err) } else { - ctx.Register(db.Name(), sql.NewView(name, cv.(*plan.CreateView).Definition)) + ctx.Register(db.Name(), cv.(*plan.CreateView).Definition.AsView()) } } r, err = iter.Next() From 68cdd50f30ffd148913c89d43f1a2ba2b8862e0e Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Fri, 10 Apr 2020 17:07:43 -0700 Subject: [PATCH 10/60] Formatting Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sql.go | 3 +-- go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index dfdd8ee04c..85789036e0 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -19,7 +19,6 @@ import ( "bytes" "context" "fmt" - "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed/json" "io" "os" "path/filepath" @@ -46,6 +45,7 @@ import ( dsqle "github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle" "github.com/liquidata-inc/dolt/go/libraries/doltcore/table" "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/pipeline" + "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed/json" "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/untyped" "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/untyped/csv" "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/untyped/fwt" @@ -1224,7 +1224,6 @@ func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema, p.InjectRow(fwtStageName, r) } - // For some output formats, we want to convert everything to strings to be processed by the pipeline. For others, // we want to leave types alone and let the writer figure out how to format it for output. var rowFn func(r sql.Row) (row.Row, error) diff --git a/go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go b/go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go index 7395cb3428..cc678a7bc5 100644 --- a/go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go +++ b/go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go @@ -27,7 +27,7 @@ const NullPrintingStage = "null printing" // NullPrinter is a utility to convert nil values in rows to a string representation. type NullPrinter struct { - Sch schema.Schema + Sch schema.Schema nullStr string } From 63a2f9a7365de755974d1a3ff25042653ecb15a8 Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Sat, 11 Apr 2020 15:02:46 -0700 Subject: [PATCH 11/60] Initial pass at auto commit database for sql-server Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sql.go | 21 +++--- go/cmd/dolt/commands/sqlserver/server.go | 89 +++++++++++++----------- go/libraries/doltcore/env/environment.go | 16 ++++- go/libraries/doltcore/env/repo_state.go | 8 +++ go/libraries/doltcore/sqle/database.go | 34 +++++++-- 5 files changed, 114 insertions(+), 54 deletions(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index 6af61b91ea..08eb311f9f 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -307,7 +307,7 @@ func (cmd SqlCmd) Exec(ctx context.Context, commandStr string, args []string, dE } func execShell(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, format resultFormat) (map[string]*doltdb.RootValue, errhand.VerboseError) { - dbs := CollectDBs(mrEnv, roots, dsqle.NewDatabase) + dbs := CollectDBs(mrEnv, roots, newDatabase) se, err := newSqlEngine(sqlCtx, mrEnv, roots, format, dbs...) if err != nil { return nil, errhand.VerboseErrorFromError(err) @@ -327,7 +327,7 @@ func execShell(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*do } func execBatch(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, batchInput io.Reader, format resultFormat) (map[string]*doltdb.RootValue, errhand.VerboseError) { - dbs := CollectDBs(mrEnv, roots, dsqle.NewBatchedDatabase) + dbs := CollectDBs(mrEnv, roots, newBatchedDatabase) se, err := newSqlEngine(sqlCtx, mrEnv, roots, format, dbs...) if err != nil { return nil, errhand.VerboseErrorFromError(err) @@ -346,8 +346,16 @@ func execBatch(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*do return newRoots, nil } +func newDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database { + return dsqle.NewDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState) +} + +func newBatchedDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database { + return dsqle.NewBatchedDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState) +} + func execQuery(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, query string, format resultFormat) (map[string]*doltdb.RootValue, errhand.VerboseError) { - dbs := CollectDBs(mrEnv, roots, dsqle.NewDatabase) + dbs := CollectDBs(mrEnv, roots, newBatchedDatabase) se, err := newSqlEngine(sqlCtx, mrEnv, roots, format, dbs...) if err != nil { return nil, errhand.VerboseErrorFromError(err) @@ -375,7 +383,7 @@ func execQuery(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*do return newRoots, nil } -type createDBFunc func(name string, defRoot *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader) dsqle.Database +type createDBFunc func(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database // CollectDBs takes a MultiRepoEnv and creates Database objects from each environment and returns a slice of these // objects. @@ -383,7 +391,7 @@ func CollectDBs(mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, crea dbs := make([]dsqle.Database, 0, len(mrEnv)) _ = mrEnv.Iter(func(name string, dEnv *env.DoltEnv) (stop bool, err error) { root := roots[name] - db := createDB(name, root, dEnv.DoltDB, dEnv.RepoState) + db := createDB(name, root, dEnv) dbs = append(dbs, db) return false, nil }) @@ -1229,7 +1237,6 @@ func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema, p.InjectRow(fwtStageName, r) } - // For some output formats, we want to convert everything to strings to be processed by the pipeline. For others, // we want to leave types alone and let the writer figure out how to format it for output. var rowFn func(r sql.Row) (row.Row, error) @@ -1278,8 +1285,6 @@ func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema, return nil } -var ErrNotNaked = fmt.Errorf("not a naked query.") - // Checks if the query is a naked delete and then deletes all rows if so. Returns true if it did so, false otherwise. func (se *sqlEngine) checkThenDeleteAllRows(ctx *sql.Context, s *sqlparser.Delete) bool { if s.Where == nil && s.Limit == nil && s.Partitions == nil && len(s.TableExprs) == 1 { diff --git a/go/cmd/dolt/commands/sqlserver/server.go b/go/cmd/dolt/commands/sqlserver/server.go index c901099e3d..028fac3410 100644 --- a/go/cmd/dolt/commands/sqlserver/server.go +++ b/go/cmd/dolt/commands/sqlserver/server.go @@ -106,7 +106,7 @@ func Serve(ctx context.Context, serverConfig *ServerConfig, serverController *Se } } - dbs := commands.CollectDBs(mrEnv, roots, dsqle.NewDatabase) + dbs := commands.CollectDBs(mrEnv, roots, newAutoCommmitDatabase) for _, db := range dbs { sqlEngine.AddDatabase(db) } @@ -124,45 +124,7 @@ func Serve(ctx context.Context, serverConfig *ServerConfig, serverController *Se ConnWriteTimeout: timeout, }, sqlEngine, - func(ctx context.Context, conn *mysql.Conn, host string) (sql.Session, *sql.IndexRegistry, *sql.ViewRegistry, error) { - mysqlSess := sql.NewSession(host, conn.RemoteAddr().String(), conn.User, conn.ConnectionID) - doltSess, err := dsqle.NewSessionWithDefaultRoots(mysqlSess, dbsAsDSQLDBs(sqlEngine.Catalog.AllDatabases())...) - - if err != nil { - return nil, nil, nil, err - } - - ir := sql.NewIndexRegistry() - vr := sql.NewViewRegistry() - sqlCtx := sql.NewContext( - ctx, - sql.WithIndexRegistry(ir), - sql.WithViewRegistry(vr), - sql.WithSession(doltSess)) - - dbs := commands.CollectDBs(mrEnv, roots, dsqle.NewDatabase) - for _, db := range dbs { - err := db.SetRoot(sqlCtx, db.GetDefaultRoot()) - if err != nil { - return nil, nil, nil, err - } - - err = dsqle.RegisterSchemaFragments(sqlCtx, db, db.GetDefaultRoot()) - if err != nil { - cli.PrintErr(err) - return nil, nil, nil, err - } - } - - sqlCtx.RegisterIndexDriver(dsqle.NewDoltIndexDriver(dbs...)) - err = ir.LoadIndexes(sqlCtx, sqlEngine.Catalog.AllDatabases()) - - if err != nil { - return nil, nil, nil, err - } - - return doltSess, ir, vr, nil - }, + newSessionBuilder(sqlEngine), ) if startError != nil { @@ -179,6 +141,53 @@ func Serve(ctx context.Context, serverConfig *ServerConfig, serverController *Se return } +func newSessionBuilder(sqlEngine *sqle.Engine) server.SessionBuilder { + return func(ctx context.Context, conn *mysql.Conn, host string) (sql.Session, *sql.IndexRegistry, *sql.ViewRegistry, error) { + mysqlSess := sql.NewSession(host, conn.RemoteAddr().String(), conn.User, conn.ConnectionID) + doltSess, err := dsqle.NewSessionWithDefaultRoots(mysqlSess, dbsAsDSQLDBs(sqlEngine.Catalog.AllDatabases())...) + + if err != nil { + return nil, nil, nil, err + } + + ir := sql.NewIndexRegistry() + vr := sql.NewViewRegistry() + sqlCtx := sql.NewContext( + ctx, + sql.WithIndexRegistry(ir), + sql.WithViewRegistry(vr), + sql.WithSession(doltSess)) + + dbs := dbsAsDSQLDBs(sqlEngine.Catalog.AllDatabases()) + for _, db := range dbs { + err := db.SetRoot(sqlCtx, db.GetDefaultRoot()) + if err != nil { + return nil, nil, nil, err + } + + err = dsqle.RegisterSchemaFragments(sqlCtx, db, db.GetDefaultRoot()) + if err != nil { + cli.PrintErr(err) + return nil, nil, nil, err + } + } + + // TODO: this shouldn't need to happen every session + sqlCtx.RegisterIndexDriver(dsqle.NewDoltIndexDriver(dbs...)) + err = ir.LoadIndexes(sqlCtx, sqlEngine.Catalog.AllDatabases()) + + if err != nil { + return nil, nil, nil, err + } + + return doltSess, ir, vr, nil + } +} + +func newAutoCommmitDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database { + return dsqle.NewAutoCommitDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) +} + func dbsAsDSQLDBs(dbs []sql.Database) []dsqle.Database { dsqlDBs := make([]dsqle.Database, 0, len(dbs)) diff --git a/go/libraries/doltcore/env/environment.go b/go/libraries/doltcore/env/environment.go index a4e7850f3d..3892ebc2d0 100644 --- a/go/libraries/doltcore/env/environment.go +++ b/go/libraries/doltcore/env/environment.go @@ -364,8 +364,16 @@ func (dEnv *DoltEnv) UpdateWorkingRoot(ctx context.Context, newRoot *doltdb.Root return doltdb.ErrNomsIO } - dEnv.RepoState.Working = h.String() - err = dEnv.RepoState.Save(dEnv.FS) + return dEnv.RepoStateWriter().SetWorkingHash(ctx, h) +} + +type repoStateWriter struct { + dEnv *DoltEnv +} + +func (r *repoStateWriter) SetWorkingHash(ctx context.Context, h hash.Hash) error { + r.dEnv.RepoState.Working = h.String() + err := r.dEnv.RepoState.Save(r.dEnv.FS) if err != nil { return ErrStateUpdate @@ -374,6 +382,10 @@ func (dEnv *DoltEnv) UpdateWorkingRoot(ctx context.Context, newRoot *doltdb.Root return nil } +func (dEnv *DoltEnv) RepoStateWriter() RepoStateWriter { + return &repoStateWriter{dEnv} +} + func (dEnv *DoltEnv) HeadRoot(ctx context.Context) (*doltdb.RootValue, error) { commit, err := dEnv.DoltDB.Resolve(ctx, dEnv.RepoState.CWBHeadSpec()) diff --git a/go/libraries/doltcore/env/repo_state.go b/go/libraries/doltcore/env/repo_state.go index 4f7eb0386e..46ea113f3c 100644 --- a/go/libraries/doltcore/env/repo_state.go +++ b/go/libraries/doltcore/env/repo_state.go @@ -15,6 +15,7 @@ package env import ( + "context" "encoding/json" "github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb" @@ -30,6 +31,13 @@ type RepoStateReader interface { StagedHash() hash.Hash } +type RepoStateWriter interface { + // SetCWBHeadRef(context.Context, ref.DoltRef) error + // SetCWBHeadSpec(context.Context, *doltdb.CommitSpec) error + SetWorkingHash(context.Context, hash.Hash) error +// SetStagedHash(context.Context, hash.Hash) error +} + type BranchConfig struct { Merge ref.MarshalableRef `json:"head"` Remote string `json:"remote"` diff --git a/go/libraries/doltcore/sqle/database.go b/go/libraries/doltcore/sqle/database.go index c5854dba57..9415297455 100644 --- a/go/libraries/doltcore/sqle/database.go +++ b/go/libraries/doltcore/sqle/database.go @@ -37,15 +37,16 @@ import ( "github.com/liquidata-inc/dolt/go/store/hash" ) -type batchMode bool +type commitBehavior int8 var ErrInvalidTableName = errors.NewKind("Invalid table name %s. Table names must match the regular expression " + doltdb.TableNameRegexStr) var ErrReservedTableName = errors.NewKind("Invalid table name %s. Table names beginning with `dolt_` are reserved for internal use") var ErrSystemTableAlter = errors.NewKind("Cannot alter table %s: system tables cannot be dropped or altered") const ( - batched batchMode = true - single batchMode = false + batched commitBehavior = iota + single + autoCommit ) type tableCache struct { @@ -106,7 +107,8 @@ type Database struct { defRoot *doltdb.RootValue ddb *doltdb.DoltDB rsr env.RepoStateReader - batchMode batchMode + rsw env.RepoStateWriter + batchMode commitBehavior tc *tableCache } @@ -141,6 +143,21 @@ func NewBatchedDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, } } +// NewAutoCommitDatabase returns a new dolt database executing in autocommit mode. Every write operation will update +// the working set with the new root value. +func NewAutoCommitDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader, rsw env.RepoStateWriter) Database { + return Database{ + name: name, + defRoot: root, + ddb: ddb, + rsr: rsr, + rsw: rsw, + batchMode: autoCommit, + tc: &tableCache{&sync.Mutex{}, make(map[*doltdb.RootValue]map[string]sql.Table)}, + } +} + + // Name returns the name of this database, set at creation time. func (db Database) Name() string { return db.name @@ -464,6 +481,15 @@ func (db Database) SetRoot(ctx *sql.Context, newRoot *doltdb.RootValue) error { dsess := DSessFromSess(ctx.Session) dsess.dbRoots[db.name] = dbRoot{hashStr, newRoot} + if db.batchMode == autoCommit { + h, err := db.ddb.WriteRootValue(ctx, newRoot) + if err != nil { + return err + } + + return db.rsw.SetWorkingHash(ctx, h) + } + return nil } From f1cfc12d973f6bec0b4efe278795bd379e26968c Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Sat, 11 Apr 2020 15:52:38 -0700 Subject: [PATCH 12/60] created uint64set and tests --- go/libraries/utils/set/uint64set.go | 53 ++++++++++++++++++++++++ go/libraries/utils/set/uint64set_test.go | 50 ++++++++++++++++++++++ 2 files changed, 103 insertions(+) create mode 100644 go/libraries/utils/set/uint64set.go create mode 100644 go/libraries/utils/set/uint64set_test.go diff --git a/go/libraries/utils/set/uint64set.go b/go/libraries/utils/set/uint64set.go new file mode 100644 index 0000000000..279e834699 --- /dev/null +++ b/go/libraries/utils/set/uint64set.go @@ -0,0 +1,53 @@ +// Copyright 2019 Liquidata, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package set + +type Uint64Set struct { + uints map[uint64]interface{} +} + +func NewUint64Set(uints []uint64) *Uint64Set { + s := &Uint64Set{make(map[uint64]interface{}, len(uints))} + + for _, b := range uints { + s.uints[b] = emptyInstance + } + + return s +} + +func (us *Uint64Set) Contains(i uint64) bool { + _, present := us.uints[i] + return present +} + +func (us *Uint64Set) ContainsAll(uints []uint64) bool { + for _, b := range uints { + if _, present := us.uints[b]; !present { + return false + } + } + + return true +} + +func (us *Uint64Set) Add(i uint64) { + us.uints[i] = emptyInstance +} + +func (us *Uint64Set) Size() int { + return len(us.uints) +} + diff --git a/go/libraries/utils/set/uint64set_test.go b/go/libraries/utils/set/uint64set_test.go new file mode 100644 index 0000000000..895fce32b0 --- /dev/null +++ b/go/libraries/utils/set/uint64set_test.go @@ -0,0 +1,50 @@ +// Copyright 2019 Liquidata, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package set + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewUint64Set(t *testing.T) { + initData := []uint64{0, 1, 2, 3} + us := NewUint64Set(initData) + + // test .Size() + assert.Equal(t, 4, us.Size()) + + // test .Contains() + for _, id := range initData { + assert.True(t, us.Contains(id)) + } + assert.False(t, us.Contains(5)) + + + // test .ContainsAll() + assert.True(t, us.ContainsAll([]uint64{0, 1})) + assert.False(t, us.ContainsAll([]uint64{0, 1, 2, 5})) + + // test .Add() + us.Add(6) + assert.True(t, us.Contains(6)) + assert.Equal(t, 5, us.Size()) + for _, id := range initData { + assert.True(t, us.Contains(id)) + } + assert.True(t, us.ContainsAll(append(initData, 6))) + +} From 749ff298e93ad3dfc0849025fbe637e6489c3c83 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Sat, 11 Apr 2020 16:03:28 -0700 Subject: [PATCH 13/60] updating existing tag set with each tag creation --- go/libraries/doltcore/doltdb/root_val.go | 4 +++- go/libraries/doltcore/schema/super_schema.go | 5 +++++ go/libraries/doltcore/schema/tag.go | 7 ++++--- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/go/libraries/doltcore/doltdb/root_val.go b/go/libraries/doltcore/doltdb/root_val.go index 21a1deeb1f..751039e239 100644 --- a/go/libraries/doltcore/doltdb/root_val.go +++ b/go/libraries/doltcore/doltdb/root_val.go @@ -277,9 +277,11 @@ func (root *RootValue) GenerateTagsForNewColumns(ctx context.Context, tableName } newTags := make([]uint64, len(newColNames)) + existingTags := set.NewUint64Set(rootSuperSchema.AllTags()) for i := range newTags { - newTags[i] = schema.AutoGenerateTag(rootSuperSchema, tableName, existingColKinds, newColNames[i], newColKinds[i]) + newTags[i] = schema.AutoGenerateTag(existingTags, tableName, existingColKinds, newColNames[i], newColKinds[i]) existingColKinds = append(existingColKinds, newColKinds[i]) + existingTags.Add(newTags[i]) } return newTags, nil diff --git a/go/libraries/doltcore/schema/super_schema.go b/go/libraries/doltcore/schema/super_schema.go index 58d12080ed..d64803f407 100644 --- a/go/libraries/doltcore/schema/super_schema.go +++ b/go/libraries/doltcore/schema/super_schema.go @@ -120,6 +120,11 @@ func (ss *SuperSchema) AllColumnNames(tag uint64) []string { return ss.tagNames[tag] } +// AllTags returns a slice of all tags contained in the SuperSchema +func (ss *SuperSchema) AllTags() []uint64 { + return ss.allCols.Tags +} + // LatestColumnName returns the latest name of the column corresponding to tag func (ss *SuperSchema) LatestColumnName(tag uint64) string { return ss.tagNames[tag][0] diff --git a/go/libraries/doltcore/schema/tag.go b/go/libraries/doltcore/schema/tag.go index 10c187aaa5..d2426785db 100644 --- a/go/libraries/doltcore/schema/tag.go +++ b/go/libraries/doltcore/schema/tag.go @@ -18,6 +18,7 @@ import ( "crypto/sha512" "encoding/binary" "fmt" + "github.com/liquidata-inc/dolt/go/libraries/utils/set" "math/rand" "regexp" "strings" @@ -40,11 +41,11 @@ func ErrTagPrevUsed(tag uint64, newColName, tableName string) error { // and repositories that perform the same sequence of mutations to a database will get equivalent databases as a result. // DETERMINISTIC MUTATION IS A CRITICAL INVARIANT TO MAINTAINING COMPATIBILITY BETWEEN REPOSITORIES. // DO NOT ALTER THIS METHOD. -func AutoGenerateTag(rootSS *SuperSchema, tableName string, existingColKinds []types.NomsKind, newColName string, newColKind types.NomsKind) uint64 { +func AutoGenerateTag(existingTags *set.Uint64Set, tableName string, existingColKinds []types.NomsKind, newColName string, newColKind types.NomsKind) uint64 { // DO NOT ALTER THIS METHOD (see above) var maxTagVal uint64 = 128 * 128 - for maxTagVal/2 < uint64(rootSS.Size()) { + for maxTagVal/2 < uint64(existingTags.Size()) { if maxTagVal >= ReservedTagMin-1 { panic("There is no way anyone should ever have this many columns. You are a bad person if you hit this panic.") } else if maxTagVal*128 < maxTagVal { @@ -60,7 +61,7 @@ func AutoGenerateTag(rootSS *SuperSchema, tableName string, existingColKinds []t for { randTag = uint64(randGen.Int63n(int64(maxTagVal))) - if _, found := rootSS.GetByTag(randTag); !found { + if !existingTags.Contains(randTag) { break } } From 73d843e3cf02a1f36ca9b7f3b71703ab6652813d Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Sun, 12 Apr 2020 16:21:19 -0700 Subject: [PATCH 14/60] Made sqle.Database a pointer receiver again, because we need to persist updated root values across sessions. Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sql.go | 24 +++---- go/cmd/dolt/commands/sqlserver/server.go | 8 +-- go/libraries/doltcore/sqle/database.go | 80 ++++++++++------------ go/libraries/doltcore/sqle/dolt_session.go | 2 +- go/libraries/doltcore/sqle/indexes.go | 8 +-- go/libraries/doltcore/sqle/schema_table.go | 2 +- go/libraries/doltcore/sqle/tables.go | 2 +- go/libraries/doltcore/sqle/testutil.go | 2 +- 8 files changed, 60 insertions(+), 68 deletions(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index 08eb311f9f..da0e72b420 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -346,11 +346,11 @@ func execBatch(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*do return newRoots, nil } -func newDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database { +func newDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) *dsqle.Database { return dsqle.NewDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState) } -func newBatchedDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database { +func newBatchedDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) *dsqle.Database { return dsqle.NewBatchedDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState) } @@ -383,12 +383,12 @@ func execQuery(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*do return newRoots, nil } -type createDBFunc func(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database +type createDBFunc func(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) *dsqle.Database // CollectDBs takes a MultiRepoEnv and creates Database objects from each environment and returns a slice of these // objects. -func CollectDBs(mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, createDB createDBFunc) []dsqle.Database { - dbs := make([]dsqle.Database, 0, len(mrEnv)) +func CollectDBs(mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, createDB createDBFunc) []*dsqle.Database { + dbs := make([]*dsqle.Database, 0, len(mrEnv)) _ = mrEnv.Iter(func(name string, dEnv *env.DoltEnv) (stop bool, err error) { root := roots[name] db := createDB(name, root, dEnv) @@ -906,7 +906,7 @@ func (s *stats) shouldFlush() bool { } func flushBatchedEdits(ctx *sql.Context, se *sqlEngine) error { - err := se.iterDBs(func(_ string, db dsqle.Database) (bool, error) { + err := se.iterDBs(func(_ string, db *dsqle.Database) (bool, error) { err := db.Flush(ctx) if err != nil { @@ -1071,7 +1071,7 @@ const ( ) type sqlEngine struct { - dbs map[string]dsqle.Database + dbs map[string]*dsqle.Database mrEnv env.MultiRepoEnv engine *sqle.Engine resultFormat resultFormat @@ -1080,11 +1080,11 @@ type sqlEngine struct { var ErrDBNotFoundKind = errors.NewKind("database '%s' not found") // sqlEngine packages up the context necessary to run sql queries against sqle. -func newSqlEngine(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, format resultFormat, dbs ...dsqle.Database) (*sqlEngine, error) { +func newSqlEngine(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, format resultFormat, dbs ...*dsqle.Database) (*sqlEngine, error) { engine := sqle.NewDefault() engine.AddDatabase(sql.NewInformationSchemaDatabase(engine.Catalog)) - nameToDB := make(map[string]dsqle.Database) + nameToDB := make(map[string]*dsqle.Database) for _, db := range dbs { nameToDB[db.Name()] = db root := roots[db.Name()] @@ -1110,17 +1110,17 @@ func newSqlEngine(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string] return &sqlEngine{nameToDB, mrEnv, engine, format}, nil } -func (se *sqlEngine) getDB(name string) (dsqle.Database, error) { +func (se *sqlEngine) getDB(name string) (*dsqle.Database, error) { db, ok := se.dbs[name] if !ok { - return dsqle.Database{}, ErrDBNotFoundKind.New(name) + return nil, ErrDBNotFoundKind.New(name) } return db, nil } -func (se *sqlEngine) iterDBs(cb func(name string, db dsqle.Database) (stop bool, err error)) error { +func (se *sqlEngine) iterDBs(cb func(name string, db *dsqle.Database) (stop bool, err error)) error { for name, db := range se.dbs { stop, err := cb(name, db) diff --git a/go/cmd/dolt/commands/sqlserver/server.go b/go/cmd/dolt/commands/sqlserver/server.go index 028fac3410..c2439131f1 100644 --- a/go/cmd/dolt/commands/sqlserver/server.go +++ b/go/cmd/dolt/commands/sqlserver/server.go @@ -184,15 +184,15 @@ func newSessionBuilder(sqlEngine *sqle.Engine) server.SessionBuilder { } } -func newAutoCommmitDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database { +func newAutoCommmitDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) *dsqle.Database { return dsqle.NewAutoCommitDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) } -func dbsAsDSQLDBs(dbs []sql.Database) []dsqle.Database { - dsqlDBs := make([]dsqle.Database, 0, len(dbs)) +func dbsAsDSQLDBs(dbs []sql.Database) []*dsqle.Database { + dsqlDBs := make([]*dsqle.Database, 0, len(dbs)) for _, db := range dbs { - dsqlDB, ok := db.(dsqle.Database) + dsqlDB, ok := db.(*dsqle.Database) if ok { dsqlDBs = append(dsqlDBs, dsqlDB) diff --git a/go/libraries/doltcore/sqle/database.go b/go/libraries/doltcore/sqle/database.go index 9415297455..1b2650d91f 100644 --- a/go/libraries/doltcore/sqle/database.go +++ b/go/libraries/doltcore/sqle/database.go @@ -112,15 +112,15 @@ type Database struct { tc *tableCache } -var _ sql.Database = Database{} -var _ sql.VersionedDatabase = Database{} -var _ sql.TableDropper = Database{} -var _ sql.TableCreator = Database{} -var _ sql.TableRenamer = Database{} +var _ sql.Database = (*Database)(nil) +var _ sql.VersionedDatabase = (*Database)(nil) +var _ sql.TableDropper = (*Database)(nil) +var _ sql.TableCreator = (*Database)(nil) +var _ sql.TableRenamer = (*Database)(nil) // NewDatabase returns a new dolt database to use in queries. -func NewDatabase(name string, defRoot *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader) Database { - return Database{ +func NewDatabase(name string, defRoot *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader) *Database { + return &Database{ name: name, defRoot: defRoot, ddb: ddb, @@ -132,8 +132,8 @@ func NewDatabase(name string, defRoot *doltdb.RootValue, ddb *doltdb.DoltDB, rsr // NewBatchedDatabase returns a new dolt database executing in batch insert mode. Integrators must call Flush() to // commit any outstanding edits. -func NewBatchedDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader) Database { - return Database{ +func NewBatchedDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader) *Database { + return &Database{ name: name, defRoot: root, ddb: ddb, @@ -145,8 +145,8 @@ func NewBatchedDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, // NewAutoCommitDatabase returns a new dolt database executing in autocommit mode. Every write operation will update // the working set with the new root value. -func NewAutoCommitDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader, rsw env.RepoStateWriter) Database { - return Database{ +func NewAutoCommitDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader, rsw env.RepoStateWriter) *Database { + return &Database{ name: name, defRoot: root, ddb: ddb, @@ -159,23 +159,23 @@ func NewAutoCommitDatabase(name string, root *doltdb.RootValue, ddb *doltdb.Dolt // Name returns the name of this database, set at creation time. -func (db Database) Name() string { +func (db *Database) Name() string { return db.name } // GetDefaultRoot returns the default root of the database that is used by new sessions. -func (db Database) GetDefaultRoot() *doltdb.RootValue { +func (db *Database) GetDefaultRoot() *doltdb.RootValue { return db.defRoot } // GetDoltDB gets the underlying DoltDB of the Database -func (db Database) GetDoltDB() *doltdb.DoltDB { +func (db *Database) GetDoltDB() *doltdb.DoltDB { return db.ddb } // GetTableInsensitive is used when resolving tables in queries. It returns a best-effort case-insensitive match for // the table name given. -func (db Database) GetTableInsensitive(ctx *sql.Context, tblName string) (sql.Table, bool, error) { +func (db *Database) GetTableInsensitive(ctx *sql.Context, tblName string) (sql.Table, bool, error) { root, err := db.GetRoot(ctx) if err != nil { @@ -185,7 +185,7 @@ func (db Database) GetTableInsensitive(ctx *sql.Context, tblName string) (sql.Ta return db.GetTableInsensitiveWithRoot(ctx, root, tblName) } -func (db Database) GetTableInsensitiveWithRoot(ctx context.Context, root *doltdb.RootValue, tblName string) (sql.Table, bool, error) { +func (db *Database) GetTableInsensitiveWithRoot(ctx context.Context, root *doltdb.RootValue, tblName string) (sql.Table, bool, error) { lwrName := strings.ToLower(tblName) if strings.HasPrefix(lwrName, DoltDiffTablePrefix) { tblName = tblName[len(DoltDiffTablePrefix):] @@ -217,7 +217,7 @@ func (db Database) GetTableInsensitiveWithRoot(ctx context.Context, root *doltdb } // GetTableInsensitiveAsOf implements sql.VersionedDatabase -func (db Database) GetTableInsensitiveAsOf(ctx *sql.Context, tableName string, asOf interface{}) (sql.Table, bool, error) { +func (db *Database) GetTableInsensitiveAsOf(ctx *sql.Context, tableName string, asOf interface{}) (sql.Table, bool, error) { root, err := db.rootAsOf(ctx, asOf) if err != nil { return nil, false, err @@ -230,7 +230,7 @@ func (db Database) GetTableInsensitiveAsOf(ctx *sql.Context, tableName string, a // rootAsOf returns the root of the DB as of the expression given, which may be nil in the case that it refers to an // expression before the first commit. -func (db Database) rootAsOf(ctx *sql.Context, asOf interface{}) (*doltdb.RootValue, error) { +func (db *Database) rootAsOf(ctx *sql.Context, asOf interface{}) (*doltdb.RootValue, error) { switch x := asOf.(type) { case string: return db.getRootForCommitRef(ctx, x) @@ -241,7 +241,7 @@ func (db Database) rootAsOf(ctx *sql.Context, asOf interface{}) (*doltdb.RootVal } } -func (db Database) getRootForTime(ctx *sql.Context, asOf time.Time) (*doltdb.RootValue, error) { +func (db *Database) getRootForTime(ctx *sql.Context, asOf time.Time) (*doltdb.RootValue, error) { cs, err := doltdb.NewCommitSpec("HEAD", db.rsr.CWBHeadRef().String()) if err != nil { return nil, err @@ -283,7 +283,7 @@ func (db Database) getRootForTime(ctx *sql.Context, asOf time.Time) (*doltdb.Roo return nil, nil } -func (db Database) getRootForCommitRef(ctx *sql.Context, commitRef string) (*doltdb.RootValue, error) { +func (db *Database) getRootForCommitRef(ctx *sql.Context, commitRef string) (*doltdb.RootValue, error) { cs, err := doltdb.NewCommitSpec(commitRef, db.rsr.CWBHeadRef().String()) if err != nil { return nil, err @@ -303,7 +303,7 @@ func (db Database) getRootForCommitRef(ctx *sql.Context, commitRef string) (*dol } // GetTableNamesAsOf implements sql.VersionedDatabase -func (db Database) GetTableNamesAsOf(ctx *sql.Context, time interface{}) ([]string, error) { +func (db *Database) GetTableNamesAsOf(ctx *sql.Context, time interface{}) ([]string, error) { root, err := db.rootAsOf(ctx, time) if err != nil { return nil, err @@ -320,7 +320,7 @@ func (db Database) GetTableNamesAsOf(ctx *sql.Context, time interface{}) ([]stri // getTable gets the table with the exact name given at the root value given. The database caches tables for all root // values to avoid doing schema lookups on every table lookup, which are expensive. -func (db Database) getTable(ctx context.Context, root *doltdb.RootValue, tableName string) (sql.Table, bool, error) { +func (db *Database) getTable(ctx context.Context, root *doltdb.RootValue, tableName string) (sql.Table, bool, error) { if table, ok := db.tc.Get(tableName, root); ok { return table, true, nil } @@ -368,7 +368,7 @@ func (db Database) getTable(ctx context.Context, root *doltdb.RootValue, tableNa // are filtered out. This method is used for queries that examine the schema of the database, e.g. show tables. Table // name resolution in queries is handled by GetTableInsensitive. Use GetAllTableNames for an unfiltered list of all // tables in user space. -func (db Database) GetTableNames(ctx *sql.Context) ([]string, error) { +func (db *Database) GetTableNames(ctx *sql.Context) ([]string, error) { tblNames, err := db.GetAllTableNames(ctx) if err != nil { return nil, err @@ -378,7 +378,7 @@ func (db Database) GetTableNames(ctx *sql.Context) ([]string, error) { // GetAllTableNames returns all user-space tables, including system tables in user space // (e.g. dolt_docs, dolt_query_catalog). -func (db Database) GetAllTableNames(ctx *sql.Context) ([]string, error) { +func (db *Database) GetAllTableNames(ctx *sql.Context) ([]string, error) { root, err := db.GetRoot(ctx) if err != nil { @@ -402,13 +402,13 @@ func filterDoltInternalTables(tblNames []string) []string { return result } -func (db Database) headKeyForDB() string { +func (db *Database) headKeyForDB() string { return fmt.Sprintf("%s_head", db.name) } var hashType = sql.MustCreateString(query.Type_TEXT, 32, sql.Collation_ascii_bin) -func (db Database) GetRoot(ctx *sql.Context) (*doltdb.RootValue, error) { +func (db *Database) GetRoot(ctx *sql.Context) (*doltdb.RootValue, error) { dsess := DSessFromSess(ctx.Session) currRoot, dbRootOk := dsess.dbRoots[db.name] @@ -458,16 +458,7 @@ func (db Database) GetRoot(ctx *sql.Context) (*doltdb.RootValue, error) { // Set a new root value for the database. Can be used if the dolt working // set value changes outside of the basic SQL execution engine. -func (db Database) SetRoot(ctx *sql.Context, newRoot *doltdb.RootValue) error { - // Need to decide on what behavior we want here. Currently all sql-server processing is done - // in memory and is never written to disk. Can leave it like this and commit as part of a - // transaction, or something similar. - /*h, err := db.ddb.WriteRootValue(ctx, newRoot) - - if err != nil { - return err - }*/ - +func (db *Database) SetRoot(ctx *sql.Context, newRoot *doltdb.RootValue) error { h, err := newRoot.HashOf() if err != nil { @@ -487,6 +478,7 @@ func (db Database) SetRoot(ctx *sql.Context, newRoot *doltdb.RootValue) error { return err } + db.defRoot = newRoot return db.rsw.SetWorkingHash(ctx, h) } @@ -494,7 +486,7 @@ func (db Database) SetRoot(ctx *sql.Context, newRoot *doltdb.RootValue) error { } // DropTable drops the table with the name given -func (db Database) DropTable(ctx *sql.Context, tableName string) error { +func (db *Database) DropTable(ctx *sql.Context, tableName string) error { root, err := db.GetRoot(ctx) if err != nil { @@ -523,7 +515,7 @@ func (db Database) DropTable(ctx *sql.Context, tableName string) error { } // CreateTable creates a table with the name and schema given. -func (db Database) CreateTable(ctx *sql.Context, tableName string, sch sql.Schema) error { +func (db *Database) CreateTable(ctx *sql.Context, tableName string, sch sql.Schema) error { if doltdb.HasDoltPrefix(tableName) { return ErrReservedTableName.New(tableName) } @@ -547,7 +539,7 @@ func (db Database) CreateTable(ctx *sql.Context, tableName string, sch sql.Schem } // Unlike the exported version, createTable doesn't enforce any table name checks. -func (db Database) createTable(ctx *sql.Context, tableName string, sch sql.Schema) error { +func (db *Database) createTable(ctx *sql.Context, tableName string, sch sql.Schema) error { root, err := db.GetRoot(ctx) if err != nil { @@ -589,7 +581,7 @@ func (db Database) createTable(ctx *sql.Context, tableName string, sch sql.Schem } // RenameTable implements sql.TableRenamer -func (db Database) RenameTable(ctx *sql.Context, oldName, newName string) error { +func (db *Database) RenameTable(ctx *sql.Context, oldName, newName string) error { root, err := db.GetRoot(ctx) if err != nil { @@ -618,7 +610,7 @@ func (db Database) RenameTable(ctx *sql.Context, oldName, newName string) error } // Flush flushes the current batch of outstanding changes and returns any errors. -func (db Database) Flush(ctx *sql.Context) error { +func (db *Database) Flush(ctx *sql.Context) error { root, err := db.GetRoot(ctx) if err != nil { @@ -647,7 +639,7 @@ func (db Database) Flush(ctx *sql.Context) error { // CreateView implements sql.ViewCreator. Persists the view in the dolt database, so // it can exist in a sql session later. Returns sql.ErrExistingView if a view // with that name already exists. -func (db Database) CreateView(ctx *sql.Context, name string, definition string) error { +func (db *Database) CreateView(ctx *sql.Context, name string, definition string) error { tbl, err := GetOrCreateDoltSchemasTable(ctx, db) if err != nil { return err @@ -674,7 +666,7 @@ func (db Database) CreateView(ctx *sql.Context, name string, definition string) // DropView implements sql.ViewDropper. Removes a view from persistence in the // dolt database. Returns sql.ErrNonExistingView if the view did not // exist. -func (db Database) DropView(ctx *sql.Context, name string) error { +func (db *Database) DropView(ctx *sql.Context, name string) error { stbl, found, err := db.GetTableInsensitive(ctx, doltdb.SchemasTableName) if err != nil { return err @@ -708,7 +700,7 @@ func (db Database) DropView(ctx *sql.Context, name string) error { // there are I/O issues, but currently silently fails to register some // schema fragments if they don't parse, or if registries within the // `catalog` return errors. -func RegisterSchemaFragments(ctx *sql.Context, db Database, root *doltdb.RootValue) error { +func RegisterSchemaFragments(ctx *sql.Context, db *Database, root *doltdb.RootValue) error { stbl, found, err := db.GetTableInsensitiveWithRoot(ctx, root, doltdb.SchemasTableName) if err != nil { return err diff --git a/go/libraries/doltcore/sqle/dolt_session.go b/go/libraries/doltcore/sqle/dolt_session.go index fb291f5f95..bab780c69d 100644 --- a/go/libraries/doltcore/sqle/dolt_session.go +++ b/go/libraries/doltcore/sqle/dolt_session.go @@ -37,7 +37,7 @@ func DefaultDoltSession() *DoltSession { } // NewSessionWithDefaultRoot creates a DoltSession object from a standard sql.Session and 0 or more Database objects. -func NewSessionWithDefaultRoots(sqlSess sql.Session, dbs ...Database) (*DoltSession, error) { +func NewSessionWithDefaultRoots(sqlSess sql.Session, dbs ...*Database) (*DoltSession, error) { dbRoots := make(map[string]dbRoot) for _, db := range dbs { defRoot := db.GetDefaultRoot() diff --git a/go/libraries/doltcore/sqle/indexes.go b/go/libraries/doltcore/sqle/indexes.go index 95b11647fa..cab31495e6 100644 --- a/go/libraries/doltcore/sqle/indexes.go +++ b/go/libraries/doltcore/sqle/indexes.go @@ -29,11 +29,11 @@ import ( // IndexDriver implementation. Not ready for prime time. type DoltIndexDriver struct { - dbs map[string]Database + dbs map[string]*Database } -func NewDoltIndexDriver(dbs ...Database) *DoltIndexDriver { - nameToDB := make(map[string]Database) +func NewDoltIndexDriver(dbs ...*Database) *DoltIndexDriver { + nameToDB := make(map[string]*Database) for _, db := range dbs { nameToDB[db.Name()] = db } @@ -91,7 +91,7 @@ func (i *DoltIndexDriver) LoadAll(ctx *sql.Context, db, table string) ([]sql.Ind type doltIndex struct { sch schema.Schema tableName string - db Database + db *Database driver *DoltIndexDriver } diff --git a/go/libraries/doltcore/sqle/schema_table.go b/go/libraries/doltcore/sqle/schema_table.go index cb9304bf61..361be97624 100644 --- a/go/libraries/doltcore/sqle/schema_table.go +++ b/go/libraries/doltcore/sqle/schema_table.go @@ -35,7 +35,7 @@ func SchemasTableSchema() sql.Schema { } // GetOrCreateDoltSchemasTable returns the `dolt_schemas` table in `db`, creating it if it does not already exist. -func GetOrCreateDoltSchemasTable(ctx *sql.Context, db Database) (*WritableDoltTable, error) { +func GetOrCreateDoltSchemasTable(ctx *sql.Context, db *Database) (*WritableDoltTable, error) { tbl, found, err := db.GetTableInsensitive(ctx, doltdb.SchemasTableName) if err != nil { return nil, err diff --git a/go/libraries/doltcore/sqle/tables.go b/go/libraries/doltcore/sqle/tables.go index e5d17a3c3e..821d9c5621 100644 --- a/go/libraries/doltcore/sqle/tables.go +++ b/go/libraries/doltcore/sqle/tables.go @@ -35,7 +35,7 @@ type DoltTable struct { table *doltdb.Table sch schema.Schema sqlSch sql.Schema - db Database + db *Database } var _ sql.Table = (*DoltTable)(nil) diff --git a/go/libraries/doltcore/sqle/testutil.go b/go/libraries/doltcore/sqle/testutil.go index bfb75c2de2..d933aa8e36 100644 --- a/go/libraries/doltcore/sqle/testutil.go +++ b/go/libraries/doltcore/sqle/testutil.go @@ -99,7 +99,7 @@ func NewTestSQLCtx(ctx context.Context) *sql.Context { } // NewTestEngine creates a new default engine, and a *sql.Context and initializes indexes and schema fragments. -func NewTestEngine(ctx context.Context, db Database, root *doltdb.RootValue) (*sqle.Engine, *sql.Context, error) { +func NewTestEngine(ctx context.Context, db *Database, root *doltdb.RootValue) (*sqle.Engine, *sql.Context, error) { engine := sqle.NewDefault() engine.AddDatabase(db) From 4663b02153455814c2fe7b63b453ed96129d8154 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Sun, 12 Apr 2020 18:10:29 -0700 Subject: [PATCH 15/60] fixed migration bugs --- bats/compatibility/corona-virus-test.sh | 2 +- go/libraries/doltcore/rebase/rebase_tag.go | 242 +++++++++++++-------- go/libraries/doltcore/schema/tag.go | 2 +- go/libraries/utils/set/uint64set.go | 15 ++ go/libraries/utils/set/uint64set_test.go | 19 +- 5 files changed, 190 insertions(+), 90 deletions(-) diff --git a/bats/compatibility/corona-virus-test.sh b/bats/compatibility/corona-virus-test.sh index 68b7c60ec8..6760142eb3 100755 --- a/bats/compatibility/corona-virus-test.sh +++ b/bats/compatibility/corona-virus-test.sh @@ -99,7 +99,7 @@ local_bin="`pwd`"/"$bin" PATH="$local_bin":"$PATH" dolt clone Liquidata/corona-virus pushd "corona-virus" PATH="$local_bin":"$PATH" export_tables "-pre" -dolt migrate +time dolt migrate export_tables "-post" diff_tables echo "success!" diff --git a/go/libraries/doltcore/rebase/rebase_tag.go b/go/libraries/doltcore/rebase/rebase_tag.go index a152606348..650a8eb8be 100644 --- a/go/libraries/doltcore/rebase/rebase_tag.go +++ b/go/libraries/doltcore/rebase/rebase_tag.go @@ -16,8 +16,8 @@ package rebase import ( "context" - "errors" "fmt" + "github.com/liquidata-inc/dolt/go/libraries/utils/set" "time" "github.com/liquidata-inc/dolt/go/libraries/doltcore/diff" @@ -107,30 +107,28 @@ func MigrateUniqueTags(ctx context.Context, dEnv *env.DoltEnv) error { headCommits = append(headCommits, cm) } - // DFS the commit graph find a unique new tag for all existing tags in every table in history - globalMapping := make(map[string]map[uint64]uint64) + if len(branches) != len(headCommits) { + panic("error in uniquifying tags") + } - replay := func(ctx context.Context, root, parentRoot, rebasedParentRoot *doltdb.RootValue) (*doltdb.RootValue, error) { - err := buildGlobalTagMapping(ctx, root, parentRoot, rebasedParentRoot, globalMapping) + // DFS the commit graph find a unique new tag for all existing tags in every table in history + replay := func(ctx context.Context, root, parentRoot, rebasedParentRoot *doltdb.RootValue) (rebaseRoot *doltdb.RootValue, err error) { + tagMapping, err := buildTagMapping(ctx, root, parentRoot, rebasedParentRoot) if err != nil { return nil, err } - return root, nil + err = validateTagMapping(tagMapping) + + if err != nil { + return nil, err + } + + return replayCommitWithNewTag(ctx, root, parentRoot, rebasedParentRoot, tagMapping) } - _, err = rebase(ctx, ddb, replay, entireHistory, headCommits...) - - if err != nil { - return err - } - - if len(branches) != len(headCommits) { - panic("error in uniquifying tags") - } - - newCommits, err := TagRebaseForCommits(ctx, ddb, globalMapping, headCommits...) + newCommits, err := rebase(ctx, ddb, replay, entireHistory, headCommits...) if err != nil { return err @@ -255,14 +253,36 @@ func TagRebaseForCommits(ctx context.Context, ddb *doltdb.DoltDB, tm TagMapping, func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParentRoot *doltdb.RootValue, tm TagMapping) (*doltdb.RootValue, error) { - newRoot := root - for tblName, tableMapping := range tm { - tbl, found, err := newRoot.GetTable(ctx, tblName) + tableNames, err := doltdb.UnionTableNames(ctx, root, rebasedParentRoot) + + if err != nil { + return nil, err + } + + newRoot := rebasedParentRoot + for _, tblName := range tableNames { + + tbl, found, err := root.GetTable(ctx, tblName) if err != nil { return nil, err } if !found { + // table was deleted since parent commit + ok, err := newRoot.HasTable(ctx, tblName) + if err != nil { + return nil, err + } + if !ok { + return nil, fmt.Errorf("error rebasing, table %s not found in rebasedParentRoot", tblName) + } + + newRoot, err = newRoot.RemoveTables(ctx, tblName) + + if err != nil { + return nil, err + } + continue } @@ -271,16 +291,21 @@ func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParent return nil, err } - // tags may not exist in this commit - tagExists := false - for oldTag, _ := range tableMapping { - if _, found := sch.GetAllCols().GetByTag(oldTag); found { - tagExists = true - break - } + // only rebase this table if we have a mapping for it, and at least one of the + // tags in the mapping is present in its schema at this commit + tableNeedsRebasing := false + tableMapping, found := tm[tblName] + if found { + _ = sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) { + if _, found = tableMapping[tag]; found { + tableNeedsRebasing = true + } + return tableNeedsRebasing, nil + }) } - if !tagExists { - continue + + if !tableNeedsRebasing { + newRoot, err = newRoot.PutTable(ctx, tblName, tbl) } parentTblName := tblName @@ -305,7 +330,7 @@ func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParent rebasedSch := schema.SchemaFromCols(schCC) // super schema rebase - ss, _, err := newRoot.GetSuperSchema(ctx, tblName) + ss, _, err := root.GetSuperSchema(ctx, tblName) if err != nil { return nil, err @@ -379,14 +404,14 @@ func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParent return nil, err } - rebasedRoot, err := newRoot.PutSuperSchema(ctx, tblName, rebasedSS) + newRoot, err = newRoot.PutSuperSchema(ctx, tblName, rebasedSS) if err != nil { return nil, err } // create new RootValue by overwriting table with rebased rows and schema - newRoot, err = rebasedRoot.PutTable(ctx, tblName, rebasedTable) + newRoot, err = newRoot.PutTable(ctx, tblName, rebasedTable) if err != nil { return nil, err @@ -397,6 +422,16 @@ func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParent func replayRowDiffs(ctx context.Context, rSch schema.Schema, rows, parentRows, rebasedParentRows types.Map, tagMapping map[uint64]uint64) (types.Map, error) { + unmappedTags := set.NewUint64Set(rSch.GetAllCols().Tags) + tm := make(map[uint64]uint64) + for ot, nt := range tagMapping { + tm[ot] = nt + unmappedTags.Remove(nt) + } + for _, t := range unmappedTags.AsSlice() { + tm[t] = t + } + // we will apply modified differences to the rebasedParent rebasedRowEditor := rebasedParentRows.Edit() @@ -608,91 +643,126 @@ func validateTagMapping(tagMapping TagMapping) error { return nil } -func buildGlobalTagMapping(ctx context.Context, root *doltdb.RootValue, parentRoot *doltdb.RootValue, rebasedParentRoot *doltdb.RootValue, globalMapping map[string]map[uint64]uint64) error { +func buildTagMapping(ctx context.Context, root, parentRoot, rebasedParentRoot *doltdb.RootValue) (TagMapping, error) { + tagMapping := make(map[string]map[uint64]uint64) + + parentTblNames, err := parentRoot.GetTableNames(ctx) + + if err != nil { + return nil, err + } + + // collect existing mapping + for _, tn := range parentTblNames { + if _, found := tagMapping[tn]; !found { + tagMapping[tn] = make(map[uint64]uint64) + } + + rpt, found, err := rebasedParentRoot.GetTable(ctx, tn) + if err != nil { + return nil, err + } + if !found { + return nil, fmt.Errorf("error rebasing, table %s not found in rebased parent root", tn) + } + + pt, _, err := parentRoot.GetTable(ctx, tn) + if err != nil { + return nil, err + } + + rps, err := rpt.GetSchema(ctx) + if err != nil { + return nil, err + } + + ps, err := pt.GetSchema(ctx) + if err != nil { + return nil, err + } + + err = ps.GetAllCols().Iter(func(oldTag uint64, col schema.Column) (stop bool, err error) { + rebasedCol, found := rps.GetAllCols().GetByName(col.Name) + if !found { + return true, fmt.Errorf("error rebasing, column %s not found in rebased parent root", col.Name) + } + tagMapping[tn][oldTag] = rebasedCol.Tag + return false, nil + }) + + if err != nil { + return nil, err + } + } + + + // create mappings for new columns tblNames, err := root.GetTableNames(ctx) if err != nil { - return err + return nil, err } + rss, err := doltdb.GetRootValueSuperSchema(ctx, rebasedParentRoot) + + if err != nil { + return nil, err + } + + existingRebasedTags := set.NewUint64Set(rss.AllTags()) + for _, tn := range tblNames { if doltdb.HasDoltPrefix(tn) { - err = handleSystemTableMappings(ctx, tn, root, globalMapping) + err = handleSystemTableMappings(ctx, tn, root, tagMapping) if err != nil { - return err + return nil, err } continue } - if _, found := globalMapping[tn]; !found { - globalMapping[tn] = make(map[uint64]uint64) + if _, found := tagMapping[tn]; !found { + tagMapping[tn] = make(map[uint64]uint64) } t, _, err := root.GetTable(ctx, tn) if err != nil { - return err + return nil, err } sch, err := t.GetSchema(ctx) if err != nil { - return err + return nil, err } - foundParent, err := parentRoot.HasTable(ctx, tn) - if err != nil { - return err - } - - // for this table, get the new columns in root since parentRoot - var cc *schema.ColCollection - var parentSS *schema.SuperSchema - if foundParent { - var found bool - parentSS, found, err = parentRoot.GetSuperSchema(ctx, tn) - if err != nil { - return err - } - if !found { - return fmt.Errorf("error generating unique tags for migration, cannot find super schema for table %s", tn) - } - - cc, _ = schema.NewColCollection() - err = sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) { - if _, found := parentSS.GetByTag(tag); !found { - cc, err = cc.Append(col) - } - stop = err != nil - return stop, err - }) - } else { - cc = sch.GetAllCols() - } - - var colNames []string - var colKinds []types.NomsKind + var newColNames []string + var newColKinds []types.NomsKind var oldTags []uint64 - _ = cc.Iter(func(tag uint64, col schema.Column) (stop bool, err error) { - colNames = append(colNames, col.Name) - colKinds = append(colKinds, col.Kind) - oldTags = append(oldTags, tag) + var existingColKinds []types.NomsKind + _ = sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) { + _, found := tagMapping[tn][tag] + if !found { + newColNames = append(newColNames, col.Name) + newColKinds = append(newColKinds, col.Kind) + oldTags = append(oldTags, tag) + } else { + existingColKinds = append(existingColKinds, col.Kind) + } return false, nil }) - newTags, err := rebasedParentRoot.GenerateTagsForNewColumns(ctx, tn, colNames, colKinds) - if err != nil { - return err - } - if len(oldTags) != len(newTags) { - return errors.New("error generating unique tags for migration") + // generate tags with the same mether as root.GenerateTagsForNewColumns() + newTags := make([]uint64, len(newColNames)) + for i := range newTags { + newTags[i] = schema.AutoGenerateTag(existingRebasedTags, tn, existingColKinds, newColNames[i], newColKinds[i]) + existingColKinds = append(existingColKinds, newColKinds[i]) + existingRebasedTags.Add(newTags[i]) } for i, ot := range oldTags { - if _, found := globalMapping[tn][ot]; !found { - globalMapping[tn][ot] = newTags[i] - } + tagMapping[tn][ot] = newTags[i] } } - return nil + return tagMapping, nil } func handleSystemTableMappings(ctx context.Context, tblName string, root *doltdb.RootValue, globalMapping map[string]map[uint64]uint64) error { diff --git a/go/libraries/doltcore/schema/tag.go b/go/libraries/doltcore/schema/tag.go index d2426785db..dadcbf3660 100644 --- a/go/libraries/doltcore/schema/tag.go +++ b/go/libraries/doltcore/schema/tag.go @@ -18,11 +18,11 @@ import ( "crypto/sha512" "encoding/binary" "fmt" - "github.com/liquidata-inc/dolt/go/libraries/utils/set" "math/rand" "regexp" "strings" + "github.com/liquidata-inc/dolt/go/libraries/utils/set" "github.com/liquidata-inc/dolt/go/store/types" ) diff --git a/go/libraries/utils/set/uint64set.go b/go/libraries/utils/set/uint64set.go index 279e834699..81adb61c36 100644 --- a/go/libraries/utils/set/uint64set.go +++ b/go/libraries/utils/set/uint64set.go @@ -14,6 +14,8 @@ package set +import "sort" + type Uint64Set struct { uints map[uint64]interface{} } @@ -47,6 +49,19 @@ func (us *Uint64Set) Add(i uint64) { us.uints[i] = emptyInstance } +func (us *Uint64Set) Remove(i uint64) { + delete(us.uints, i) +} + +func (us *Uint64Set) AsSlice() []uint64 { + sl := make([]uint64, 0, us.Size()) + for k := range us.uints { + sl = append(sl, k) + } + sort.Slice(sl, func(i, j int) bool { return sl[i] < sl[j] }) + return sl +} + func (us *Uint64Set) Size() int { return len(us.uints) } diff --git a/go/libraries/utils/set/uint64set_test.go b/go/libraries/utils/set/uint64set_test.go index 895fce32b0..2603dd8a37 100644 --- a/go/libraries/utils/set/uint64set_test.go +++ b/go/libraries/utils/set/uint64set_test.go @@ -31,12 +31,12 @@ func TestNewUint64Set(t *testing.T) { for _, id := range initData { assert.True(t, us.Contains(id)) } - assert.False(t, us.Contains(5)) + assert.False(t, us.Contains(19)) // test .ContainsAll() assert.True(t, us.ContainsAll([]uint64{0, 1})) - assert.False(t, us.ContainsAll([]uint64{0, 1, 2, 5})) + assert.False(t, us.ContainsAll([]uint64{0, 1, 2, 19})) // test .Add() us.Add(6) @@ -47,4 +47,19 @@ func TestNewUint64Set(t *testing.T) { } assert.True(t, us.ContainsAll(append(initData, 6))) + // test .Remove() + us.Remove(0) + assert.False(t, us.Contains(0)) + assert.Equal(t, 4, us.Size()) + + us.Remove(19) + assert.Equal(t, 4, us.Size()) + + // test .AsSlice() + s := us.AsSlice() + assert.Equal(t, []uint64{1, 2, 3, 6}, s) + + us.Add(4) + s = us.AsSlice() + assert.Equal(t, []uint64{1, 2, 3, 4, 6}, s) } From f7db5450acdd70dba8d7244b9d16ffb3cff242ec Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Sun, 12 Apr 2020 18:16:31 -0700 Subject: [PATCH 16/60] migration optimizations --- go/libraries/doltcore/rebase/rebase_tag.go | 249 ++++++++++++--------- 1 file changed, 146 insertions(+), 103 deletions(-) diff --git a/go/libraries/doltcore/rebase/rebase_tag.go b/go/libraries/doltcore/rebase/rebase_tag.go index 650a8eb8be..764cd45dbf 100644 --- a/go/libraries/doltcore/rebase/rebase_tag.go +++ b/go/libraries/doltcore/rebase/rebase_tag.go @@ -17,7 +17,6 @@ package rebase import ( "context" "fmt" - "github.com/liquidata-inc/dolt/go/libraries/utils/set" "time" "github.com/liquidata-inc/dolt/go/libraries/doltcore/diff" @@ -27,10 +26,15 @@ import ( "github.com/liquidata-inc/dolt/go/libraries/doltcore/row" "github.com/liquidata-inc/dolt/go/libraries/doltcore/schema" "github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/encoding" + "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed" + "github.com/liquidata-inc/dolt/go/libraries/utils/set" ndiff "github.com/liquidata-inc/dolt/go/store/diff" + "github.com/liquidata-inc/dolt/go/store/hash" "github.com/liquidata-inc/dolt/go/store/types" ) +const diffBufSize = 4096 + // { tableName -> { oldTag -> newTag }} type TagMapping map[string]map[uint64]uint64 @@ -111,21 +115,40 @@ func MigrateUniqueTags(ctx context.Context, dEnv *env.DoltEnv) error { panic("error in uniquifying tags") } + builtTagMappings := make(map[hash.Hash]TagMapping) + // DFS the commit graph find a unique new tag for all existing tags in every table in history replay := func(ctx context.Context, root, parentRoot, rebasedParentRoot *doltdb.RootValue) (rebaseRoot *doltdb.RootValue, err error) { - tagMapping, err := buildTagMapping(ctx, root, parentRoot, rebasedParentRoot) - + h, err := rebasedParentRoot.HashOf() if err != nil { return nil, err } - err = validateTagMapping(tagMapping) + parentTagMapping, found := builtTagMappings[h] + if !found { + parentTagMapping = make(TagMapping) + if !rootsMustBeEqual(parentRoot, rebasedParentRoot) { + return nil, fmt.Errorf("error rebasing, roots not equal") + } + } + tagMapping, err := buildTagMapping(ctx, root, rebasedParentRoot, parentTagMapping) if err != nil { return nil, err } - return replayCommitWithNewTag(ctx, root, parentRoot, rebasedParentRoot, tagMapping) + rebasedRoot, err := replayCommitWithNewTag(ctx, root, parentRoot, rebasedParentRoot, tagMapping) + if err != nil { + return nil, err + } + + rh, err := rebasedRoot.HashOf() + if err != nil { + return nil, err + } + builtTagMappings[rh] = tagMapping + + return rebasedRoot, nil } newCommits, err := rebase(ctx, ddb, replay, entireHistory, headCommits...) @@ -435,44 +458,41 @@ func replayRowDiffs(ctx context.Context, rSch schema.Schema, rows, parentRows, r // we will apply modified differences to the rebasedParent rebasedRowEditor := rebasedParentRows.Edit() - ad := diff.NewAsyncDiffer(1024) + ad := diff.NewAsyncDiffer(diffBufSize) // get all differences (including merges) between original commit and its parent ad.Start(ctx, rows, parentRows) defer ad.Close() for { - diffs, err := ad.GetDiffs(1, time.Second) - if ad.IsDone() { break } + diffs, err := ad.GetDiffs(diffBufSize/2, time.Second) + if err != nil { return types.EmptyMap, err } - if len(diffs) != 1 { - panic("only a single diff requested, multiple returned. bug in AsyncDiffer") - } + for _, d := range diffs { + if d.KeyValue == nil { + panic("Unexpected commit diff result: with nil key value encountered") + } - d := diffs[0] - if d.KeyValue == nil { - panic("Unexpected commit diff result: with nil key value encountered") - } + key, newVal, err := modifyDifferenceTag(d, rows.Format(), rSch, tm) - key, newVal, err := modifyDifferenceTag(d, rows.Format(), rSch, tagMapping) + if err != nil { + return types.EmptyMap, nil + } - if err != nil { - return types.EmptyMap, nil - } - - switch d.ChangeType { - case types.DiffChangeAdded: - rebasedRowEditor.Set(key, newVal) - case types.DiffChangeRemoved: - rebasedRowEditor.Remove(key) - case types.DiffChangeModified: - rebasedRowEditor.Set(key, newVal) + switch d.ChangeType { + case types.DiffChangeAdded: + rebasedRowEditor.Set(key, newVal) + case types.DiffChangeRemoved: + rebasedRowEditor.Remove(key) + case types.DiffChangeModified: + rebasedRowEditor.Set(key, newVal) + } } } @@ -484,11 +504,11 @@ func dropValsForDeletedColumns(ctx context.Context, nbf *types.NomsBinFormat, ro return rows, nil } - eq, err := schema.SchemasAreEqual(sch, parentSch) + deletedCols, err := typed.TypedColCollectionSubtraction(parentSch, sch) if err != nil { return types.EmptyMap, err } - if eq { + if deletedCols.Size() == 0 { return rows, nil } @@ -553,45 +573,99 @@ func dropValsForDeletedColumns(ctx context.Context, nbf *types.NomsBinFormat, ro return prunedRowData, nil } -func modifyDifferenceTag(d *ndiff.Difference, nbf *types.NomsBinFormat, rSch schema.Schema, tagMapping map[uint64]uint64) (key types.LesserValuable, val types.Valuable, err error) { - ktv, err := row.ParseTaggedValues(d.KeyValue.(types.Tuple)) +func modifyDifferenceTag(d *ndiff.Difference, nbf *types.NomsBinFormat, rSch schema.Schema, tagMapping map[uint64]uint64) (keyTup types.LesserValuable, valTup types.Valuable, err error) { + + k := d.KeyValue.(types.Tuple) + if k.Len()%2 != 0 { + panic("A tagged tuple must have an even column count.") + } + + kItr, err := k.Iterator() + if err != nil { + return nil, nil, err + } + + idx := 0 + kk := make([]types.Value, k.Len()) + for kItr.HasMore() { + _, tag, err := kItr.Next() + if err != nil { + return nil, nil, err + } + + // i.HasMore() is true here because of assertion above. + _, val, err := kItr.Next() + if err != nil { + return nil, nil, err + } + + if tag.Kind() != types.UintKind { + panic("Invalid tagged tuple must have uint tags.") + } + + if val != types.NullValue { + newTag := tagMapping[uint64(tag.(types.Uint))] + kk[idx] = types.Uint(newTag) + kk[idx+1] = val + } + idx += 2 + } + + keyTup, err = types.NewTuple(nbf, kk...) if err != nil { return nil, nil, err } - newKtv := make(row.TaggedValues) - for tag, val := range ktv { - newTag, found := tagMapping[tag] - if !found { - newTag = tag - } - newKtv[newTag] = val + if d.NewValue == nil { + return keyTup, nil, nil } - key = newKtv.NomsTupleForTags(nbf, rSch.GetPKCols().Tags, true) + v := d.NewValue.(types.Tuple) + if v.Len()%2 != 0 { + panic("A tagged tuple must have an even column count.") + } - val = d.NewValue - if d.NewValue != nil { - tv, err := row.ParseTaggedValues(d.NewValue.(types.Tuple)) + vItr, err := v.Iterator() + if err != nil { + return nil, nil, err + } + idx = 0 + vv := make([]types.Value, v.Len()) + for vItr.HasMore() { + _, tag, err := vItr.Next() if err != nil { return nil, nil, err } - newTv := make(row.TaggedValues) - for tag, val := range tv { - newTag, found := tagMapping[tag] - if !found { - newTag = tag - } - newTv[newTag] = val + // i.HasMore() is true here because of assertion above. + _, val, err := vItr.Next() + if err != nil { + return nil, nil, err } - val = newTv.NomsTupleForTags(nbf, rSch.GetNonPKCols().Tags, false) + if tag.Kind() != types.UintKind { + panic("Invalid tagged tuple must have uint tags.") + } + + if val != types.NullValue { + newTag, ok := tagMapping[uint64(tag.(types.Uint))] + if ok { + vv[idx] = types.Uint(newTag) + vv[idx+1] = val + idx += 2 + } + } } - return key, val, nil + valTup, err = types.NewTuple(nbf, vv[:idx]...) + + if err != nil { + return nil, nil, err + } + + return keyTup, valTup, nil } func tagExistsInHistory(ctx context.Context, c *doltdb.Commit, tagMapping TagMapping) (bool, error) { @@ -643,58 +717,8 @@ func validateTagMapping(tagMapping TagMapping) error { return nil } -func buildTagMapping(ctx context.Context, root, parentRoot, rebasedParentRoot *doltdb.RootValue) (TagMapping, error) { - tagMapping := make(map[string]map[uint64]uint64) - - parentTblNames, err := parentRoot.GetTableNames(ctx) - - if err != nil { - return nil, err - } - - // collect existing mapping - for _, tn := range parentTblNames { - if _, found := tagMapping[tn]; !found { - tagMapping[tn] = make(map[uint64]uint64) - } - - rpt, found, err := rebasedParentRoot.GetTable(ctx, tn) - if err != nil { - return nil, err - } - if !found { - return nil, fmt.Errorf("error rebasing, table %s not found in rebased parent root", tn) - } - - pt, _, err := parentRoot.GetTable(ctx, tn) - if err != nil { - return nil, err - } - - rps, err := rpt.GetSchema(ctx) - if err != nil { - return nil, err - } - - ps, err := pt.GetSchema(ctx) - if err != nil { - return nil, err - } - - err = ps.GetAllCols().Iter(func(oldTag uint64, col schema.Column) (stop bool, err error) { - rebasedCol, found := rps.GetAllCols().GetByName(col.Name) - if !found { - return true, fmt.Errorf("error rebasing, column %s not found in rebased parent root", col.Name) - } - tagMapping[tn][oldTag] = rebasedCol.Tag - return false, nil - }) - - if err != nil { - return nil, err - } - } - +func buildTagMapping(ctx context.Context, root, rebasedParentRoot *doltdb.RootValue, parentTagMapping TagMapping) (TagMapping, error) { + tagMapping := parentTagMapping // create mappings for new columns tblNames, err := root.GetTableNames(ctx) @@ -762,6 +786,13 @@ func buildTagMapping(ctx context.Context, root, parentRoot, rebasedParentRoot *d tagMapping[tn][ot] = newTags[i] } } + + err = validateTagMapping(tagMapping) + + if err != nil { + return nil, err + } + return tagMapping, nil } @@ -810,3 +841,15 @@ func handleSystemTableMappings(ctx context.Context, tblName string, root *doltdb return nil } + +func rootsMustBeEqual(r1, r2 *doltdb.RootValue) bool { + h1, err := r1.HashOf() + if err != nil { + panic(err) + } + h2, err := r2.HashOf() + if err != nil { + panic(err) + } + return h1.Equal(h2) +} \ No newline at end of file From 86b87baeb746e69d6195b1c63bba293e9e8c7289 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Sun, 12 Apr 2020 18:18:42 -0700 Subject: [PATCH 17/60] repo fmt --- go/libraries/doltcore/rebase/rebase_tag.go | 3 +-- go/libraries/utils/set/uint64set.go | 1 - go/libraries/utils/set/uint64set_test.go | 1 - 3 files changed, 1 insertion(+), 4 deletions(-) diff --git a/go/libraries/doltcore/rebase/rebase_tag.go b/go/libraries/doltcore/rebase/rebase_tag.go index 764cd45dbf..8a271cf52d 100644 --- a/go/libraries/doltcore/rebase/rebase_tag.go +++ b/go/libraries/doltcore/rebase/rebase_tag.go @@ -276,7 +276,6 @@ func TagRebaseForCommits(ctx context.Context, ddb *doltdb.DoltDB, tm TagMapping, func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParentRoot *doltdb.RootValue, tm TagMapping) (*doltdb.RootValue, error) { - tableNames, err := doltdb.UnionTableNames(ctx, root, rebasedParentRoot) if err != nil { @@ -852,4 +851,4 @@ func rootsMustBeEqual(r1, r2 *doltdb.RootValue) bool { panic(err) } return h1.Equal(h2) -} \ No newline at end of file +} diff --git a/go/libraries/utils/set/uint64set.go b/go/libraries/utils/set/uint64set.go index 81adb61c36..ddd6e7c022 100644 --- a/go/libraries/utils/set/uint64set.go +++ b/go/libraries/utils/set/uint64set.go @@ -65,4 +65,3 @@ func (us *Uint64Set) AsSlice() []uint64 { func (us *Uint64Set) Size() int { return len(us.uints) } - diff --git a/go/libraries/utils/set/uint64set_test.go b/go/libraries/utils/set/uint64set_test.go index 2603dd8a37..0d84659ae9 100644 --- a/go/libraries/utils/set/uint64set_test.go +++ b/go/libraries/utils/set/uint64set_test.go @@ -33,7 +33,6 @@ func TestNewUint64Set(t *testing.T) { } assert.False(t, us.Contains(19)) - // test .ContainsAll() assert.True(t, us.ContainsAll([]uint64{0, 1})) assert.False(t, us.ContainsAll([]uint64{0, 1, 2, 19})) From 8cbc28c7d15bed18ddd954dc0ce0944fd723b318 Mon Sep 17 00:00:00 2001 From: Aaron Son Date: Mon, 13 Apr 2020 10:13:17 -0700 Subject: [PATCH 18/60] Dockerfile: Bump golang version; use mod=readonly. --- Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9f958fdddd..35716d8d6f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,8 @@ -FROM golang:1.13.5-buster as builder +FROM golang:1.14.2-buster as builder WORKDIR /root/building/go COPY ./go/ . -RUN go mod vendor -RUN go build -mod=vendor -o dolt ./cmd/dolt +ENV GOFLAGS="-mod=readonly" +RUN go build -o dolt ./cmd/dolt FROM ubuntu:18.04 COPY --from=builder /root/building/go/dolt /usr/local/bin/dolt From 7594e6954d1ba4596ca7e2ca36f5d5d5e4fcf6ea Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Mon, 13 Apr 2020 10:23:10 -0700 Subject: [PATCH 19/60] Added handling for OkResult to sql command Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sql.go | 36 ++++++++++++++++++++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index da0e72b420..3d51d5546c 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -1162,6 +1162,10 @@ func (se *sqlEngine) query(ctx *sql.Context, query string) (sql.Schema, sql.RowI // Pretty prints the output of the new SQL engine func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema, rowIter sql.RowIter) error { + if isOkResult(sqlSch) { + return printOKResult(ctx, rowIter) + } + nbf := types.Format_Default doltSch, err := dsqle.SqlSchemaToDoltResultSchema(sqlSch) @@ -1285,6 +1289,33 @@ func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema, return nil } +func printOKResult(ctx context.Context, iter sql.RowIter) error { + row, err := iter.Next() + defer iter.Close() + + if err != nil { + return err + } + + if okResult, ok := row[0].(sql.OkResult); ok { + rowNoun := "row" + if okResult.RowsAffected > 1 { + rowNoun = "rows" + } + cli.Printf("Query OK, %d %s affected\n", okResult.RowsAffected, rowNoun) + + if okResult.Info != "" { + cli.Printf("%s\n", okResult.Info) + } + } + + return nil +} + +func isOkResult(sch sql.Schema) bool { + return sch.Equals(sql.OkResultSchema) +} + // Checks if the query is a naked delete and then deletes all rows if so. Returns true if it did so, false otherwise. func (se *sqlEngine) checkThenDeleteAllRows(ctx *sql.Context, s *sqlparser.Delete) bool { if s.Where == nil && s.Limit == nil && s.Partitions == nil && len(s.TableExprs) == 1 { @@ -1320,7 +1351,8 @@ func (se *sqlEngine) checkThenDeleteAllRows(ctx *sql.Context, s *sqlparser.Delet return false } - printRowIter := sql.RowsToRowIter(sql.NewRow(rowData.Len())) + result := sql.OkResult{RowsAffected: rowData.Len()} + printRowIter := sql.RowsToRowIter(sql.NewRow(result)) emptyMap, err := types.NewMap(ctx, root.VRW()) if err != nil { @@ -1337,7 +1369,7 @@ func (se *sqlEngine) checkThenDeleteAllRows(ctx *sql.Context, s *sqlparser.Delet return false } - _ = se.prettyPrintResults(ctx, sql.Schema{{Name: "updated", Type: sql.Uint64}}, printRowIter) + _ = se.prettyPrintResults(ctx, sql.OkResultSchema, printRowIter) db, err := se.getDB(dbName) if err != nil { From 92eb71d3774b0e87d7ad6998b741bd59952bb23e Mon Sep 17 00:00:00 2001 From: Aaron Son Date: Mon, 13 Apr 2020 11:00:50 -0700 Subject: [PATCH 20/60] go/libraries/doltcore/env: paths.go: Consult HOME environment variable for home location before consulting os/user. --- go/libraries/doltcore/env/paths.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/go/libraries/doltcore/env/paths.go b/go/libraries/doltcore/env/paths.go index 2b54f236bc..a513b09ecc 100644 --- a/go/libraries/doltcore/env/paths.go +++ b/go/libraries/doltcore/env/paths.go @@ -23,6 +23,7 @@ import ( ) const ( + homeEnvVar = "HOME" doltRootPathEnvVar = "DOLT_ROOT_PATH" credsDir = "creds" @@ -46,7 +47,9 @@ func GetCurrentUserHomeDir() (string, error) { if doltRootPath, ok := os.LookupEnv(doltRootPathEnvVar); ok && doltRootPath != "" { return doltRootPath, nil } - + if homeEnvPath, ok := os.LookupEnv(homeEnvVar); ok && homeEnvPath != "" { + return homeEnvPath, nil + } if usr, err := user.Current(); err != nil { return "", err } else { From c3df38f0407f0f622102ab62fa8c0403160c5dac Mon Sep 17 00:00:00 2001 From: Timothy Sehn Date: Mon, 13 Apr 2020 11:19:10 -0700 Subject: [PATCH 21/60] SQL Reserved word and SQL Keyword in column name tests --- bats/create-tables.bats | 53 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/bats/create-tables.bats b/bats/create-tables.bats index 461d9ecc54..ad7d15259f 100755 --- a/bats/create-tables.bats +++ b/bats/create-tables.bats @@ -451,6 +451,59 @@ SQL [ "$status" -eq 1 ] } +@test "create a table with a SQL reserved word" { + dolt sql < Date: Mon, 13 Apr 2020 11:57:56 -0700 Subject: [PATCH 22/60] Support for printing the version string on connection to a dolt sql server Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sqlserver/server.go | 2 ++ go/cmd/dolt/commands/sqlserver/sqlserver.go | 9 ++++++--- go/cmd/dolt/dolt.go | 2 +- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/go/cmd/dolt/commands/sqlserver/server.go b/go/cmd/dolt/commands/sqlserver/server.go index c2439131f1..2dc72aa0c1 100644 --- a/go/cmd/dolt/commands/sqlserver/server.go +++ b/go/cmd/dolt/commands/sqlserver/server.go @@ -16,6 +16,7 @@ package sqlserver import ( "context" + "fmt" "net" "strconv" "time" @@ -122,6 +123,7 @@ func Serve(ctx context.Context, serverConfig *ServerConfig, serverController *Se Auth: userAuth, ConnReadTimeout: timeout, ConnWriteTimeout: timeout, + Version: fmt.Sprintf("Dolt version %s", serverConfig.Version), }, sqlEngine, newSessionBuilder(sqlEngine), diff --git a/go/cmd/dolt/commands/sqlserver/sqlserver.go b/go/cmd/dolt/commands/sqlserver/sqlserver.go index 00c78453dc..f362a35caf 100644 --- a/go/cmd/dolt/commands/sqlserver/sqlserver.go +++ b/go/cmd/dolt/commands/sqlserver/sqlserver.go @@ -49,7 +49,9 @@ Currently, only {{.EmphasisLeft}}SELECT{{.EmphasisRight}} statements are operati }, } -type SqlServerCmd struct{} +type SqlServerCmd struct { + VersionStr string +} // Name is returns the name of the Dolt cli command. This is what is used on the command line to invoke the command func (cmd SqlServerCmd) Name() string { @@ -95,11 +97,12 @@ func (cmd SqlServerCmd) RequiresRepo() bool { // Exec executes the command func (cmd SqlServerCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int { - return SqlServerImpl(ctx, commandStr, args, dEnv, nil) + return cmd.startServer(ctx, commandStr, args, dEnv, nil) } -func SqlServerImpl(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv, serverController *ServerController) int { +func (cmd SqlServerCmd) startServer(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv, serverController *ServerController) int { serverConfig := DefaultServerConfig() + serverConfig.Version = cmd.VersionStr ap := createArgParser(serverConfig) help, _ := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, sqlServerDocs, ap)) diff --git a/go/cmd/dolt/dolt.go b/go/cmd/dolt/dolt.go index d9d7355f20..fc0b418ec6 100644 --- a/go/cmd/dolt/dolt.go +++ b/go/cmd/dolt/dolt.go @@ -52,7 +52,7 @@ var doltCommand = cli.NewSubCommandHandler("dolt", "it's git for data", []cli.Co commands.ResetCmd{}, commands.CommitCmd{}, commands.SqlCmd{VersionStr: Version}, - sqlserver.SqlServerCmd{}, + sqlserver.SqlServerCmd{VersionStr: Version}, commands.LogCmd{}, commands.DiffCmd{}, commands.BlameCmd{}, From 776aff078b767f9184ebfca9f38d463347fc26ce Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Mon, 13 Apr 2020 11:59:38 -0700 Subject: [PATCH 23/60] fix nits --- bats/compatibility/corona-virus-test.sh | 2 +- go/libraries/doltcore/rebase/rebase_tag.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bats/compatibility/corona-virus-test.sh b/bats/compatibility/corona-virus-test.sh index 6760142eb3..68b7c60ec8 100755 --- a/bats/compatibility/corona-virus-test.sh +++ b/bats/compatibility/corona-virus-test.sh @@ -99,7 +99,7 @@ local_bin="`pwd`"/"$bin" PATH="$local_bin":"$PATH" dolt clone Liquidata/corona-virus pushd "corona-virus" PATH="$local_bin":"$PATH" export_tables "-pre" -time dolt migrate +dolt migrate export_tables "-post" diff_tables echo "success!" diff --git a/go/libraries/doltcore/rebase/rebase_tag.go b/go/libraries/doltcore/rebase/rebase_tag.go index 8a271cf52d..9fb20a28c5 100644 --- a/go/libraries/doltcore/rebase/rebase_tag.go +++ b/go/libraries/doltcore/rebase/rebase_tag.go @@ -773,7 +773,7 @@ func buildTagMapping(ctx context.Context, root, rebasedParentRoot *doltdb.RootVa return false, nil }) - // generate tags with the same mether as root.GenerateTagsForNewColumns() + // generate tags with the same method as root.GenerateTagsForNewColumns() newTags := make([]uint64, len(newColNames)) for i := range newTags { newTags[i] = schema.AutoGenerateTag(existingRebasedTags, tn, existingColKinds, newColNames[i], newColKinds[i]) From 148ebcb621de6bfe6269dca706d622989402d4e2 Mon Sep 17 00:00:00 2001 From: Aaron Son Date: Mon, 13 Apr 2020 11:59:49 -0700 Subject: [PATCH 24/60] go/cmd/dolt: commands/login.go: Change behavior so dolt login with existing creds checks server for association before opening a browser. --- go/cmd/dolt/commands/login.go | 78 ++++++++++++++++++++++++----------- 1 file changed, 54 insertions(+), 24 deletions(-) diff --git a/go/cmd/dolt/commands/login.go b/go/cmd/dolt/commands/login.go index 772491538b..d57be4b232 100644 --- a/go/cmd/dolt/commands/login.go +++ b/go/cmd/dolt/commands/login.go @@ -96,6 +96,15 @@ func (cmd LoginCmd) Exec(ctx context.Context, commandStr string, args []string, return HandleVErrAndExitCode(verr, usage) } +// Specifies behavior of the login. +type loginBehavior int +// When logging in with newly minted credentials, they cannot be on the server +// yet. So open the browser immediately before checking the server. +var openBrowserFirst loginBehavior = 1 +// When logging in with supplied credentials, they may already be associated +// with an account on the server. Check first before opening a browser. +var checkCredentialsThenOpenBrowser loginBehavior = 2 + func loginWithNewCreds(ctx context.Context, dEnv *env.DoltEnv) errhand.VerboseError { path, dc, err := actions.NewCredsFile(dEnv) @@ -105,7 +114,7 @@ func loginWithNewCreds(ctx context.Context, dEnv *env.DoltEnv) errhand.VerboseEr cli.Println(path) - return loginWithCreds(ctx, dEnv, dc) + return loginWithCreds(ctx, dEnv, dc, openBrowserFirst) } func loginWithExistingCreds(ctx context.Context, dEnv *env.DoltEnv, idOrPubKey string) errhand.VerboseError { @@ -127,49 +136,70 @@ func loginWithExistingCreds(ctx context.Context, dEnv *env.DoltEnv, idOrPubKey s return errhand.BuildDError("error: failed to load creds from file").AddCause(err).Build() } - return loginWithCreds(ctx, dEnv, dc) + return loginWithCreds(ctx, dEnv, dc, checkCredentialsThenOpenBrowser) } -func loginWithCreds(ctx context.Context, dEnv *env.DoltEnv, dc creds.DoltCreds) errhand.VerboseError { - loginUrl := dEnv.Config.GetStringOrDefault(env.AddCredsUrlKey, env.DefaultLoginUrl) - url := fmt.Sprintf("%s#%s", *loginUrl, dc.PubKeyBase32Str()) - - cli.Printf("Opening a browser to:\n\t%s\nPlease associate your key with your account.\n", url) - open.Start(url) - - host := dEnv.Config.GetStringOrDefault(env.RemotesApiHostKey, env.DefaultRemotesApiHost) - port := dEnv.Config.GetStringOrDefault(env.RemotesApiHostPortKey, env.DefaultRemotesApiPort) - conn, err := dEnv.GrpcConnWithCreds(fmt.Sprintf("%s:%s", *host, *port), false, dc) - - if err != nil { - return errhand.BuildDError("error: unable to connect to server with credentials.").AddCause(err).Build() +func loginWithCreds(ctx context.Context, dEnv *env.DoltEnv, dc creds.DoltCreds, behavior loginBehavior) errhand.VerboseError { + grpcClient, verr := getCredentialsClient(dEnv, dc) + if verr != nil { + return verr } - grpcClient := remotesapi.NewCredentialsServiceClient(conn) - - cli.Println("Checking remote server looking for key association.") - - var prevMsgLen int var whoAmI *remotesapi.WhoAmIResponse - for whoAmI == nil { - prevMsgLen = cli.DeleteAndPrint(prevMsgLen, "requesting update") + var err error + if behavior == checkCredentialsThenOpenBrowser { whoAmI, err = grpcClient.WhoAmI(ctx, &remotesapi.WhoAmIRequest{}) + } + if whoAmI == nil { + openBrowserForCredsAdd(dEnv, dc) + cli.Println("Checking remote server looking for key association.") + } + + linePrinter := func () func(line string) { + prevMsgLen := 0 + return func(line string) { + prevMsgLen = cli.DeleteAndPrint(prevMsgLen, line) + } + }() + + for whoAmI == nil { + linePrinter("requesting update") + whoAmI, err = grpcClient.WhoAmI(ctx, &remotesapi.WhoAmIRequest{}) if err != nil { for i := 0; i < loginRetryInterval; i++ { - prevMsgLen = cli.DeleteAndPrint(prevMsgLen, fmt.Sprintf("Retrying in %d", loginRetryInterval-i)) + linePrinter(fmt.Sprintf("Retrying in %d", loginRetryInterval-i)) time.Sleep(time.Second) } + } else { + cli.Printf("\n\n") } } - cli.Printf("\n\nKey successfully associated with user: %s email %s\n", whoAmI.Username, whoAmI.EmailAddress) + cli.Printf("Key successfully associated with user: %s email %s\n", whoAmI.Username, whoAmI.EmailAddress) updateConfig(dEnv, whoAmI, dc) return nil } +func openBrowserForCredsAdd(dEnv *env.DoltEnv, dc creds.DoltCreds) { + loginUrl := dEnv.Config.GetStringOrDefault(env.AddCredsUrlKey, env.DefaultLoginUrl) + url := fmt.Sprintf("%s#%s", *loginUrl, dc.PubKeyBase32Str()) + cli.Printf("Opening a browser to:\n\t%s\nPlease associate your key with your account.\n", url) + open.Start(url) +} + +func getCredentialsClient(dEnv *env.DoltEnv, dc creds.DoltCreds) (remotesapi.CredentialsServiceClient, errhand.VerboseError) { + host := dEnv.Config.GetStringOrDefault(env.RemotesApiHostKey, env.DefaultRemotesApiHost) + port := dEnv.Config.GetStringOrDefault(env.RemotesApiHostPortKey, env.DefaultRemotesApiPort) + conn, err := dEnv.GrpcConnWithCreds(fmt.Sprintf("%s:%s", *host, *port), false, dc) + if err != nil { + return nil, errhand.BuildDError("error: unable to connect to server with credentials.").AddCause(err).Build() + } + return remotesapi.NewCredentialsServiceClient(conn), nil +} + func updateConfig(dEnv *env.DoltEnv, whoAmI *remotesapi.WhoAmIResponse, dCreds creds.DoltCreds) { gcfg, hasGCfg := dEnv.Config.GetConfig(env.GlobalConfig) From 8ac32b3e9f1326d265ac70ac60bc01c4ea5e8763 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Mon, 13 Apr 2020 12:03:50 -0700 Subject: [PATCH 25/60] fix copyright years --- go/libraries/utils/set/uint64set.go | 2 +- go/libraries/utils/set/uint64set_test.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/go/libraries/utils/set/uint64set.go b/go/libraries/utils/set/uint64set.go index ddd6e7c022..261e919c27 100644 --- a/go/libraries/utils/set/uint64set.go +++ b/go/libraries/utils/set/uint64set.go @@ -1,4 +1,4 @@ -// Copyright 2019 Liquidata, Inc. +// Copyright 2020 Liquidata, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/go/libraries/utils/set/uint64set_test.go b/go/libraries/utils/set/uint64set_test.go index 0d84659ae9..fb02dfc445 100644 --- a/go/libraries/utils/set/uint64set_test.go +++ b/go/libraries/utils/set/uint64set_test.go @@ -1,4 +1,4 @@ -// Copyright 2019 Liquidata, Inc. +// Copyright 2020 Liquidata, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. From 49a99e8036823fbb924989331b73359744936015 Mon Sep 17 00:00:00 2001 From: Aaron Son Date: Mon, 13 Apr 2020 12:50:07 -0700 Subject: [PATCH 26/60] repofmt. --- go/cmd/dolt/commands/login.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/go/cmd/dolt/commands/login.go b/go/cmd/dolt/commands/login.go index d57be4b232..c4a87ea87c 100644 --- a/go/cmd/dolt/commands/login.go +++ b/go/cmd/dolt/commands/login.go @@ -98,9 +98,11 @@ func (cmd LoginCmd) Exec(ctx context.Context, commandStr string, args []string, // Specifies behavior of the login. type loginBehavior int + // When logging in with newly minted credentials, they cannot be on the server // yet. So open the browser immediately before checking the server. var openBrowserFirst loginBehavior = 1 + // When logging in with supplied credentials, they may already be associated // with an account on the server. Check first before opening a browser. var checkCredentialsThenOpenBrowser loginBehavior = 2 @@ -156,7 +158,7 @@ func loginWithCreds(ctx context.Context, dEnv *env.DoltEnv, dc creds.DoltCreds, cli.Println("Checking remote server looking for key association.") } - linePrinter := func () func(line string) { + linePrinter := func() func(line string) { prevMsgLen := 0 return func(line string) { prevMsgLen = cli.DeleteAndPrint(prevMsgLen, line) From c639909bf551a7642178358a88945cac2cff3e00 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Mon, 13 Apr 2020 13:19:31 -0700 Subject: [PATCH 27/60] working --- go/cmd/dolt/commands/migrate.go | 169 ++++++++++++++++++--- go/libraries/doltcore/rebase/rebase_tag.go | 6 +- 2 files changed, 155 insertions(+), 20 deletions(-) diff --git a/go/cmd/dolt/commands/migrate.go b/go/cmd/dolt/commands/migrate.go index 152872b070..1cc8634827 100644 --- a/go/cmd/dolt/commands/migrate.go +++ b/go/cmd/dolt/commands/migrate.go @@ -16,17 +16,24 @@ package commands import ( "context" - + "errors" + "fmt" "github.com/fatih/color" "github.com/liquidata-inc/dolt/go/cmd/dolt/cli" "github.com/liquidata-inc/dolt/go/libraries/doltcore/env" "github.com/liquidata-inc/dolt/go/libraries/doltcore/rebase" + "github.com/liquidata-inc/dolt/go/libraries/utils/argparser" "github.com/liquidata-inc/dolt/go/libraries/utils/filesys" ) -const migrationPrompt = `Run "dolt migrate" to update this repository to the latest format` -const migrationMsg = "Migrating repository to the latest format" +const ( + migrationPrompt = `Run "dolt migrate" to update this repository to the latest format` + migrationMsg = "Migrating repository to the latest format" + + migratePushFlag = "push" + migratePullFlag = "pull" +) type MigrateCmd struct{} @@ -45,31 +52,159 @@ func (cmd MigrateCmd) CreateMarkdown(_ filesys.Filesys, _, _ string) error { return nil } -// Version displays the version of the running dolt client + +func (cmd MigrateCmd) createArgParser() *argparser.ArgParser { + ap := argparser.NewArgParser() + ap.SupportsFlag(migratePushFlag, "", "") + ap.SupportsFlag(migratePullFlag, "", "") + return ap +} + // Exec executes the command -func (cmd MigrateCmd) Exec(ctx context.Context, _ string, _ []string, dEnv *env.DoltEnv) int { - needed, err := rebase.NeedsUniqueTagMigration(ctx, dEnv) - if err != nil { - cli.PrintErrf(color.RedString("error checking for repository migration: %s", err.Error())) +func (cmd MigrateCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int { + ap := cmd.createArgParser() + help, _ := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, pushDocs, ap)) + apr := cli.ParseArgs(ap, args, help) + + if apr.Contains(migratePushFlag) && apr.Contains(migratePullFlag) { + cli.PrintErrf(color.RedString("options --%s and --%s are mutually exclusive", migratePushFlag, migratePullFlag)) return 1 } - if !needed { - cli.Println("Repository format is up to date") - return 0 - } - cli.Println(color.YellowString(migrationMsg)) - err = rebase.MigrateUniqueTags(ctx, dEnv) + var err error + switch { + case apr.Contains(migratePushFlag): + err = pushMigratedRepo(ctx, dEnv, apr) + case apr.Contains(migratePullFlag): + err = fetchMigratedRemoteBranches(ctx, dEnv, apr) + default: + err = migrateLocalRepo(ctx, dEnv) + } + if err != nil { - cli.PrintErrf("error migrating repository: %s", err.Error()) + cli.PrintErrf(color.RedString("error migrating: %s", err.Error())) return 1 } + return 0 } +func migrateLocalRepo(ctx context.Context, dEnv *env.DoltEnv) error { + localMigrationNeeded, err := rebase.NeedsUniqueTagMigration(ctx, dEnv.DoltDB) + + if err != nil { + return err + } + + if localMigrationNeeded { + cli.Println(color.YellowString(migrationMsg)) + err = rebase.MigrateUniqueTags(ctx, dEnv) + + if err != nil { + return err + } + } else { + cli.Println("Repository format is up to date") + + remoteName := "origin" + remoteMigrated, err := remoteHasBeenMigrated(ctx, dEnv, remoteName) + if err != nil { + return err + } + + if !remoteMigrated { + cli.Println(fmt.Sprintf("Remote %s has not been migrated", remoteName)) + cli.Println("Run 'dolt mgirate --push' to update remote") + } else { + cli.Println(fmt.Sprintf("Remote %s has been migrated", remoteName)) + cli.Println("Run 'dolt migrate --pull' to update refs") + } + } + return nil +} + +func pushMigratedRepo(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.ArgParseResults) error { + localMigrationNeeded, err := rebase.NeedsUniqueTagMigration(ctx, dEnv.DoltDB) + if err != nil { + return err + } + if localMigrationNeeded { + cli.Println("Local repo must be migrated before pushing, run 'dolt migrate'") + return nil + } + + remoteName := "origin" + if apr.NArg() > 0 { + remoteName = apr.Arg(0) + } + + remoteMigrated, err := remoteHasBeenMigrated(ctx, dEnv, remoteName) + if err != nil { + return err + } + if remoteMigrated { + cli.Println("Remote %s has been migrated", remoteName) + cli.Println("Run 'dolt migrate --pull' to update refs") + } + + return nil +} + +func fetchMigratedRemoteBranches(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.ArgParseResults) error { + localMigrationNeeded, err := rebase.NeedsUniqueTagMigration(ctx, dEnv.DoltDB) + if err != nil { + return err + } + if localMigrationNeeded { + return fmt.Errorf("local repo must be migrated before pulling, run 'dolt migrate'\n") + } + + remoteName := "origin" + if apr.NArg() > 0 { + remoteName = apr.Arg(0) + } + + remoteMigrated, err := remoteHasBeenMigrated(ctx, dEnv, remoteName) + if err != nil { + return err + } + if !remoteMigrated { + return fmt.Errorf("remote %s has not been migrate, run 'dolt migrate --push %s' to push migration", remoteName, remoteName) + } + + + return nil +} + +func remoteHasBeenMigrated(ctx context.Context, dEnv *env.DoltEnv, remoteName string) (bool, error) { + remotes, err := dEnv.GetRemotes() + + if err != nil { + return false, errors.New("error: failed to read remotes from config.") + } + + remote, remoteOK := remotes[remoteName] + if !remoteOK { + return false, fmt.Errorf("cannot find remote %s", remoteName) + } + + destDB, err := remote.GetRemoteDB(ctx, dEnv.DoltDB.ValueReadWriter().Format()) + + if err != nil { + return false, err + } + + needed, err := rebase.NeedsUniqueTagMigration(ctx, destDB) + if err != nil { + return false, err + } + + return !needed, nil +} + // These subcommands will trigger a unique tags migration func MigrationNeeded(ctx context.Context, dEnv *env.DoltEnv, args []string) bool { - needed, err := rebase.NeedsUniqueTagMigration(ctx, dEnv) + needed, err := rebase.NeedsUniqueTagMigration(ctx, dEnv.DoltDB) if err != nil { cli.PrintErrf(color.RedString("error checking for repository migration: %s", err.Error())) // ambiguous whether we need to migrate, but we should exit @@ -83,7 +218,7 @@ func MigrationNeeded(ctx context.Context, dEnv *env.DoltEnv, args []string) bool if len(args) > 0 { subCmd = args[0] } - cli.PrintErrln(color.RedString("Cannot execute dolt %s, repository format is out of date.", subCmd)) + cli.PrintErrln(color.RedString("Cannot execute 'dolt %s', repository format is out of date.", subCmd)) cli.Println(migrationPrompt) return true } diff --git a/go/libraries/doltcore/rebase/rebase_tag.go b/go/libraries/doltcore/rebase/rebase_tag.go index a152606348..3cbc7ea178 100644 --- a/go/libraries/doltcore/rebase/rebase_tag.go +++ b/go/libraries/doltcore/rebase/rebase_tag.go @@ -35,8 +35,8 @@ import ( type TagMapping map[string]map[uint64]uint64 // NeedsUniqueTagMigration checks if a repo was created before the unique tags constraint and migrates it if necessary. -func NeedsUniqueTagMigration(ctx context.Context, dEnv *env.DoltEnv) (bool, error) { - bb, err := dEnv.DoltDB.GetBranches(ctx) +func NeedsUniqueTagMigration(ctx context.Context, ddb *doltdb.DoltDB) (bool, error) { + bb, err := ddb.GetBranches(ctx) if err != nil { return false, err @@ -49,7 +49,7 @@ func NeedsUniqueTagMigration(ctx context.Context, dEnv *env.DoltEnv) (bool, erro return false, err } - c, err := dEnv.DoltDB.Resolve(ctx, cs) + c, err := ddb.Resolve(ctx, cs) if err != nil { return false, err From ddab08abcd75d6a1ac8272727df81813473787c9 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Mon, 13 Apr 2020 13:48:10 -0700 Subject: [PATCH 28/60] first pass add migrate --push and migrate --pull --- go/cmd/dolt/commands/migrate.go | 63 +++++++++++++++++++++++++++++++-- 1 file changed, 61 insertions(+), 2 deletions(-) diff --git a/go/cmd/dolt/commands/migrate.go b/go/cmd/dolt/commands/migrate.go index 1cc8634827..580726a8a8 100644 --- a/go/cmd/dolt/commands/migrate.go +++ b/go/cmd/dolt/commands/migrate.go @@ -19,10 +19,11 @@ import ( "errors" "fmt" "github.com/fatih/color" - "github.com/liquidata-inc/dolt/go/cmd/dolt/cli" + eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1" "github.com/liquidata-inc/dolt/go/libraries/doltcore/env" "github.com/liquidata-inc/dolt/go/libraries/doltcore/rebase" + "github.com/liquidata-inc/dolt/go/libraries/doltcore/ref" "github.com/liquidata-inc/dolt/go/libraries/utils/argparser" "github.com/liquidata-inc/dolt/go/libraries/utils/filesys" ) @@ -60,6 +61,12 @@ func (cmd MigrateCmd) createArgParser() *argparser.ArgParser { return ap } +// EventType returns the type of the event to log +func (cmd MigrateCmd) EventType() eventsapi.ClientEventType { + return eventsapi.ClientEventType_TYPE_UNSPECIFIED +} + + // Exec executes the command func (cmd MigrateCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int { ap := cmd.createArgParser() @@ -138,6 +145,16 @@ func pushMigratedRepo(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.Arg remoteName = apr.Arg(0) } + remotes, err := dEnv.GetRemotes() + if err != nil { + return err + } + + remote, remoteOK := remotes[remoteName] + if !remoteOK { + return fmt.Errorf("unknown remote %s", remoteName) + } + remoteMigrated, err := remoteHasBeenMigrated(ctx, dEnv, remoteName) if err != nil { return err @@ -145,6 +162,41 @@ func pushMigratedRepo(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.Arg if remoteMigrated { cli.Println("Remote %s has been migrated", remoteName) cli.Println("Run 'dolt migrate --pull' to update refs") + return fmt.Errorf("") + } else { + // force push all branches + bb, err := dEnv.DoltDB.GetBranches(ctx) + + if err != nil { + return err + } + + for _, branch := range bb { + refSpec, err := ref.ParseRefSpec(branch.String()) + if err != nil { + return err + } + + src := refSpec.SrcRef(branch) + dest := refSpec.DestRef(src) + + remoteRef, err := getTrackingRef(dest, remote) + + if err != nil { + return err + } + + destDB, err := remote.GetRemoteDB(ctx, dEnv.DoltDB.ValueReadWriter().Format()) + + if err != nil { + return err + } + + mode := ref.RefUpdateMode{Force: true} + err = pushToRemoteBranch(ctx, dEnv, mode, src, dest, remoteRef, dEnv.DoltDB, destDB, remote) + + return err + } } return nil @@ -172,8 +224,15 @@ func fetchMigratedRemoteBranches(ctx context.Context, dEnv *env.DoltEnv, apr *ar return fmt.Errorf("remote %s has not been migrate, run 'dolt migrate --push %s' to push migration", remoteName, remoteName) } + // force fetch all branches + remotes, _ := dEnv.GetRemotes() + r, refSpecs, verr := getRefSpecs(apr.Args(), dEnv, remotes) - return nil + if verr == nil { + verr = fetchRefSpecs(ctx, ref.RefUpdateMode{Force: true}, dEnv, r, refSpecs) + } + + return verr } func remoteHasBeenMigrated(ctx context.Context, dEnv *env.DoltEnv, remoteName string) (bool, error) { From 6bddb584dcb83245d78a9d06ace192a5e0eef93b Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Mon, 13 Apr 2020 15:40:23 -0700 Subject: [PATCH 29/60] force push ALL branches, formatted errors --- go/cmd/dolt/commands/migrate.go | 33 +++++++++++++++++---------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/go/cmd/dolt/commands/migrate.go b/go/cmd/dolt/commands/migrate.go index 580726a8a8..0f083cfa65 100644 --- a/go/cmd/dolt/commands/migrate.go +++ b/go/cmd/dolt/commands/migrate.go @@ -18,7 +18,9 @@ import ( "context" "errors" "fmt" + "github.com/fatih/color" + "github.com/liquidata-inc/dolt/go/cmd/dolt/cli" eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1" "github.com/liquidata-inc/dolt/go/libraries/doltcore/env" @@ -30,7 +32,7 @@ import ( const ( migrationPrompt = `Run "dolt migrate" to update this repository to the latest format` - migrationMsg = "Migrating repository to the latest format" + migrationMsg = "Migrating repository to the latest format" migratePushFlag = "push" migratePullFlag = "pull" @@ -53,7 +55,6 @@ func (cmd MigrateCmd) CreateMarkdown(_ filesys.Filesys, _, _ string) error { return nil } - func (cmd MigrateCmd) createArgParser() *argparser.ArgParser { ap := argparser.NewArgParser() ap.SupportsFlag(migratePushFlag, "", "") @@ -66,7 +67,6 @@ func (cmd MigrateCmd) EventType() eventsapi.ClientEventType { return eventsapi.ClientEventType_TYPE_UNSPECIFIED } - // Exec executes the command func (cmd MigrateCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int { ap := cmd.createArgParser() @@ -89,7 +89,7 @@ func (cmd MigrateCmd) Exec(ctx context.Context, commandStr string, args []string } if err != nil { - cli.PrintErrf(color.RedString("error migrating: %s", err.Error())) + cli.PrintErrln(color.RedString(err.Error())) return 1 } @@ -136,8 +136,7 @@ func pushMigratedRepo(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.Arg return err } if localMigrationNeeded { - cli.Println("Local repo must be migrated before pushing, run 'dolt migrate'") - return nil + return fmt.Errorf("Local repo must be migrated before pushing, run 'dolt migrate'") } remoteName := "origin" @@ -160,9 +159,7 @@ func pushMigratedRepo(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.Arg return err } if remoteMigrated { - cli.Println("Remote %s has been migrated", remoteName) - cli.Println("Run 'dolt migrate --pull' to update refs") - return fmt.Errorf("") + return fmt.Errorf("Remote %s has been migrated\nRun 'dolt migrate --pull' to update refs", remoteName) } else { // force push all branches bb, err := dEnv.DoltDB.GetBranches(ctx) @@ -192,10 +189,14 @@ func pushMigratedRepo(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.Arg return err } + cli.Println(color.YellowString(fmt.Sprintf("Pushing migrated branch %s to %s", branch.String(), remoteName))) mode := ref.RefUpdateMode{Force: true} err = pushToRemoteBranch(ctx, dEnv, mode, src, dest, remoteRef, dEnv.DoltDB, destDB, remote) - return err + if err != nil { + return err + } + cli.Println() } } @@ -208,7 +209,7 @@ func fetchMigratedRemoteBranches(ctx context.Context, dEnv *env.DoltEnv, apr *ar return err } if localMigrationNeeded { - return fmt.Errorf("local repo must be migrated before pulling, run 'dolt migrate'\n") + return fmt.Errorf("Local repo must be migrated before pushing, run 'dolt migrate'") } remoteName := "origin" @@ -221,18 +222,18 @@ func fetchMigratedRemoteBranches(ctx context.Context, dEnv *env.DoltEnv, apr *ar return err } if !remoteMigrated { - return fmt.Errorf("remote %s has not been migrate, run 'dolt migrate --push %s' to push migration", remoteName, remoteName) + return fmt.Errorf("Remote %s has not been migrated\nRun 'dolt migrate --push %s' to push migration", remoteName, remoteName) } // force fetch all branches remotes, _ := dEnv.GetRemotes() - r, refSpecs, verr := getRefSpecs(apr.Args(), dEnv, remotes) + r, refSpecs, err := getRefSpecs(apr.Args(), dEnv, remotes) - if verr == nil { - verr = fetchRefSpecs(ctx, ref.RefUpdateMode{Force: true}, dEnv, r, refSpecs) + if err == nil { + err = fetchRefSpecs(ctx, ref.RefUpdateMode{Force: true}, dEnv, r, refSpecs) } - return verr + return err } func remoteHasBeenMigrated(ctx context.Context, dEnv *env.DoltEnv, remoteName string) (bool, error) { From 9baf7ad26e37a39be168ad033818784715af4ed6 Mon Sep 17 00:00:00 2001 From: Aaron Son Date: Mon, 13 Apr 2020 12:26:16 -0700 Subject: [PATCH 30/60] go/cmd/dolt: commands/credcmds/import: Add import command for importing existing JWK credential. --- bats/creds.bats | 32 ++++ bats/helper/creds/known-decapitated.jwk | 1 + bats/helper/creds/known-good.jwk | 1 + bats/helper/creds/known-truncated.jwk | 1 + go/cmd/dolt/commands/credcmds/creds.go | 1 + go/cmd/dolt/commands/credcmds/import.go | 176 ++++++++++++++++++ .../eventsapi/v1alpha1/event_constants.pb.go | 98 +++++----- .../eventsapi/v1alpha1/event_constants.proto | 1 + proto/third_party/protobuf | 2 +- 9 files changed, 265 insertions(+), 48 deletions(-) create mode 100755 bats/helper/creds/known-decapitated.jwk create mode 100755 bats/helper/creds/known-good.jwk create mode 100755 bats/helper/creds/known-truncated.jwk create mode 100644 go/cmd/dolt/commands/credcmds/import.go diff --git a/bats/creds.bats b/bats/creds.bats index 7ee25edf27..3133e1fdef 100644 --- a/bats/creds.bats +++ b/bats/creds.bats @@ -87,3 +87,35 @@ teardown() { run dolt creds use [ "$status" -eq 1 ] } + +@test "can import cred from good jwk file" { + dolt creds import `batshelper creds/known-good.jwk` +} + +@test "can import cred from good jwk stdin" { + cat `batshelper creds/known-good.jwk` | dolt creds import +} + +@test "import cred of corrupted jwk from file fails" { + run dolt creds import `batshelper creds/known-truncated.jwk` + [ "$status" -eq 1 ] + run dolt creds import `batshelper creds/known-decapitated.jwk` + [ "$status" -eq 1 ] + run dolt creds import does-not-exist + [ "$status" -eq 1 ] +} + +@test "import cred of corrupted jwk from stdin fails" { + run bash -c "cat `batshelper creds/known-truncated.jwk` | dolt creds import" + [ "$status" -eq 1 ] + run bash -c "cat `batshelper creds/known-decapitated.jwk` | dolt creds import" + [ "$status" -eq 1 ] + run bash -c "echo -n | dolt creds import" + [ "$status" -eq 1 ] +} + +@test "import cred with already used cred does not replace used cred" { + pubkey=`dolt creds new | grep 'pub key:' | awk '{print $3}'` + dolt creds import `batshelper creds/known-good.jwk` + dolt creds ls -v | grep '*' | grep "$pubkey" +} diff --git a/bats/helper/creds/known-decapitated.jwk b/bats/helper/creds/known-decapitated.jwk new file mode 100755 index 0000000000..8245112b13 --- /dev/null +++ b/bats/helper/creds/known-decapitated.jwk @@ -0,0 +1 @@ +O5P55nw4uSrbFKBPciCtihlIDlMPjKVqEb0HTuw8e0HcUTtolU2HZZbrlmdQjgAID9Y3_D7dCGYYw4Bw==","kty":"OKP","crv":"Ed25519"} diff --git a/bats/helper/creds/known-good.jwk b/bats/helper/creds/known-good.jwk new file mode 100755 index 0000000000..dc931fd01c --- /dev/null +++ b/bats/helper/creds/known-good.jwk @@ -0,0 +1 @@ +{"d":"7sPHtB3FE7aJVNh2WW65ZnUI4ACA_WN_w-3QhmGMOAc=","x":"jmWVlqO5P55nw4uSrbFKBPciCtihlIDlMPjKVqEb0HTuw8e0HcUTtolU2HZZbrlmdQjgAID9Y3_D7dCGYYw4Bw==","kty":"OKP","crv":"Ed25519"} diff --git a/bats/helper/creds/known-truncated.jwk b/bats/helper/creds/known-truncated.jwk new file mode 100755 index 0000000000..a50a7206a9 --- /dev/null +++ b/bats/helper/creds/known-truncated.jwk @@ -0,0 +1 @@ +{"d":"7sPHtB3FE7aJVNh2WW65ZnUI4ACA_WN_w-3QhmGMOAc=","x":"jmWVlqO5P55nw4uSrbFKBPciCtihlIDlMPjKVqEb0HTuw8e0HcUTtolU2HZZbrlmdQjgAID9Y3_D7dCGYY diff --git a/go/cmd/dolt/commands/credcmds/creds.go b/go/cmd/dolt/commands/credcmds/creds.go index 34c2f3504e..bcbf3ba9b1 100644 --- a/go/cmd/dolt/commands/credcmds/creds.go +++ b/go/cmd/dolt/commands/credcmds/creds.go @@ -24,4 +24,5 @@ var Commands = cli.NewSubCommandHandler("creds", "Commands for managing credenti LsCmd{}, CheckCmd{}, UseCmd{}, + ImportCmd{}, }) diff --git a/go/cmd/dolt/commands/credcmds/import.go b/go/cmd/dolt/commands/credcmds/import.go new file mode 100644 index 0000000000..9bd780dc43 --- /dev/null +++ b/go/cmd/dolt/commands/credcmds/import.go @@ -0,0 +1,176 @@ +// Copyright 2020 Liquidata, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package credcmds + +import ( + "context" + "fmt" + "io" + "os" + + "github.com/liquidata-inc/dolt/go/cmd/dolt/cli" + "github.com/liquidata-inc/dolt/go/cmd/dolt/commands" + "github.com/liquidata-inc/dolt/go/cmd/dolt/errhand" + eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1" + remotesapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/remotesapi/v1alpha1" + "github.com/liquidata-inc/dolt/go/libraries/doltcore/creds" + "github.com/liquidata-inc/dolt/go/libraries/doltcore/env" + "github.com/liquidata-inc/dolt/go/libraries/doltcore/env/actions" + "github.com/liquidata-inc/dolt/go/libraries/utils/argparser" + "github.com/liquidata-inc/dolt/go/libraries/utils/filesys" +) + +var importDocs = cli.CommandDocumentationContent{ + ShortDesc: "Import a dolt credential from an existing .jwk file.", + LongDesc: `Imports a dolt credential from an existing .jwk file. + +Dolt credentials are stored in the creds subdirectory of the global dolt conifg +directory as files with one key per file in JWK format. This command can import +a JWK from a file or stdin and places the imported key in the correct place for +dolt to find it as a valid credential. + +This command will set the newly imported credential as the used credential if +there are currently not credentials. If this command does use the new +credential, it will call doltremoteapi to update user.name and user.email with +information from the remote user profile if those fields are not already +available in the local dolt config.`, + Synopsis: []string{"[--no-profile] [{{.LessThan}}jwk_filename{{.GreaterThan}}]"}, +} + +type ImportCmd struct{} + +// Name is returns the name of the Dolt cli command. This is what is used on the command line to invoke the command +func (cmd ImportCmd) Name() string { + return "import" +} + +// Description returns a description of the command +func (cmd ImportCmd) Description() string { + return importDocs.ShortDesc +} + +// CreateMarkdown creates a markdown file containing the helptext for the command at the given path +func (cmd ImportCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error { + ap := cmd.createArgParser() + return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, importDocs, ap)) +} + +// RequiresRepo should return false if this interface is implemented, and the command does not have the requirement +// that it be run from within a data repository directory +func (cmd ImportCmd) RequiresRepo() bool { + return false +} + +// EventType returns the type of the event to log +func (cmd ImportCmd) EventType() eventsapi.ClientEventType { + return eventsapi.ClientEventType_CREDS_IMPORT +} + +const noProfileFlag = "no-profile" + +func (cmd ImportCmd) createArgParser() *argparser.ArgParser { + ap := argparser.NewArgParser() + ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"jwk_filename", "The JWK file. If omitted, import operates on stdin."}) + ap.SupportsFlag(noProfileFlag, "", "If provided, no attempt will be made to contact doltremoteapi and update user.name and user.email.") + return ap +} + +// Exec executes the command +func (cmd ImportCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int { + ap := cmd.createArgParser() + help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, importDocs, ap)) + apr := cli.ParseArgs(ap, args, help) + + credsDir, verr := actions.EnsureCredsDir(dEnv) + if verr != nil { + return commands.HandleVErrAndExitCode(verr, usage) + } + + noprofile := apr.Contains(noProfileFlag) + var input io.ReadCloser = os.Stdin + if apr.NArg() == 1 { + var err error + input, err = dEnv.FS.OpenForRead(apr.Arg(0)) + if err != nil { + verr = errhand.BuildDError("error: cannot open ", apr.Arg(0)).AddCause(err).Build() + return commands.HandleVErrAndExitCode(verr, usage) + } + defer input.Close() + } + + c, err := creds.JWKCredsRead(input) + if err != nil { + verr = errhand.BuildDError("error: could not read JWK").AddCause(err).Build() + return commands.HandleVErrAndExitCode(verr, usage) + } + if !c.IsPrivKeyValid() || !c.IsPubKeyValid() { + verr = errhand.BuildDError("error: deserialized JWK was not valid").Build() + return commands.HandleVErrAndExitCode(verr, usage) + } + + _, err = creds.JWKCredsWriteToDir(dEnv.FS, credsDir, c) + if err != nil { + verr = errhand.BuildDError("error: could not write credentials to file").AddCause(err).Build() + return commands.HandleVErrAndExitCode(verr, usage) + } + cli.Println("Imported credential:", c.PubKeyBase32Str()) + + err = updateConfigToUseNewCredIfNoExistingCred(dEnv, c) + if err != nil { + cli.Println("Warning: could not update profile to use imported credential:", err) + } + + if !noprofile { + err := updateProfileWithCredentials(ctx, dEnv, c) + if err != nil { + cli.Println("Warning: could not update profile with imported and used credentials:", err) + } + } + + return 0 +} + +func updateProfileWithCredentials(ctx context.Context, dEnv *env.DoltEnv, c creds.DoltCreds) error { + gcfg, hasGCfg := dEnv.Config.GetConfig(env.GlobalConfig) + if !hasGCfg { + panic("Should have global config here...") + } + + if _, err := gcfg.GetString(env.UserNameKey); err == nil { + // Already has a name... + return nil + } + if _, err := gcfg.GetString(env.UserEmailKey); err == nil { + // Already has an email... + return nil + } + + host := dEnv.Config.GetStringOrDefault(env.RemotesApiHostKey, env.DefaultRemotesApiHost) + port := dEnv.Config.GetStringOrDefault(env.RemotesApiHostPortKey, env.DefaultRemotesApiPort) + conn, err := dEnv.GrpcConnWithCreds(fmt.Sprintf("%s:%s", *host, *port), false, c) + if err != nil { + return fmt.Errorf("error: unable to connect to server with credentials: %w", err) + } + grpcClient := remotesapi.NewCredentialsServiceClient(conn) + resp, err := grpcClient.WhoAmI(ctx, &remotesapi.WhoAmIRequest{}) + if err != nil { + return fmt.Errorf("error: unable to call WhoAmI endpoint: %w", err) + } + userUpdates := map[string]string{ + env.UserNameKey: resp.DisplayName, + env.UserEmailKey: resp.EmailAddress, + } + return gcfg.SetStrings(userUpdates) +} diff --git a/go/gen/proto/dolt/services/eventsapi/v1alpha1/event_constants.pb.go b/go/gen/proto/dolt/services/eventsapi/v1alpha1/event_constants.pb.go index b926c6551c..f0f5747b32 100644 --- a/go/gen/proto/dolt/services/eventsapi/v1alpha1/event_constants.pb.go +++ b/go/gen/proto/dolt/services/eventsapi/v1alpha1/event_constants.pb.go @@ -103,6 +103,7 @@ const ( ClientEventType_BLAME ClientEventType = 45 ClientEventType_CREDS_CHECK ClientEventType = 46 ClientEventType_CREDS_USE ClientEventType = 47 + ClientEventType_CREDS_IMPORT ClientEventType = 48 ) var ClientEventType_name = map[int32]string{ @@ -154,6 +155,7 @@ var ClientEventType_name = map[int32]string{ 45: "BLAME", 46: "CREDS_CHECK", 47: "CREDS_USE", + 48: "CREDS_IMPORT", } var ClientEventType_value = map[string]int32{ @@ -205,6 +207,7 @@ var ClientEventType_value = map[string]int32{ "BLAME": 45, "CREDS_CHECK": 46, "CREDS_USE": 47, + "CREDS_IMPORT": 48, } func (x ClientEventType) String() string { @@ -309,53 +312,54 @@ func init() { } var fileDescriptor_d970d881fa70959f = []byte{ - // 764 bytes of a gzipped FileDescriptorProto + // 772 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x54, 0xcb, 0x72, 0xdb, 0x36, 0x14, 0xad, 0xfc, 0x90, 0x65, 0xf8, 0x75, 0x8d, 0xd8, 0x79, 0xb6, 0x8e, 0xeb, 0xa6, 0x2f, 0xb5, - 0x16, 0x27, 0xd3, 0x99, 0x6e, 0xba, 0x82, 0xc0, 0x2b, 0x09, 0x13, 0x90, 0x60, 0x00, 0xd0, 0x4a, - 0xba, 0xc1, 0xc8, 0x0a, 0xeb, 0xb0, 0xa3, 0x48, 0xaa, 0x44, 0x7b, 0xa6, 0x3f, 0xd1, 0x6f, 0xee, - 0x80, 0x6c, 0x44, 0x5b, 0x9b, 0xee, 0x78, 0xcf, 0x3d, 0xe7, 0x00, 0x38, 0xbc, 0x00, 0xf9, 0xf5, - 0xc3, 0x6c, 0x52, 0x04, 0xcb, 0x6c, 0x71, 0x97, 0x8f, 0xb3, 0x65, 0x90, 0xdd, 0x65, 0xd3, 0x62, - 0x39, 0x9a, 0xe7, 0xc1, 0xdd, 0xeb, 0xd1, 0x64, 0xfe, 0x71, 0xf4, 0xba, 0x82, 0xdc, 0x78, 0x36, - 0x5d, 0x16, 0xa3, 0x69, 0xb1, 0xec, 0xcc, 0x17, 0xb3, 0x62, 0x46, 0xcf, 0xbd, 0xae, 0xf3, 0x59, - 0xd7, 0x59, 0xe9, 0x3a, 0x9f, 0x75, 0xed, 0x01, 0x69, 0x25, 0x93, 0x51, 0xf1, 0xc7, 0x6c, 0xf1, - 0x89, 0x3e, 0x25, 0x27, 0x89, 0x64, 0xb6, 0xa7, 0x74, 0xe4, 0xd2, 0xd8, 0x24, 0xc8, 0x45, 0x4f, - 0x60, 0x08, 0x5f, 0xd0, 0x5d, 0xb2, 0x2d, 0x45, 0x9c, 0xbe, 0x83, 0x06, 0xdd, 0x23, 0x3b, 0x43, - 0x11, 0x87, 0x6a, 0x68, 0x60, 0x83, 0x12, 0xd2, 0x0c, 0x99, 0x1e, 0x8a, 0x18, 0x36, 0xdb, 0xff, - 0x34, 0xc9, 0x11, 0x9f, 0xe4, 0xd9, 0xb4, 0x40, 0xbf, 0x8c, 0xfd, 0x7b, 0x9e, 0xd1, 0x13, 0x02, - 0xf6, 0x7d, 0x82, 0x6b, 0x6e, 0x2d, 0xb2, 0x25, 0x62, 0x61, 0xa1, 0xe1, 0xf5, 0xc6, 0x32, 0x9b, - 0x7a, 0xaf, 0x1d, 0xb2, 0xc9, 0xc2, 0x10, 0x36, 0xfd, 0x62, 0x1a, 0x0d, 0x5a, 0xd8, 0xf2, 0x7d, - 0xae, 0xa2, 0x48, 0x58, 0xd8, 0xf6, 0x7d, 0xf3, 0x56, 0x42, 0x93, 0x1e, 0x12, 0x62, 0xde, 0x4a, - 0x67, 0x50, 0x5f, 0xa1, 0x86, 0x1d, 0xdf, 0x90, 0xaa, 0x0f, 0x2d, 0xef, 0x1b, 0x8a, 0x5e, 0x0f, - 0x76, 0xbd, 0x45, 0x84, 0xba, 0x8f, 0x40, 0xbc, 0x45, 0x57, 0xb3, 0x98, 0x0f, 0x60, 0x8f, 0xee, - 0x93, 0x16, 0x1f, 0x20, 0x7f, 0xa3, 0x52, 0x0b, 0xfb, 0xbe, 0xa3, 0x31, 0x52, 0x16, 0xe1, 0xc0, - 0x4b, 0x93, 0xd4, 0x0c, 0xe0, 0xb0, 0xfa, 0x92, 0x12, 0x8e, 0xbc, 0x49, 0x0f, 0x2d, 0x1f, 0x00, - 0xf8, 0x4f, 0x2e, 0x55, 0x8c, 0x70, 0x5c, 0x46, 0xa1, 0xfa, 0x22, 0x06, 0xea, 0xa3, 0xb8, 0x42, - 0x6d, 0x84, 0x8a, 0xe1, 0x51, 0xb5, 0xd5, 0xb8, 0x27, 0xfa, 0x70, 0x42, 0x9b, 0x64, 0x43, 0x1a, - 0x38, 0x2d, 0x8f, 0xc7, 0x07, 0x18, 0x31, 0x78, 0x4c, 0x81, 0xec, 0x5b, 0xd6, 0x95, 0xe8, 0x44, - 0x94, 0x28, 0x6d, 0xe1, 0x49, 0x8d, 0xe0, 0xbb, 0x12, 0x79, 0x5a, 0x23, 0x5c, 0x23, 0xb3, 0x08, - 0xcf, 0xfc, 0x8e, 0x2b, 0x44, 0x47, 0xf0, 0xbc, 0xae, 0xa2, 0x2b, 0x78, 0x51, 0x57, 0x3c, 0x81, - 0x2f, 0x6b, 0xad, 0x41, 0x89, 0xdc, 0xc2, 0x57, 0xf4, 0x98, 0x1c, 0x54, 0x48, 0x92, 0x5a, 0xa7, - 0xd5, 0x10, 0xce, 0x6a, 0x92, 0x8e, 0x4a, 0xe4, 0x25, 0x3d, 0x20, 0xbb, 0x5c, 0x63, 0x68, 0x5c, - 0x8c, 0x43, 0x38, 0x2f, 0x13, 0x2a, 0x4b, 0x1d, 0xc1, 0xd7, 0x75, 0x25, 0x0d, 0x5c, 0x94, 0x95, - 0x8a, 0x7b, 0x8e, 0x33, 0x0b, 0xdf, 0x78, 0xab, 0xb2, 0xd2, 0x68, 0x94, 0xbc, 0x42, 0x78, 0x45, - 0x5f, 0x92, 0x17, 0x55, 0x9e, 0x2c, 0x11, 0xae, 0x8f, 0xd6, 0x69, 0x4c, 0x94, 0x8b, 0xd0, 0xb2, - 0x90, 0x59, 0x06, 0xdf, 0xfa, 0xf9, 0xaa, 0x09, 0x03, 0x66, 0x1c, 0x1f, 0xa4, 0xf1, 0x1b, 0x03, - 0xdf, 0xd1, 0x57, 0xe4, 0xfc, 0xa1, 0x34, 0x54, 0xc3, 0x58, 0x2a, 0x16, 0x3a, 0xa9, 0x38, 0xb3, - 0x42, 0xc5, 0x06, 0xbe, 0xa7, 0x17, 0xe4, 0xec, 0x21, 0x2b, 0x4d, 0xd6, 0x38, 0x3f, 0xf8, 0x89, - 0xab, 0x39, 0x1a, 0xbb, 0xcc, 0x20, 0xfc, 0x48, 0x29, 0x39, 0xbc, 0x87, 0x2a, 0x65, 0xa1, 0xfd, - 0x90, 0xf9, 0xdf, 0x94, 0xfd, 0x44, 0xcf, 0xc8, 0xf3, 0x1a, 0x95, 0xc2, 0x58, 0x57, 0x05, 0xd6, - 0x13, 0x12, 0x0d, 0xfc, 0xec, 0x7f, 0x7f, 0x57, 0xb2, 0x08, 0xe1, 0x92, 0x1e, 0x91, 0xbd, 0x2a, - 0x9d, 0x72, 0xa6, 0xa0, 0x53, 0x67, 0x99, 0x1a, 0x84, 0xa0, 0xfd, 0x27, 0x69, 0x45, 0x59, 0xb1, - 0xc8, 0xc7, 0x22, 0xa4, 0x8f, 0x09, 0x8d, 0xd0, 0x6a, 0xc1, 0xd7, 0xae, 0xc2, 0x09, 0x81, 0xee, - 0x7b, 0x8b, 0x66, 0x75, 0x60, 0x0c, 0xa1, 0x41, 0x9f, 0x90, 0x47, 0xab, 0x00, 0x22, 0xe3, 0x50, - 0xb2, 0xc4, 0x60, 0x08, 0x1b, 0xbe, 0x71, 0xef, 0x1c, 0x09, 0x77, 0xa8, 0xb5, 0xd2, 0xb0, 0xd9, - 0x46, 0xb2, 0xc7, 0x8a, 0x62, 0x91, 0x5f, 0xdf, 0x16, 0x99, 0x08, 0xe9, 0x33, 0x72, 0xca, 0xac, - 0xd5, 0xa2, 0x9b, 0xda, 0xf5, 0xcb, 0x77, 0x4a, 0x8e, 0x2b, 0x0b, 0x97, 0x6a, 0xe9, 0xca, 0xf1, - 0x44, 0xd8, 0xb8, 0xd8, 0x6a, 0x35, 0xa0, 0xd1, 0xbe, 0x24, 0xdb, 0x6c, 0x3e, 0xaf, 0xf6, 0xcb, - 0x92, 0xc4, 0x89, 0x70, 0x4d, 0xbd, 0x4f, 0x5a, 0x1e, 0x0f, 0x95, 0xb4, 0xd0, 0xe8, 0x0e, 0x7f, - 0x4f, 0x6f, 0xf2, 0xe2, 0xe3, 0xed, 0x75, 0x67, 0x3c, 0xfb, 0x14, 0x4c, 0xf2, 0xbf, 0x6e, 0xf3, - 0x0f, 0xa3, 0x62, 0x74, 0x99, 0x4f, 0xc7, 0x41, 0xf9, 0x62, 0xdd, 0xcc, 0x82, 0x9b, 0x6c, 0x1a, - 0x94, 0x8f, 0x51, 0xf0, 0x7f, 0x6f, 0xd8, 0x6f, 0x2b, 0xe8, 0xba, 0x59, 0x2a, 0x7e, 0xf9, 0x37, - 0x00, 0x00, 0xff, 0xff, 0xbf, 0x8f, 0x66, 0xbe, 0xf8, 0x04, 0x00, 0x00, + 0x16, 0x9b, 0xe9, 0x4c, 0x37, 0x5d, 0x41, 0xe0, 0x95, 0x84, 0x09, 0x48, 0x30, 0x00, 0x68, 0x25, + 0xdd, 0x60, 0x64, 0x85, 0x75, 0xd8, 0x51, 0x24, 0x55, 0xa2, 0x3d, 0xd3, 0xbf, 0xe9, 0xa7, 0x76, + 0x40, 0x26, 0x62, 0xac, 0x4d, 0x77, 0x3c, 0xe7, 0xde, 0x73, 0x08, 0x1c, 0x5c, 0x80, 0xfc, 0xf6, + 0x6e, 0x36, 0x29, 0x82, 0x65, 0xb6, 0xb8, 0xcb, 0xc7, 0xd9, 0x32, 0xc8, 0xee, 0xb2, 0x69, 0xb1, + 0x1c, 0xcd, 0xf3, 0xe0, 0xee, 0xe5, 0x68, 0x32, 0x7f, 0x3f, 0x7a, 0x59, 0x51, 0x6e, 0x3c, 0x9b, + 0x2e, 0x8b, 0xd1, 0xb4, 0x58, 0x76, 0xe6, 0x8b, 0x59, 0x31, 0xa3, 0xe7, 0x5e, 0xd7, 0xf9, 0xa4, + 0xeb, 0xac, 0x74, 0x9d, 0x4f, 0xba, 0xf6, 0x80, 0xb4, 0x92, 0xc9, 0xa8, 0xf8, 0x73, 0xb6, 0xf8, + 0x40, 0x1f, 0x93, 0x93, 0x44, 0x32, 0xdb, 0x53, 0x3a, 0x72, 0x69, 0x6c, 0x12, 0xe4, 0xa2, 0x27, + 0x30, 0x84, 0x2f, 0xe8, 0x2e, 0xd9, 0x96, 0x22, 0x4e, 0xdf, 0x40, 0x83, 0xee, 0x91, 0x9d, 0xa1, + 0x88, 0x43, 0x35, 0x34, 0xb0, 0x41, 0x09, 0x69, 0x86, 0x4c, 0x0f, 0x45, 0x0c, 0x9b, 0xed, 0x7f, + 0x9b, 0xe4, 0x88, 0x4f, 0xf2, 0x6c, 0x5a, 0xa0, 0xff, 0x8d, 0xfd, 0x67, 0x9e, 0xd1, 0x13, 0x02, + 0xf6, 0x6d, 0x82, 0x6b, 0x6e, 0x2d, 0xb2, 0x25, 0x62, 0x61, 0xa1, 0xe1, 0xf5, 0xc6, 0x32, 0x9b, + 0x7a, 0xaf, 0x1d, 0xb2, 0xc9, 0xc2, 0x10, 0x36, 0xfd, 0xcf, 0x34, 0x1a, 0xb4, 0xb0, 0xe5, 0xeb, + 0x5c, 0x45, 0x91, 0xb0, 0xb0, 0xed, 0xeb, 0xe6, 0xb5, 0x84, 0x26, 0x3d, 0x24, 0xc4, 0xbc, 0x96, + 0xce, 0xa0, 0xbe, 0x42, 0x0d, 0x3b, 0xbe, 0x20, 0x55, 0x1f, 0x5a, 0xde, 0x37, 0x14, 0xbd, 0x1e, + 0xec, 0x7a, 0x8b, 0x08, 0x75, 0x1f, 0x81, 0x78, 0x8b, 0xae, 0x66, 0x31, 0x1f, 0xc0, 0x1e, 0xdd, + 0x27, 0x2d, 0x3e, 0x40, 0xfe, 0x4a, 0xa5, 0x16, 0xf6, 0x7d, 0x45, 0x63, 0xa4, 0x2c, 0xc2, 0x81, + 0x97, 0x26, 0xa9, 0x19, 0xc0, 0x61, 0xf5, 0x25, 0x25, 0x1c, 0x79, 0x93, 0x1e, 0x5a, 0x3e, 0x00, + 0xf0, 0x9f, 0x5c, 0xaa, 0x18, 0xe1, 0xb8, 0x8c, 0x42, 0xf5, 0x45, 0x0c, 0xd4, 0x47, 0x71, 0x85, + 0xda, 0x08, 0x15, 0xc3, 0x83, 0x6a, 0xa9, 0x71, 0x4f, 0xf4, 0xe1, 0x84, 0x36, 0xc9, 0x86, 0x34, + 0x70, 0x5a, 0x6e, 0x8f, 0x0f, 0x30, 0x62, 0xf0, 0x90, 0x02, 0xd9, 0xb7, 0xac, 0x2b, 0xd1, 0x89, + 0x28, 0x51, 0xda, 0xc2, 0xa3, 0x9a, 0xc1, 0x37, 0x25, 0xf3, 0xb8, 0x66, 0xb8, 0x46, 0x66, 0x11, + 0x9e, 0xf8, 0x15, 0x57, 0x8c, 0x8e, 0xe0, 0x69, 0x8d, 0xa2, 0x2b, 0x78, 0x56, 0x23, 0x9e, 0xc0, + 0x97, 0xb5, 0xd6, 0xa0, 0x44, 0x6e, 0xe1, 0x2b, 0x7a, 0x4c, 0x0e, 0x2a, 0x26, 0x49, 0xad, 0xd3, + 0x6a, 0x08, 0x67, 0x75, 0x93, 0x8e, 0x4a, 0xe6, 0x39, 0x3d, 0x20, 0xbb, 0x5c, 0x63, 0x68, 0x5c, + 0x8c, 0x43, 0x38, 0x2f, 0x13, 0x2a, 0xa1, 0x8e, 0xe0, 0xeb, 0x1a, 0x49, 0x03, 0x17, 0x25, 0x52, + 0x71, 0xcf, 0x71, 0x66, 0xe1, 0x1b, 0x6f, 0x55, 0x22, 0x8d, 0x46, 0xc9, 0x2b, 0x84, 0x17, 0xf4, + 0x39, 0x79, 0x56, 0xe5, 0xc9, 0x12, 0xe1, 0xfa, 0x68, 0x9d, 0xc6, 0x44, 0xb9, 0x08, 0x2d, 0x0b, + 0x99, 0x65, 0xf0, 0xad, 0x9f, 0xaf, 0xba, 0x61, 0xc0, 0x8c, 0xe3, 0x83, 0x34, 0x7e, 0x65, 0xe0, + 0x3b, 0xfa, 0x82, 0x9c, 0xdf, 0x97, 0x86, 0x6a, 0x18, 0x4b, 0xc5, 0x42, 0x27, 0x15, 0x67, 0x56, + 0xa8, 0xd8, 0xc0, 0xf7, 0xf4, 0x82, 0x9c, 0xdd, 0xef, 0x4a, 0x93, 0xb5, 0x9e, 0x1f, 0xfc, 0xc4, + 0xd5, 0x3d, 0x1a, 0xbb, 0xcc, 0x20, 0xfc, 0x48, 0x29, 0x39, 0xfc, 0x8c, 0x55, 0xca, 0x42, 0xfb, + 0x7e, 0xe7, 0xc7, 0x29, 0xfb, 0x89, 0x9e, 0x91, 0xa7, 0x35, 0x2b, 0x85, 0xb1, 0xae, 0x0a, 0xac, + 0x27, 0x24, 0x1a, 0xf8, 0xd9, 0x1f, 0x7f, 0x57, 0xb2, 0x08, 0xe1, 0x92, 0x1e, 0x91, 0xbd, 0x2a, + 0x9d, 0x72, 0xa6, 0xa0, 0x53, 0x67, 0x99, 0x1a, 0x84, 0xa0, 0x4c, 0xa8, 0x84, 0x1f, 0x4f, 0xfc, + 0x97, 0xf6, 0x5f, 0xa4, 0x15, 0x65, 0xc5, 0x22, 0x1f, 0x8b, 0x90, 0x3e, 0x24, 0x34, 0x42, 0xab, + 0x05, 0x5f, 0xbb, 0x1c, 0x27, 0x04, 0xba, 0x6f, 0x2d, 0x9a, 0x55, 0x04, 0x18, 0x42, 0x83, 0x3e, + 0x22, 0x0f, 0x56, 0x91, 0x44, 0xc6, 0xa1, 0x64, 0x89, 0xc1, 0x10, 0x36, 0x7c, 0xe1, 0xb3, 0x9d, + 0x25, 0xdc, 0xa1, 0xd6, 0x4a, 0xc3, 0x66, 0x1b, 0xc9, 0x1e, 0x2b, 0x8a, 0x45, 0x7e, 0x7d, 0x5b, + 0x64, 0x22, 0xa4, 0x4f, 0xc8, 0x29, 0xb3, 0x56, 0x8b, 0x6e, 0x6a, 0xd7, 0xaf, 0xe3, 0x29, 0x39, + 0xae, 0x2c, 0x5c, 0xaa, 0xa5, 0x2b, 0x07, 0x16, 0x61, 0xe3, 0x62, 0xab, 0xd5, 0x80, 0x46, 0xfb, + 0x92, 0x6c, 0xb3, 0xf9, 0xbc, 0x5a, 0x2f, 0x4b, 0x12, 0x27, 0xc2, 0x35, 0xf5, 0x3e, 0x69, 0x79, + 0x3e, 0x54, 0xd2, 0x42, 0xa3, 0x3b, 0xfc, 0x23, 0xbd, 0xc9, 0x8b, 0xf7, 0xb7, 0xd7, 0x9d, 0xf1, + 0xec, 0x43, 0x30, 0xc9, 0xff, 0xbe, 0xcd, 0xdf, 0x8d, 0x8a, 0xd1, 0x65, 0x3e, 0x1d, 0x07, 0xe5, + 0x1b, 0x76, 0x33, 0x0b, 0x6e, 0xb2, 0x69, 0x50, 0x3e, 0x4f, 0xc1, 0xff, 0xbd, 0x6a, 0xbf, 0xaf, + 0xa8, 0xeb, 0x66, 0xa9, 0xf8, 0xf5, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x04, 0xe0, 0xf2, 0x36, + 0x0a, 0x05, 0x00, 0x00, } diff --git a/proto/dolt/services/eventsapi/v1alpha1/event_constants.proto b/proto/dolt/services/eventsapi/v1alpha1/event_constants.proto index ce0c9d979d..6847ea1d47 100644 --- a/proto/dolt/services/eventsapi/v1alpha1/event_constants.proto +++ b/proto/dolt/services/eventsapi/v1alpha1/event_constants.proto @@ -76,6 +76,7 @@ enum ClientEventType { BLAME = 45; CREDS_CHECK = 46; CREDS_USE = 47; + CREDS_IMPORT = 48; } enum MetricID { diff --git a/proto/third_party/protobuf b/proto/third_party/protobuf index 6a59a2ad1f..6f9d488149 160000 --- a/proto/third_party/protobuf +++ b/proto/third_party/protobuf @@ -1 +1 @@ -Subproject commit 6a59a2ad1f61d9696092f79b6d74368b4d7970a3 +Subproject commit 6f9d48814928d99c079ac2dcbd4c2a50d6e97915 From 4138913b827a4c37999ca9ba893287d8a33cf537 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Mon, 13 Apr 2020 16:20:07 -0700 Subject: [PATCH 31/60] fix error message --- go/cmd/dolt/commands/migrate.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go/cmd/dolt/commands/migrate.go b/go/cmd/dolt/commands/migrate.go index 0f083cfa65..536f08108a 100644 --- a/go/cmd/dolt/commands/migrate.go +++ b/go/cmd/dolt/commands/migrate.go @@ -209,7 +209,7 @@ func fetchMigratedRemoteBranches(ctx context.Context, dEnv *env.DoltEnv, apr *ar return err } if localMigrationNeeded { - return fmt.Errorf("Local repo must be migrated before pushing, run 'dolt migrate'") + return fmt.Errorf("Local repo must be migrated before pulling, run 'dolt migrate'") } remoteName := "origin" From d9e9f5c9812de50d6e1c4143710f5a3a9a3a77ea Mon Sep 17 00:00:00 2001 From: Aaron Son Date: Mon, 13 Apr 2020 16:45:02 -0700 Subject: [PATCH 32/60] go/cmd/dolt/commands/credcmds: import.go: Add formatting directive for missing filename. --- go/cmd/dolt/commands/credcmds/import.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go/cmd/dolt/commands/credcmds/import.go b/go/cmd/dolt/commands/credcmds/import.go index 9bd780dc43..6cbe1e56f8 100644 --- a/go/cmd/dolt/commands/credcmds/import.go +++ b/go/cmd/dolt/commands/credcmds/import.go @@ -104,7 +104,7 @@ func (cmd ImportCmd) Exec(ctx context.Context, commandStr string, args []string, var err error input, err = dEnv.FS.OpenForRead(apr.Arg(0)) if err != nil { - verr = errhand.BuildDError("error: cannot open ", apr.Arg(0)).AddCause(err).Build() + verr = errhand.BuildDError("error: cannot open %s", apr.Arg(0)).AddCause(err).Build() return commands.HandleVErrAndExitCode(verr, usage) } defer input.Close() From 2c35e43dd0b300bdde1ff6be366e4760fe5e2f81 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Mon, 13 Apr 2020 16:58:21 -0700 Subject: [PATCH 33/60] updated command description, help messages --- go/cmd/dolt/commands/migrate.go | 34 +++++++++++++++++---------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/go/cmd/dolt/commands/migrate.go b/go/cmd/dolt/commands/migrate.go index 536f08108a..c6b84b1057 100644 --- a/go/cmd/dolt/commands/migrate.go +++ b/go/cmd/dolt/commands/migrate.go @@ -57,8 +57,8 @@ func (cmd MigrateCmd) CreateMarkdown(_ filesys.Filesys, _, _ string) error { func (cmd MigrateCmd) createArgParser() *argparser.ArgParser { ap := argparser.NewArgParser() - ap.SupportsFlag(migratePushFlag, "", "") - ap.SupportsFlag(migratePullFlag, "", "") + ap.SupportsFlag(migratePushFlag, "", "Push all migrated branches to the remote") + ap.SupportsFlag(migratePullFlag, "", "Update all remote refs for a migrated remote") return ap } @@ -112,21 +112,23 @@ func migrateLocalRepo(ctx context.Context, dEnv *env.DoltEnv) error { } } else { cli.Println("Repository format is up to date") - - remoteName := "origin" - remoteMigrated, err := remoteHasBeenMigrated(ctx, dEnv, remoteName) - if err != nil { - return err - } - - if !remoteMigrated { - cli.Println(fmt.Sprintf("Remote %s has not been migrated", remoteName)) - cli.Println("Run 'dolt mgirate --push' to update remote") - } else { - cli.Println(fmt.Sprintf("Remote %s has been migrated", remoteName)) - cli.Println("Run 'dolt migrate --pull' to update refs") - } } + + remoteName := "origin" + remoteMigrated, err := remoteHasBeenMigrated(ctx, dEnv, remoteName) + if err != nil { + // if we can't check the remote, exit silently + return nil + } + + if !remoteMigrated { + cli.Println(fmt.Sprintf("Remote %s has not been migrated", remoteName)) + cli.Println(fmt.Sprintf("Run 'dolt migrate --push' %s to update remote", remoteName)) + } else { + cli.Println(fmt.Sprintf("Remote %s has been migrated", remoteName)) + cli.Println(fmt.Sprintf("Run 'dolt migrate --pull %s' to update refs", remoteName)) + } + return nil } From 9073ad7a4918976e51e01217a05e4d8a4bf77318 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Mon, 13 Apr 2020 18:34:42 -0700 Subject: [PATCH 34/60] test for migrate --push and --pull --- .../migration_test_files/helper/common.bash | 16 +++++ .../helper/windows-compat.bash | 24 +++++++ .../migration_test_files/migrate-push-pull.sh | 69 +++++++++++++++++++ .../migration_test_files/migrate.bats | 59 ++++++++++++++++ .../migration_test_files/setup_repo.sh | 67 ++++++++++++++++++ 5 files changed, 235 insertions(+) create mode 100644 bats/compatibility/migration_test_files/helper/common.bash create mode 100644 bats/compatibility/migration_test_files/helper/windows-compat.bash create mode 100755 bats/compatibility/migration_test_files/migrate-push-pull.sh create mode 100644 bats/compatibility/migration_test_files/migrate.bats create mode 100755 bats/compatibility/migration_test_files/setup_repo.sh diff --git a/bats/compatibility/migration_test_files/helper/common.bash b/bats/compatibility/migration_test_files/helper/common.bash new file mode 100644 index 0000000000..b2130f32dc --- /dev/null +++ b/bats/compatibility/migration_test_files/helper/common.bash @@ -0,0 +1,16 @@ +load helper/windows-compat + +if [ -z "$BATS_TMPDIR" ]; then + export BATS_TMPDIR=$HOME/batstmp/ + mkdir $BATS_TMPDIR +fi + +setup_common() { + echo "setup" > /dev/null +} + +teardown_common() { + echo "teardown" > /dev/null +} + +dolt config --global --add metrics.disabled true > /dev/null 2>&1 diff --git a/bats/compatibility/migration_test_files/helper/windows-compat.bash b/bats/compatibility/migration_test_files/helper/windows-compat.bash new file mode 100644 index 0000000000..3a7049eccc --- /dev/null +++ b/bats/compatibility/migration_test_files/helper/windows-compat.bash @@ -0,0 +1,24 @@ +nativepath() { echo "$1"; } +nativevar() { eval export "$1"="$2"; } +skiponwindows() { :; } + +IS_WINDOWS=false + +if [ -d /mnt/c/Windows/System32 ]; then + IS_WINDOWS=true + if [ ! -d /mnt/c/batstmp ]; then + mkdir /mnt/c/batstmp + fi + BATS_TMPDIR=`TMPDIR=/mnt/c/batstmp mktemp -d -t dolt-bats-tests-XXXXXX` + export BATS_TMPDIR + nativepath() { + wslpath -w "$1" + } + nativevar() { + eval export "$1"="$2" + export WSLENV="$1$3" + } + skiponwindows() { + skip "$1" + } +fi diff --git a/bats/compatibility/migration_test_files/migrate-push-pull.sh b/bats/compatibility/migration_test_files/migrate-push-pull.sh new file mode 100755 index 0000000000..98e54c6fd4 --- /dev/null +++ b/bats/compatibility/migration_test_files/migrate-push-pull.sh @@ -0,0 +1,69 @@ +#!/bin/bash + +set -eo pipefail + +function download_release() { + ver=$1 + dirname=binaries/"$ver" + mkdir "$dirname" + basename=dolt-"$PLATFORM_TUPLE" + filename="$basename".tar.gz + filepath=binaries/"$ver"/"$filename" + url="https://github.com/liquidata-inc/dolt/releases/download/$ver/$filename" + curl -L -o "$filepath" "$url" + cd "$dirname" && tar zxf "$filename" + echo "$dirname"/"$basename"/bin +} + +get_platform_tuple() { + OS=$(uname) + ARCH=$(uname -m) + if [ "$OS" != Linux -a "$OS" != Darwin ]; then + echo "tests only support linux or macOS." 1>&2 + exit 1 + fi + if [ "$ARCH" != x86_64 -a "$ARCH" != i386 -a "$ARCH" != i686 ]; then + echo "tests only support x86_64 or x86." 1>&2 + exit 1 + fi + if [ "$OS" == Linux ]; then + PLATFORM_TUPLE=linux + else + PLATFORM_TUPLE=darwin + fi + if [ "$ARCH" == x86_64 ]; then + PLATFORM_TUPLE="$PLATFORM_TUPLE"-amd64 + else + PLATFORM_TUPLE="$PLATFORM_TUPLE"-386 + fi + echo "$PLATFORM_TUPLE" +} + +PLATFORM_TUPLE=`get_platform_tuple` + +setup_test_repos() { + ./setup_repo.sh "$1" + mkdir "$1-remote" + pushd "$1" + dolt remote add origin "file://../$1-remote" + # branches created in setup_repo.sh + dolt push origin init + dolt push origin master + dolt push origin other + popd + dolt clone "file://$1-remote" "$1-clone" +} + +TOP_DIR=`pwd` +function cleanup() { + pushd $TOP_DIR + rm -rf binaries + rm -rf repo* + popd +} +mkdir binaries +trap cleanup "EXIT" + +bin=`download_release "v0.15.2"` +PATH="`pwd`"/"$bin":"$PATH" setup_test_repos "repo" +TEST_REPO="repo" bats migrate.bats diff --git a/bats/compatibility/migration_test_files/migrate.bats b/bats/compatibility/migration_test_files/migrate.bats new file mode 100644 index 0000000000..872aca0be7 --- /dev/null +++ b/bats/compatibility/migration_test_files/migrate.bats @@ -0,0 +1,59 @@ +#!/usr/bin/env bats +load $BATS_TEST_DIRNAME/helper/common.bash + +setup() { + setup_common +} + +teardown() { + teardown_common +} + +@test "dolt migrate --push & dolt migrate --pull" { + pushd "$TEST_REPO" + + run dolt migrate --pull + [ "$status" -ne "0" ] + [[ "$output" =~ "Local repo must be migrated before pulling, run 'dolt migrate'" ]] || false + + run dolt migrate --push + [ "$status" -ne "0" ] + [[ "$output" =~ "Local repo must be migrated before pushing, run 'dolt migrate'" ]] || false + + run dolt migrate + [ "$status" -eq "0" ] + [[ "$output" =~ "Migrating repository to the latest format" ]] || false + + run dolt migrate --pull + [ "$status" -ne "0" ] + [[ "$output" =~ "Remote origin has not been migrated" ]] || false + [[ "$output" =~ "Run 'dolt migrate --push origin' to push migration" ]] || false + + run dolt migrate --push + [ "$status" -eq "0" ] + + popd + pushd "$TEST_REPO-clone" + + run dolt migrate --pull + [ "$status" -ne "0" ] + [[ "$output" =~ "Local repo must be migrated before pulling, run 'dolt migrate'" ]] || false + + run dolt migrate --push + [ "$status" -ne "0" ] + [[ "$output" =~ "Local repo must be migrated before pushing, run 'dolt migrate'" ]] || false + + run dolt migrate + [ "$status" -eq "0" ] + [[ "$output" =~ "Migrating repository to the latest format" ]] || false + + run dolt migrate --push + [ "$status" -ne "0" ] + [[ "$output" =~ "Remote origin has been migrated" ]] || false + [[ "$output" =~ "Run 'dolt migrate --pull' to update refs" ]] || false + + run dolt migrate --pull + [ "$status" -eq "0" ] + + popd +} diff --git a/bats/compatibility/migration_test_files/setup_repo.sh b/bats/compatibility/migration_test_files/setup_repo.sh new file mode 100755 index 0000000000..9ff3645981 --- /dev/null +++ b/bats/compatibility/migration_test_files/setup_repo.sh @@ -0,0 +1,67 @@ +#!/bin/bash + +set -eo pipefail + +mkdir "$1" +cd "$1" + +dolt init + +dolt sql < Date: Mon, 13 Apr 2020 18:00:39 -0700 Subject: [PATCH 35/60] Skipping git-dolt bats tests on Windows due to flakiness --- bats/git-dolt.bats | 1 + 1 file changed, 1 insertion(+) diff --git a/bats/git-dolt.bats b/bats/git-dolt.bats index 0d291f8652..e1ca1a023a 100755 --- a/bats/git-dolt.bats +++ b/bats/git-dolt.bats @@ -13,6 +13,7 @@ setup() { dolt remote add test-remote $REMOTE dolt push test-remote master export DOLT_HEAD_COMMIT=`get_head_commit` + skiponwindows "git-dolt tests are flaky on Windows" } teardown() { From b71aa93cae938fcd8eb4fba170fc14d399ff962f Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Mon, 13 Apr 2020 20:48:53 -0700 Subject: [PATCH 36/60] First pass at auto commit via sessions Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sql.go | 2 +- go/libraries/doltcore/sqle/database.go | 22 +++++++-------- go/libraries/doltcore/sqle/dolt_session.go | 33 ++++++++++++++++++++-- 3 files changed, 42 insertions(+), 15 deletions(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index 3d51d5546c..bf81971fad 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -1304,7 +1304,7 @@ func printOKResult(ctx context.Context, iter sql.RowIter) error { } cli.Printf("Query OK, %d %s affected\n", okResult.RowsAffected, rowNoun) - if okResult.Info != "" { + if okResult.Info != nil { cli.Printf("%s\n", okResult.Info) } } diff --git a/go/libraries/doltcore/sqle/database.go b/go/libraries/doltcore/sqle/database.go index 1b2650d91f..9762f850a5 100644 --- a/go/libraries/doltcore/sqle/database.go +++ b/go/libraries/doltcore/sqle/database.go @@ -451,7 +451,7 @@ func (db *Database) GetRoot(ctx *sql.Context) (*doltdb.RootValue, error) { return nil, err } - dsess.dbRoots[db.name] = dbRoot{hashStr, newRoot} + dsess.dbRoots[db.name] = dbRoot{hashStr, newRoot, db.ddb, db.rsw} return newRoot, nil } } @@ -470,17 +470,17 @@ func (db *Database) SetRoot(ctx *sql.Context, newRoot *doltdb.RootValue) error { ctx.Session.Set(key, hashType, hashStr) dsess := DSessFromSess(ctx.Session) - dsess.dbRoots[db.name] = dbRoot{hashStr, newRoot} + dsess.dbRoots[db.name] = dbRoot{hashStr, newRoot, db.ddb, db.rsw} - if db.batchMode == autoCommit { - h, err := db.ddb.WriteRootValue(ctx, newRoot) - if err != nil { - return err - } - - db.defRoot = newRoot - return db.rsw.SetWorkingHash(ctx, h) - } + // if db.batchMode == autoCommit { + // h, err := db.ddb.WriteRootValue(ctx, newRoot) + // if err != nil { + // return err + // } + // + // db.defRoot = newRoot + // return db.rsw.SetWorkingHash(ctx, h) + // } return nil } diff --git a/go/libraries/doltcore/sqle/dolt_session.go b/go/libraries/doltcore/sqle/dolt_session.go index bab780c69d..2f266eadeb 100644 --- a/go/libraries/doltcore/sqle/dolt_session.go +++ b/go/libraries/doltcore/sqle/dolt_session.go @@ -15,6 +15,7 @@ package sqle import ( + "github.com/liquidata-inc/dolt/go/libraries/doltcore/env" "github.com/src-d/go-mysql-server/sql" "github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb" @@ -23,6 +24,8 @@ import ( type dbRoot struct { hashStr string root *doltdb.RootValue + ddb *doltdb.DoltDB + rsw env.RepoStateWriter } // DoltSession is the sql.Session implementation used by dolt. It is accessible through a *sql.Context instance @@ -33,7 +36,9 @@ type DoltSession struct { // DefaultDoltSession creates a DoltSession object with default values func DefaultDoltSession() *DoltSession { - return &DoltSession{sql.NewBaseSession(), make(map[string]dbRoot)} + sess := &DoltSession{sql.NewBaseSession(), make(map[string]dbRoot)} + sess.Set(sql.AutoCommitSessionVar, sql.Boolean, true) + return sess } // NewSessionWithDefaultRoot creates a DoltSession object from a standard sql.Session and 0 or more Database objects. @@ -49,13 +54,35 @@ func NewSessionWithDefaultRoots(sqlSess sql.Session, dbs ...*Database) (*DoltSes hashStr := h.String() - dbRoots[db.Name()] = dbRoot{hashStr: hashStr, root: defRoot} + dbRoots[db.Name()] = dbRoot{hashStr: hashStr, root: defRoot, rsw: db.rsw, ddb: db.ddb} } - return &DoltSession{sqlSess, dbRoots}, nil + sess := &DoltSession{sqlSess, dbRoots} + sess.Set(sql.AutoCommitSessionVar, sql.Boolean, true) + return sess, nil } // DSessFromSess retrieves a dolt session from a standard sql.Session func DSessFromSess(sess sql.Session) *DoltSession { return sess.(*DoltSession) } + +func (sess *DoltSession) CommitTransaction(ctx *sql.Context) error { + currentDb := sess.GetCurrentDatabase() + if currentDb == "" { + return sql.ErrNoDatabaseSelected.New() + } + + dbRoot, ok := sess.dbRoots[currentDb] + if !ok { + return sql.ErrDatabaseNotFound.New(currentDb) + } + + root := dbRoot.root + h, err := dbRoot.ddb.WriteRootValue(ctx, root) + if err != nil { + return err + } + + return dbRoot.rsw.SetWorkingHash(ctx, h) +} \ No newline at end of file From 77cff4584c4c96f052fc71d9775c90b67accfc97 Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Mon, 13 Apr 2020 21:16:54 -0700 Subject: [PATCH 37/60] Working auto commit and reload code Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sqlserver/server.go | 2 +- go/libraries/doltcore/sqle/database.go | 22 ++++++++++++---------- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/go/cmd/dolt/commands/sqlserver/server.go b/go/cmd/dolt/commands/sqlserver/server.go index 2dc72aa0c1..feb2679a83 100644 --- a/go/cmd/dolt/commands/sqlserver/server.go +++ b/go/cmd/dolt/commands/sqlserver/server.go @@ -162,7 +162,7 @@ func newSessionBuilder(sqlEngine *sqle.Engine) server.SessionBuilder { dbs := dbsAsDSQLDBs(sqlEngine.Catalog.AllDatabases()) for _, db := range dbs { - err := db.SetRoot(sqlCtx, db.GetDefaultRoot()) + err := db.LoadRootFromRepoState(sqlCtx) if err != nil { return nil, nil, nil, err } diff --git a/go/libraries/doltcore/sqle/database.go b/go/libraries/doltcore/sqle/database.go index 9762f850a5..05d5774a12 100644 --- a/go/libraries/doltcore/sqle/database.go +++ b/go/libraries/doltcore/sqle/database.go @@ -472,19 +472,21 @@ func (db *Database) SetRoot(ctx *sql.Context, newRoot *doltdb.RootValue) error { dsess := DSessFromSess(ctx.Session) dsess.dbRoots[db.name] = dbRoot{hashStr, newRoot, db.ddb, db.rsw} - // if db.batchMode == autoCommit { - // h, err := db.ddb.WriteRootValue(ctx, newRoot) - // if err != nil { - // return err - // } - // - // db.defRoot = newRoot - // return db.rsw.SetWorkingHash(ctx, h) - // } - return nil } +// LoadRootFromRepoState loads the root value from the repo state's working hash, then calls SetRoot with the loaded +// root value. +func (db *Database) LoadRootFromRepoState(ctx *sql.Context) error { + workingHash := db.rsr.WorkingHash() + root, err := db.ddb.ReadRootValue(ctx, workingHash) + if err != nil { + return err + } + + return db.SetRoot(ctx, root) +} + // DropTable drops the table with the name given func (db *Database) DropTable(ctx *sql.Context, tableName string) error { root, err := db.GetRoot(ctx) From cbf7fdb9efde4dbbe6717f37a5f1299fd56a6b57 Mon Sep 17 00:00:00 2001 From: Aaron Son Date: Tue, 14 Apr 2020 06:21:29 -0700 Subject: [PATCH 38/60] bats/creds.bat: Patch up for windows. Remove useless cat. --- bats/creds.bats | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bats/creds.bats b/bats/creds.bats index 3133e1fdef..c184cdb815 100644 --- a/bats/creds.bats +++ b/bats/creds.bats @@ -93,7 +93,7 @@ teardown() { } @test "can import cred from good jwk stdin" { - cat `batshelper creds/known-good.jwk` | dolt creds import + dolt creds import <`batshelper creds/known-good.jwk` } @test "import cred of corrupted jwk from file fails" { @@ -106,11 +106,11 @@ teardown() { } @test "import cred of corrupted jwk from stdin fails" { - run bash -c "cat `batshelper creds/known-truncated.jwk` | dolt creds import" + run dolt creds import <`batshelper creds/known-truncated.jwk` [ "$status" -eq 1 ] - run bash -c "cat `batshelper creds/known-decapitated.jwk` | dolt creds import" + run dolt creds import <`batshelper creds/known-decapitated.jwk` [ "$status" -eq 1 ] - run bash -c "echo -n | dolt creds import" + run dolt creds import Date: Tue, 14 Apr 2020 09:51:29 -0700 Subject: [PATCH 39/60] Reverted back to non-pointer receivers for Database, added repoStateWriter for all database constructors. Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sql.go | 28 +++--- go/cmd/dolt/commands/sqlserver/server.go | 10 +-- go/cmd/dolt/commands/sqlserver/server_test.go | 4 +- go/cmd/dolt/commands/sqlserver/sqlserver.go | 6 +- go/libraries/doltcore/sqle/common_test.go | 4 +- go/libraries/doltcore/sqle/database.go | 86 ++++++++----------- go/libraries/doltcore/sqle/dolt_session.go | 2 +- go/libraries/doltcore/sqle/indexes.go | 8 +- .../sqle/logictest/dolt/doltharness.go | 2 +- go/libraries/doltcore/sqle/schema_table.go | 2 +- go/libraries/doltcore/sqle/sqlbatch_test.go | 6 +- .../doltcore/sqle/table_editor_test.go | 2 +- go/libraries/doltcore/sqle/tables.go | 2 +- go/libraries/doltcore/sqle/testutil.go | 6 +- 14 files changed, 77 insertions(+), 91 deletions(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index bf81971fad..74f343750c 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -346,12 +346,12 @@ func execBatch(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*do return newRoots, nil } -func newDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) *dsqle.Database { - return dsqle.NewDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState) +func newDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database { + return dsqle.NewDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) } -func newBatchedDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) *dsqle.Database { - return dsqle.NewBatchedDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState) +func newBatchedDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database { + return dsqle.NewBatchedDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) } func execQuery(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, query string, format resultFormat) (map[string]*doltdb.RootValue, errhand.VerboseError) { @@ -383,12 +383,12 @@ func execQuery(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*do return newRoots, nil } -type createDBFunc func(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) *dsqle.Database +type createDBFunc func(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database // CollectDBs takes a MultiRepoEnv and creates Database objects from each environment and returns a slice of these // objects. -func CollectDBs(mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, createDB createDBFunc) []*dsqle.Database { - dbs := make([]*dsqle.Database, 0, len(mrEnv)) +func CollectDBs(mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, createDB createDBFunc) []dsqle.Database { + dbs := make([]dsqle.Database, 0, len(mrEnv)) _ = mrEnv.Iter(func(name string, dEnv *env.DoltEnv) (stop bool, err error) { root := roots[name] db := createDB(name, root, dEnv) @@ -906,7 +906,7 @@ func (s *stats) shouldFlush() bool { } func flushBatchedEdits(ctx *sql.Context, se *sqlEngine) error { - err := se.iterDBs(func(_ string, db *dsqle.Database) (bool, error) { + err := se.iterDBs(func(_ string, db dsqle.Database) (bool, error) { err := db.Flush(ctx) if err != nil { @@ -1071,7 +1071,7 @@ const ( ) type sqlEngine struct { - dbs map[string]*dsqle.Database + dbs map[string]dsqle.Database mrEnv env.MultiRepoEnv engine *sqle.Engine resultFormat resultFormat @@ -1080,11 +1080,11 @@ type sqlEngine struct { var ErrDBNotFoundKind = errors.NewKind("database '%s' not found") // sqlEngine packages up the context necessary to run sql queries against sqle. -func newSqlEngine(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, format resultFormat, dbs ...*dsqle.Database) (*sqlEngine, error) { +func newSqlEngine(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, format resultFormat, dbs ...dsqle.Database) (*sqlEngine, error) { engine := sqle.NewDefault() engine.AddDatabase(sql.NewInformationSchemaDatabase(engine.Catalog)) - nameToDB := make(map[string]*dsqle.Database) + nameToDB := make(map[string]dsqle.Database) for _, db := range dbs { nameToDB[db.Name()] = db root := roots[db.Name()] @@ -1110,17 +1110,17 @@ func newSqlEngine(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string] return &sqlEngine{nameToDB, mrEnv, engine, format}, nil } -func (se *sqlEngine) getDB(name string) (*dsqle.Database, error) { +func (se *sqlEngine) getDB(name string) (dsqle.Database, error) { db, ok := se.dbs[name] if !ok { - return nil, ErrDBNotFoundKind.New(name) + return dsqle.Database{}, ErrDBNotFoundKind.New(name) } return db, nil } -func (se *sqlEngine) iterDBs(cb func(name string, db *dsqle.Database) (stop bool, err error)) error { +func (se *sqlEngine) iterDBs(cb func(name string, db dsqle.Database) (stop bool, err error)) error { for name, db := range se.dbs { stop, err := cb(name, db) diff --git a/go/cmd/dolt/commands/sqlserver/server.go b/go/cmd/dolt/commands/sqlserver/server.go index feb2679a83..3ebaf01d52 100644 --- a/go/cmd/dolt/commands/sqlserver/server.go +++ b/go/cmd/dolt/commands/sqlserver/server.go @@ -186,15 +186,15 @@ func newSessionBuilder(sqlEngine *sqle.Engine) server.SessionBuilder { } } -func newAutoCommmitDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) *dsqle.Database { - return dsqle.NewAutoCommitDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) +func newAutoCommmitDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database { + return dsqle.NewDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) } -func dbsAsDSQLDBs(dbs []sql.Database) []*dsqle.Database { - dsqlDBs := make([]*dsqle.Database, 0, len(dbs)) +func dbsAsDSQLDBs(dbs []sql.Database) []dsqle.Database { + dsqlDBs := make([]dsqle.Database, 0, len(dbs)) for _, db := range dbs { - dsqlDB, ok := db.(*dsqle.Database) + dsqlDB, ok := db.(dsqle.Database) if ok { dsqlDBs = append(dsqlDBs, dsqlDB) diff --git a/go/cmd/dolt/commands/sqlserver/server_test.go b/go/cmd/dolt/commands/sqlserver/server_test.go index d8044a78b2..e52b55e68a 100644 --- a/go/cmd/dolt/commands/sqlserver/server_test.go +++ b/go/cmd/dolt/commands/sqlserver/server_test.go @@ -46,7 +46,7 @@ var ( func TestServerArgs(t *testing.T) { serverController := CreateServerController() go func() { - SqlServerImpl(context.Background(), "dolt sql-server", []string{ + startServer(context.Background(), "test", "dolt sql-server", []string{ "-H", "localhost", "-P", "15200", "-u", "username", @@ -84,7 +84,7 @@ func TestServerBadArgs(t *testing.T) { t.Run(strings.Join(test, " "), func(t *testing.T) { serverController := CreateServerController() go func(serverController *ServerController) { - SqlServerImpl(context.Background(), "dolt sql-server", test, env, serverController) + startServer(context.Background(), "test", "dolt sql-server", test, env, serverController) }(serverController) // In the event that a test fails, we need to prevent a test from hanging due to a running server err := serverController.WaitForStart() diff --git a/go/cmd/dolt/commands/sqlserver/sqlserver.go b/go/cmd/dolt/commands/sqlserver/sqlserver.go index f362a35caf..4b84c51052 100644 --- a/go/cmd/dolt/commands/sqlserver/sqlserver.go +++ b/go/cmd/dolt/commands/sqlserver/sqlserver.go @@ -97,12 +97,12 @@ func (cmd SqlServerCmd) RequiresRepo() bool { // Exec executes the command func (cmd SqlServerCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int { - return cmd.startServer(ctx, commandStr, args, dEnv, nil) + return startServer(ctx, commandStr, cmd.VersionStr, args, dEnv, nil) } -func (cmd SqlServerCmd) startServer(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv, serverController *ServerController) int { +func startServer(ctx context.Context, versionStr, commandStr string, args []string, dEnv *env.DoltEnv, serverController *ServerController) int { serverConfig := DefaultServerConfig() - serverConfig.Version = cmd.VersionStr + serverConfig.Version = versionStr ap := createArgParser(serverConfig) help, _ := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, sqlServerDocs, ap)) diff --git a/go/libraries/doltcore/sqle/common_test.go b/go/libraries/doltcore/sqle/common_test.go index 192d81176a..f57196e3d3 100644 --- a/go/libraries/doltcore/sqle/common_test.go +++ b/go/libraries/doltcore/sqle/common_test.go @@ -34,7 +34,7 @@ import ( // the targetSchema given is used to prepare all rows. func executeSelect(ctx context.Context, dEnv *env.DoltEnv, targetSch schema.Schema, root *doltdb.RootValue, query string) ([]row.Row, schema.Schema, error) { var err error - db := NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState) + db := NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) engine, sqlCtx, err := NewTestEngine(ctx, db, root) if err != nil { return nil, nil, err @@ -70,7 +70,7 @@ func executeSelect(ctx context.Context, dEnv *env.DoltEnv, targetSch schema.Sche // Runs the query given and returns the error (if any). func executeModify(ctx context.Context, ddb *doltdb.DoltDB, root *doltdb.RootValue, query string) (*doltdb.RootValue, error) { - db := NewDatabase("dolt", root, ddb, nil) + db := NewDatabase("dolt", root, ddb, nil, nil) engine, sqlCtx, err := NewTestEngine(ctx, db, root) if err != nil { diff --git a/go/libraries/doltcore/sqle/database.go b/go/libraries/doltcore/sqle/database.go index 05d5774a12..8aa595f6c8 100644 --- a/go/libraries/doltcore/sqle/database.go +++ b/go/libraries/doltcore/sqle/database.go @@ -46,7 +46,6 @@ var ErrSystemTableAlter = errors.NewKind("Cannot alter table %s: system tables c const ( batched commitBehavior = iota single - autoCommit ) type tableCache struct { @@ -112,19 +111,20 @@ type Database struct { tc *tableCache } -var _ sql.Database = (*Database)(nil) -var _ sql.VersionedDatabase = (*Database)(nil) -var _ sql.TableDropper = (*Database)(nil) -var _ sql.TableCreator = (*Database)(nil) -var _ sql.TableRenamer = (*Database)(nil) +var _ sql.Database = Database{} +var _ sql.VersionedDatabase = Database{} +var _ sql.TableDropper = Database{} +var _ sql.TableCreator = Database{} +var _ sql.TableRenamer = Database{} // NewDatabase returns a new dolt database to use in queries. -func NewDatabase(name string, defRoot *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader) *Database { - return &Database{ +func NewDatabase(name string, defRoot *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader, rsw env.RepoStateWriter) Database { + return Database{ name: name, defRoot: defRoot, ddb: ddb, rsr: rsr, + rsw: rsw, batchMode: single, tc: &tableCache{&sync.Mutex{}, make(map[*doltdb.RootValue]map[string]sql.Table)}, } @@ -132,50 +132,36 @@ func NewDatabase(name string, defRoot *doltdb.RootValue, ddb *doltdb.DoltDB, rsr // NewBatchedDatabase returns a new dolt database executing in batch insert mode. Integrators must call Flush() to // commit any outstanding edits. -func NewBatchedDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader) *Database { - return &Database{ - name: name, - defRoot: root, - ddb: ddb, - rsr: rsr, - batchMode: batched, - tc: &tableCache{&sync.Mutex{}, make(map[*doltdb.RootValue]map[string]sql.Table)}, - } -} - -// NewAutoCommitDatabase returns a new dolt database executing in autocommit mode. Every write operation will update -// the working set with the new root value. -func NewAutoCommitDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader, rsw env.RepoStateWriter) *Database { - return &Database{ +func NewBatchedDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader, rsw env.RepoStateWriter) Database { + return Database{ name: name, defRoot: root, ddb: ddb, rsr: rsr, rsw: rsw, - batchMode: autoCommit, + batchMode: batched, tc: &tableCache{&sync.Mutex{}, make(map[*doltdb.RootValue]map[string]sql.Table)}, } } - // Name returns the name of this database, set at creation time. -func (db *Database) Name() string { +func (db Database) Name() string { return db.name } // GetDefaultRoot returns the default root of the database that is used by new sessions. -func (db *Database) GetDefaultRoot() *doltdb.RootValue { +func (db Database) GetDefaultRoot() *doltdb.RootValue { return db.defRoot } // GetDoltDB gets the underlying DoltDB of the Database -func (db *Database) GetDoltDB() *doltdb.DoltDB { +func (db Database) GetDoltDB() *doltdb.DoltDB { return db.ddb } // GetTableInsensitive is used when resolving tables in queries. It returns a best-effort case-insensitive match for // the table name given. -func (db *Database) GetTableInsensitive(ctx *sql.Context, tblName string) (sql.Table, bool, error) { +func (db Database) GetTableInsensitive(ctx *sql.Context, tblName string) (sql.Table, bool, error) { root, err := db.GetRoot(ctx) if err != nil { @@ -185,7 +171,7 @@ func (db *Database) GetTableInsensitive(ctx *sql.Context, tblName string) (sql.T return db.GetTableInsensitiveWithRoot(ctx, root, tblName) } -func (db *Database) GetTableInsensitiveWithRoot(ctx context.Context, root *doltdb.RootValue, tblName string) (sql.Table, bool, error) { +func (db Database) GetTableInsensitiveWithRoot(ctx context.Context, root *doltdb.RootValue, tblName string) (sql.Table, bool, error) { lwrName := strings.ToLower(tblName) if strings.HasPrefix(lwrName, DoltDiffTablePrefix) { tblName = tblName[len(DoltDiffTablePrefix):] @@ -217,7 +203,7 @@ func (db *Database) GetTableInsensitiveWithRoot(ctx context.Context, root *doltd } // GetTableInsensitiveAsOf implements sql.VersionedDatabase -func (db *Database) GetTableInsensitiveAsOf(ctx *sql.Context, tableName string, asOf interface{}) (sql.Table, bool, error) { +func (db Database) GetTableInsensitiveAsOf(ctx *sql.Context, tableName string, asOf interface{}) (sql.Table, bool, error) { root, err := db.rootAsOf(ctx, asOf) if err != nil { return nil, false, err @@ -230,7 +216,7 @@ func (db *Database) GetTableInsensitiveAsOf(ctx *sql.Context, tableName string, // rootAsOf returns the root of the DB as of the expression given, which may be nil in the case that it refers to an // expression before the first commit. -func (db *Database) rootAsOf(ctx *sql.Context, asOf interface{}) (*doltdb.RootValue, error) { +func (db Database) rootAsOf(ctx *sql.Context, asOf interface{}) (*doltdb.RootValue, error) { switch x := asOf.(type) { case string: return db.getRootForCommitRef(ctx, x) @@ -241,7 +227,7 @@ func (db *Database) rootAsOf(ctx *sql.Context, asOf interface{}) (*doltdb.RootVa } } -func (db *Database) getRootForTime(ctx *sql.Context, asOf time.Time) (*doltdb.RootValue, error) { +func (db Database) getRootForTime(ctx *sql.Context, asOf time.Time) (*doltdb.RootValue, error) { cs, err := doltdb.NewCommitSpec("HEAD", db.rsr.CWBHeadRef().String()) if err != nil { return nil, err @@ -283,7 +269,7 @@ func (db *Database) getRootForTime(ctx *sql.Context, asOf time.Time) (*doltdb.Ro return nil, nil } -func (db *Database) getRootForCommitRef(ctx *sql.Context, commitRef string) (*doltdb.RootValue, error) { +func (db Database) getRootForCommitRef(ctx *sql.Context, commitRef string) (*doltdb.RootValue, error) { cs, err := doltdb.NewCommitSpec(commitRef, db.rsr.CWBHeadRef().String()) if err != nil { return nil, err @@ -303,7 +289,7 @@ func (db *Database) getRootForCommitRef(ctx *sql.Context, commitRef string) (*do } // GetTableNamesAsOf implements sql.VersionedDatabase -func (db *Database) GetTableNamesAsOf(ctx *sql.Context, time interface{}) ([]string, error) { +func (db Database) GetTableNamesAsOf(ctx *sql.Context, time interface{}) ([]string, error) { root, err := db.rootAsOf(ctx, time) if err != nil { return nil, err @@ -320,7 +306,7 @@ func (db *Database) GetTableNamesAsOf(ctx *sql.Context, time interface{}) ([]str // getTable gets the table with the exact name given at the root value given. The database caches tables for all root // values to avoid doing schema lookups on every table lookup, which are expensive. -func (db *Database) getTable(ctx context.Context, root *doltdb.RootValue, tableName string) (sql.Table, bool, error) { +func (db Database) getTable(ctx context.Context, root *doltdb.RootValue, tableName string) (sql.Table, bool, error) { if table, ok := db.tc.Get(tableName, root); ok { return table, true, nil } @@ -368,7 +354,7 @@ func (db *Database) getTable(ctx context.Context, root *doltdb.RootValue, tableN // are filtered out. This method is used for queries that examine the schema of the database, e.g. show tables. Table // name resolution in queries is handled by GetTableInsensitive. Use GetAllTableNames for an unfiltered list of all // tables in user space. -func (db *Database) GetTableNames(ctx *sql.Context) ([]string, error) { +func (db Database) GetTableNames(ctx *sql.Context) ([]string, error) { tblNames, err := db.GetAllTableNames(ctx) if err != nil { return nil, err @@ -378,7 +364,7 @@ func (db *Database) GetTableNames(ctx *sql.Context) ([]string, error) { // GetAllTableNames returns all user-space tables, including system tables in user space // (e.g. dolt_docs, dolt_query_catalog). -func (db *Database) GetAllTableNames(ctx *sql.Context) ([]string, error) { +func (db Database) GetAllTableNames(ctx *sql.Context) ([]string, error) { root, err := db.GetRoot(ctx) if err != nil { @@ -402,13 +388,13 @@ func filterDoltInternalTables(tblNames []string) []string { return result } -func (db *Database) headKeyForDB() string { +func (db Database) headKeyForDB() string { return fmt.Sprintf("%s_head", db.name) } var hashType = sql.MustCreateString(query.Type_TEXT, 32, sql.Collation_ascii_bin) -func (db *Database) GetRoot(ctx *sql.Context) (*doltdb.RootValue, error) { +func (db Database) GetRoot(ctx *sql.Context) (*doltdb.RootValue, error) { dsess := DSessFromSess(ctx.Session) currRoot, dbRootOk := dsess.dbRoots[db.name] @@ -458,7 +444,7 @@ func (db *Database) GetRoot(ctx *sql.Context) (*doltdb.RootValue, error) { // Set a new root value for the database. Can be used if the dolt working // set value changes outside of the basic SQL execution engine. -func (db *Database) SetRoot(ctx *sql.Context, newRoot *doltdb.RootValue) error { +func (db Database) SetRoot(ctx *sql.Context, newRoot *doltdb.RootValue) error { h, err := newRoot.HashOf() if err != nil { @@ -477,7 +463,7 @@ func (db *Database) SetRoot(ctx *sql.Context, newRoot *doltdb.RootValue) error { // LoadRootFromRepoState loads the root value from the repo state's working hash, then calls SetRoot with the loaded // root value. -func (db *Database) LoadRootFromRepoState(ctx *sql.Context) error { +func (db Database) LoadRootFromRepoState(ctx *sql.Context) error { workingHash := db.rsr.WorkingHash() root, err := db.ddb.ReadRootValue(ctx, workingHash) if err != nil { @@ -488,7 +474,7 @@ func (db *Database) LoadRootFromRepoState(ctx *sql.Context) error { } // DropTable drops the table with the name given -func (db *Database) DropTable(ctx *sql.Context, tableName string) error { +func (db Database) DropTable(ctx *sql.Context, tableName string) error { root, err := db.GetRoot(ctx) if err != nil { @@ -517,7 +503,7 @@ func (db *Database) DropTable(ctx *sql.Context, tableName string) error { } // CreateTable creates a table with the name and schema given. -func (db *Database) CreateTable(ctx *sql.Context, tableName string, sch sql.Schema) error { +func (db Database) CreateTable(ctx *sql.Context, tableName string, sch sql.Schema) error { if doltdb.HasDoltPrefix(tableName) { return ErrReservedTableName.New(tableName) } @@ -541,7 +527,7 @@ func (db *Database) CreateTable(ctx *sql.Context, tableName string, sch sql.Sche } // Unlike the exported version, createTable doesn't enforce any table name checks. -func (db *Database) createTable(ctx *sql.Context, tableName string, sch sql.Schema) error { +func (db Database) createTable(ctx *sql.Context, tableName string, sch sql.Schema) error { root, err := db.GetRoot(ctx) if err != nil { @@ -583,7 +569,7 @@ func (db *Database) createTable(ctx *sql.Context, tableName string, sch sql.Sche } // RenameTable implements sql.TableRenamer -func (db *Database) RenameTable(ctx *sql.Context, oldName, newName string) error { +func (db Database) RenameTable(ctx *sql.Context, oldName, newName string) error { root, err := db.GetRoot(ctx) if err != nil { @@ -612,7 +598,7 @@ func (db *Database) RenameTable(ctx *sql.Context, oldName, newName string) error } // Flush flushes the current batch of outstanding changes and returns any errors. -func (db *Database) Flush(ctx *sql.Context) error { +func (db Database) Flush(ctx *sql.Context) error { root, err := db.GetRoot(ctx) if err != nil { @@ -641,7 +627,7 @@ func (db *Database) Flush(ctx *sql.Context) error { // CreateView implements sql.ViewCreator. Persists the view in the dolt database, so // it can exist in a sql session later. Returns sql.ErrExistingView if a view // with that name already exists. -func (db *Database) CreateView(ctx *sql.Context, name string, definition string) error { +func (db Database) CreateView(ctx *sql.Context, name string, definition string) error { tbl, err := GetOrCreateDoltSchemasTable(ctx, db) if err != nil { return err @@ -668,7 +654,7 @@ func (db *Database) CreateView(ctx *sql.Context, name string, definition string) // DropView implements sql.ViewDropper. Removes a view from persistence in the // dolt database. Returns sql.ErrNonExistingView if the view did not // exist. -func (db *Database) DropView(ctx *sql.Context, name string) error { +func (db Database) DropView(ctx *sql.Context, name string) error { stbl, found, err := db.GetTableInsensitive(ctx, doltdb.SchemasTableName) if err != nil { return err @@ -702,7 +688,7 @@ func (db *Database) DropView(ctx *sql.Context, name string) error { // there are I/O issues, but currently silently fails to register some // schema fragments if they don't parse, or if registries within the // `catalog` return errors. -func RegisterSchemaFragments(ctx *sql.Context, db *Database, root *doltdb.RootValue) error { +func RegisterSchemaFragments(ctx *sql.Context, db Database, root *doltdb.RootValue) error { stbl, found, err := db.GetTableInsensitiveWithRoot(ctx, root, doltdb.SchemasTableName) if err != nil { return err diff --git a/go/libraries/doltcore/sqle/dolt_session.go b/go/libraries/doltcore/sqle/dolt_session.go index 2f266eadeb..22733a24cb 100644 --- a/go/libraries/doltcore/sqle/dolt_session.go +++ b/go/libraries/doltcore/sqle/dolt_session.go @@ -42,7 +42,7 @@ func DefaultDoltSession() *DoltSession { } // NewSessionWithDefaultRoot creates a DoltSession object from a standard sql.Session and 0 or more Database objects. -func NewSessionWithDefaultRoots(sqlSess sql.Session, dbs ...*Database) (*DoltSession, error) { +func NewSessionWithDefaultRoots(sqlSess sql.Session, dbs ...Database) (*DoltSession, error) { dbRoots := make(map[string]dbRoot) for _, db := range dbs { defRoot := db.GetDefaultRoot() diff --git a/go/libraries/doltcore/sqle/indexes.go b/go/libraries/doltcore/sqle/indexes.go index cab31495e6..95b11647fa 100644 --- a/go/libraries/doltcore/sqle/indexes.go +++ b/go/libraries/doltcore/sqle/indexes.go @@ -29,11 +29,11 @@ import ( // IndexDriver implementation. Not ready for prime time. type DoltIndexDriver struct { - dbs map[string]*Database + dbs map[string]Database } -func NewDoltIndexDriver(dbs ...*Database) *DoltIndexDriver { - nameToDB := make(map[string]*Database) +func NewDoltIndexDriver(dbs ...Database) *DoltIndexDriver { + nameToDB := make(map[string]Database) for _, db := range dbs { nameToDB[db.Name()] = db } @@ -91,7 +91,7 @@ func (i *DoltIndexDriver) LoadAll(ctx *sql.Context, db, table string) ([]sql.Ind type doltIndex struct { sch schema.Schema tableName string - db *Database + db Database driver *DoltIndexDriver } diff --git a/go/libraries/doltcore/sqle/logictest/dolt/doltharness.go b/go/libraries/doltcore/sqle/logictest/dolt/doltharness.go index cfb517062d..bee31a05c0 100644 --- a/go/libraries/doltcore/sqle/logictest/dolt/doltharness.go +++ b/go/libraries/doltcore/sqle/logictest/dolt/doltharness.go @@ -335,7 +335,7 @@ func resetEnv(root *doltdb.RootValue) *doltdb.RootValue { } func sqlNewEngine(ddb *doltdb.DoltDB, root *doltdb.RootValue) (*sqle.Engine, error) { - db := dsql.NewDatabase("dolt", root, ddb, nil) + db := dsql.NewDatabase("dolt", root, ddb, nil, nil) engine := sqle.NewDefault() engine.AddDatabase(db) diff --git a/go/libraries/doltcore/sqle/schema_table.go b/go/libraries/doltcore/sqle/schema_table.go index 361be97624..cb9304bf61 100644 --- a/go/libraries/doltcore/sqle/schema_table.go +++ b/go/libraries/doltcore/sqle/schema_table.go @@ -35,7 +35,7 @@ func SchemasTableSchema() sql.Schema { } // GetOrCreateDoltSchemasTable returns the `dolt_schemas` table in `db`, creating it if it does not already exist. -func GetOrCreateDoltSchemasTable(ctx *sql.Context, db *Database) (*WritableDoltTable, error) { +func GetOrCreateDoltSchemasTable(ctx *sql.Context, db Database) (*WritableDoltTable, error) { tbl, found, err := db.GetTableInsensitive(ctx, doltdb.SchemasTableName) if err != nil { return nil, err diff --git a/go/libraries/doltcore/sqle/sqlbatch_test.go b/go/libraries/doltcore/sqle/sqlbatch_test.go index 629a4063ba..9e16aaf29f 100644 --- a/go/libraries/doltcore/sqle/sqlbatch_test.go +++ b/go/libraries/doltcore/sqle/sqlbatch_test.go @@ -63,7 +63,7 @@ func TestSqlBatchInserts(t *testing.T) { CreateTestDatabase(dEnv, t) root, _ := dEnv.WorkingRoot(ctx) - db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState) + db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) engine, sqlCtx, err := NewTestEngine(ctx, db, root) require.NoError(t, err) @@ -151,7 +151,7 @@ func TestSqlBatchInsertIgnoreReplace(t *testing.T) { CreateTestDatabase(dEnv, t) root, _ := dEnv.WorkingRoot(ctx) - db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState) + db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) engine, sqlCtx, err := NewTestEngine(ctx, db, root) require.NoError(t, err) @@ -189,7 +189,7 @@ func TestSqlBatchInsertErrors(t *testing.T) { CreateTestDatabase(dEnv, t) root, _ := dEnv.WorkingRoot(ctx) - db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState) + db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) engine, sqlCtx, err := NewTestEngine(ctx, db, root) require.NoError(t, err) diff --git a/go/libraries/doltcore/sqle/table_editor_test.go b/go/libraries/doltcore/sqle/table_editor_test.go index f9c07961dd..1c181a8f04 100644 --- a/go/libraries/doltcore/sqle/table_editor_test.go +++ b/go/libraries/doltcore/sqle/table_editor_test.go @@ -157,7 +157,7 @@ func TestTableEditor(t *testing.T) { ctx := NewTestSQLCtx(context.Background()) root, _ := dEnv.WorkingRoot(context.Background()) - db := NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState) + db := NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) err := db.SetRoot(ctx, root) require.NoError(t, err) peopleTable, _, err := db.GetTableInsensitive(ctx, "people") diff --git a/go/libraries/doltcore/sqle/tables.go b/go/libraries/doltcore/sqle/tables.go index 821d9c5621..e5d17a3c3e 100644 --- a/go/libraries/doltcore/sqle/tables.go +++ b/go/libraries/doltcore/sqle/tables.go @@ -35,7 +35,7 @@ type DoltTable struct { table *doltdb.Table sch schema.Schema sqlSch sql.Schema - db *Database + db Database } var _ sql.Table = (*DoltTable)(nil) diff --git a/go/libraries/doltcore/sqle/testutil.go b/go/libraries/doltcore/sqle/testutil.go index d933aa8e36..2508bab5fe 100644 --- a/go/libraries/doltcore/sqle/testutil.go +++ b/go/libraries/doltcore/sqle/testutil.go @@ -32,7 +32,7 @@ import ( // Executes all the SQL non-select statements given in the string against the root value given and returns the updated // root, or an error. Statements in the input string are split by `;\n` func ExecuteSql(dEnv *env.DoltEnv, root *doltdb.RootValue, statements string) (*doltdb.RootValue, error) { - db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState) + db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) engine, ctx, err := NewTestEngine(context.Background(), db, root) if err != nil { @@ -99,7 +99,7 @@ func NewTestSQLCtx(ctx context.Context) *sql.Context { } // NewTestEngine creates a new default engine, and a *sql.Context and initializes indexes and schema fragments. -func NewTestEngine(ctx context.Context, db *Database, root *doltdb.RootValue) (*sqle.Engine, *sql.Context, error) { +func NewTestEngine(ctx context.Context, db Database, root *doltdb.RootValue) (*sqle.Engine, *sql.Context, error) { engine := sqle.NewDefault() engine.AddDatabase(db) @@ -130,7 +130,7 @@ func NewTestEngine(ctx context.Context, db *Database, root *doltdb.RootValue) (* // Executes the select statement given and returns the resulting rows, or an error if one is encountered. // This uses the index functionality, which is not ready for prime time. Use with caution. func ExecuteSelect(ddb *doltdb.DoltDB, root *doltdb.RootValue, query string) ([]sql.Row, error) { - db := NewDatabase("dolt", root, ddb, nil) + db := NewDatabase("dolt", root, ddb, nil, nil) engine, ctx, err := NewTestEngine(context.Background(), db, root) if err != nil { return nil, err From 7377bf7840721cc28e245bd7881e84a4a98a6a39 Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Tue, 14 Apr 2020 10:25:29 -0700 Subject: [PATCH 40/60] Fixed batch mode stats bug and an error in constructor for non-batch database execution Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sql.go | 14 +++++++------- go/cmd/dolt/commands/sql_test.go | 2 +- .../doltcore/dtestutils/testcommands/command.go | 2 +- .../doltcore/envtestutils/rebase_tag_test.go | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index 74f343750c..d4aa560c96 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -355,7 +355,7 @@ func newBatchedDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEn } func execQuery(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, query string, format resultFormat) (map[string]*doltdb.RootValue, errhand.VerboseError) { - dbs := CollectDBs(mrEnv, roots, newBatchedDatabase) + dbs := CollectDBs(mrEnv, roots, newDatabase) se, err := newSqlEngine(sqlCtx, mrEnv, roots, format, dbs...) if err != nil { return nil, errhand.VerboseErrorFromError(err) @@ -1047,16 +1047,16 @@ func mergeResultIntoStats(statement sqlparser.Statement, rowIter sql.RowIter, s } else if err != nil { return err } else { - numRowsUpdated := row[0].(int64) - s.unflushedEdits += int(numRowsUpdated) - s.unprintedEdits += int(numRowsUpdated) + okResult := row[0].(sql.OkResult) + s.unflushedEdits += int(okResult.RowsAffected) + s.unprintedEdits += int(okResult.RowsAffected) switch statement.(type) { case *sqlparser.Insert: - s.rowsInserted += int(numRowsUpdated) + s.rowsInserted += int(okResult.RowsAffected) case *sqlparser.Delete: - s.rowsDeleted += int(numRowsUpdated) + s.rowsDeleted += int(okResult.RowsAffected) case *sqlparser.Update: - s.rowsUpdated += int(numRowsUpdated) + s.rowsUpdated += int(okResult.RowsAffected) } } } diff --git a/go/cmd/dolt/commands/sql_test.go b/go/cmd/dolt/commands/sql_test.go index cbcc5130aa..53d2a74938 100644 --- a/go/cmd/dolt/commands/sql_test.go +++ b/go/cmd/dolt/commands/sql_test.go @@ -369,7 +369,7 @@ func TestInsert(t *testing.T) { } for _, test := range tests { - t.Run(test.query, func(t *testing.T) { + t.Run(test.name, func(t *testing.T) { dEnv := createEnvWithSeedData(t) args := []string{"-q", test.query} diff --git a/go/libraries/doltcore/dtestutils/testcommands/command.go b/go/libraries/doltcore/dtestutils/testcommands/command.go index be4413209d..dee821b4e3 100644 --- a/go/libraries/doltcore/dtestutils/testcommands/command.go +++ b/go/libraries/doltcore/dtestutils/testcommands/command.go @@ -111,7 +111,7 @@ func (q Query) CommandString() string { return fmt.Sprintf("query %s", q.Query) func (q Query) Exec(t *testing.T, dEnv *env.DoltEnv) error { root, err := dEnv.WorkingRoot(context.Background()) require.NoError(t, err) - sqlDb := dsqle.NewDatabase("dolt", root, dEnv.DoltDB, nil) + sqlDb := dsqle.NewDatabase("dolt", root, dEnv.DoltDB, nil, nil) engine, sqlCtx, err := dsqle.NewTestEngine(context.Background(), sqlDb, root) require.NoError(t, err) diff --git a/go/libraries/doltcore/envtestutils/rebase_tag_test.go b/go/libraries/doltcore/envtestutils/rebase_tag_test.go index 83fbb895e1..95886776b1 100644 --- a/go/libraries/doltcore/envtestutils/rebase_tag_test.go +++ b/go/libraries/doltcore/envtestutils/rebase_tag_test.go @@ -531,7 +531,7 @@ func checkSchema(t *testing.T, r *doltdb.RootValue, tableName string, expectedSc } func checkRows(t *testing.T, ddb *doltdb.DoltDB, root *doltdb.RootValue, tableName string, sch schema.Schema, selectQuery string, expectedRows []row.Row) { - sqlDb := dsqle.NewDatabase("dolt", root, ddb, nil) + sqlDb := dsqle.NewDatabase("dolt", root, ddb, nil, nil) engine, sqlCtx, err := dsqle.NewTestEngine(context.Background(), sqlDb, root) require.NoError(t, err) From 6465fdbd3fdf8658c1113951f953e8bc7ccecd73 Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Tue, 14 Apr 2020 10:26:21 -0700 Subject: [PATCH 41/60] Moved an identifier up Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sql.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index d4aa560c96..830f0c0441 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -346,6 +346,8 @@ func execBatch(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*do return newRoots, nil } +type createDBFunc func(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database + func newDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database { return dsqle.NewDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter()) } @@ -383,8 +385,6 @@ func execQuery(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*do return newRoots, nil } -type createDBFunc func(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database - // CollectDBs takes a MultiRepoEnv and creates Database objects from each environment and returns a slice of these // objects. func CollectDBs(mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, createDB createDBFunc) []dsqle.Database { From 22d27adbc5ff9f9d1ffc5c2ccc2aeacd1d88aa9b Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Tue, 14 Apr 2020 10:27:20 -0700 Subject: [PATCH 42/60] Formatting Signed-off-by: Zach Musgrave --- go/cmd/dolt/commands/sql.go | 2 +- go/libraries/doltcore/env/repo_state.go | 2 +- go/libraries/doltcore/sqle/dolt_session.go | 5 +++-- .../doltcore/table/untyped/nullprinter/nullprinter.go | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index 830f0c0441..03eb89ed93 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -19,7 +19,6 @@ import ( "bytes" "context" "fmt" - "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed/json" "io" "os" "path/filepath" @@ -46,6 +45,7 @@ import ( dsqle "github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle" "github.com/liquidata-inc/dolt/go/libraries/doltcore/table" "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/pipeline" + "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed/json" "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/untyped" "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/untyped/csv" "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/untyped/fwt" diff --git a/go/libraries/doltcore/env/repo_state.go b/go/libraries/doltcore/env/repo_state.go index 46ea113f3c..b9358f1b64 100644 --- a/go/libraries/doltcore/env/repo_state.go +++ b/go/libraries/doltcore/env/repo_state.go @@ -35,7 +35,7 @@ type RepoStateWriter interface { // SetCWBHeadRef(context.Context, ref.DoltRef) error // SetCWBHeadSpec(context.Context, *doltdb.CommitSpec) error SetWorkingHash(context.Context, hash.Hash) error -// SetStagedHash(context.Context, hash.Hash) error + // SetStagedHash(context.Context, hash.Hash) error } type BranchConfig struct { diff --git a/go/libraries/doltcore/sqle/dolt_session.go b/go/libraries/doltcore/sqle/dolt_session.go index 22733a24cb..94b14dea41 100644 --- a/go/libraries/doltcore/sqle/dolt_session.go +++ b/go/libraries/doltcore/sqle/dolt_session.go @@ -15,9 +15,10 @@ package sqle import ( - "github.com/liquidata-inc/dolt/go/libraries/doltcore/env" "github.com/src-d/go-mysql-server/sql" + "github.com/liquidata-inc/dolt/go/libraries/doltcore/env" + "github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb" ) @@ -85,4 +86,4 @@ func (sess *DoltSession) CommitTransaction(ctx *sql.Context) error { } return dbRoot.rsw.SetWorkingHash(ctx, h) -} \ No newline at end of file +} diff --git a/go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go b/go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go index 7395cb3428..cc678a7bc5 100644 --- a/go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go +++ b/go/libraries/doltcore/table/untyped/nullprinter/nullprinter.go @@ -27,7 +27,7 @@ const NullPrintingStage = "null printing" // NullPrinter is a utility to convert nil values in rows to a string representation. type NullPrinter struct { - Sch schema.Schema + Sch schema.Schema nullStr string } From d93310affe6f36ddef1b75b83a591a2ce36102d8 Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Tue, 14 Apr 2020 11:22:15 -0700 Subject: [PATCH 43/60] Fixed bats expected output, fixed display bug for 0 rows affected Signed-off-by: Zach Musgrave --- bats/1pk5col-ints.bats | 39 ++++++++++++++++++++++--------------- bats/1pk5col-strings.bats | 4 ++-- bats/2pk5cols-ints.bats | 6 +++--- bats/git-dolt.bats | 1 + bats/sql-multi-db.bats | 4 ++-- go/cmd/dolt/commands/sql.go | 2 +- 6 files changed, 32 insertions(+), 24 deletions(-) diff --git a/bats/1pk5col-ints.bats b/bats/1pk5col-ints.bats index 4edd476661..cc5c5acddf 100755 --- a/bats/1pk5col-ints.bats +++ b/bats/1pk5col-ints.bats @@ -114,18 +114,18 @@ if rows[2] != "9,8,7,6,5,4".split(","): @test "dolt sql all manner of inserts" { run dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,6,6,6,6,6)" [ "$status" -eq 0 ] - [[ "$output" =~ "| 1 |" ]] || false + [[ "$output" =~ "Query OK, 1 row affected" ]] || false run dolt sql -q "select * from test" [[ "$output" =~ "6" ]] || false run dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (1,7,7,7,7,7),(2,8,8,8,8,8)" [ "$status" -eq 0 ] - [[ "$output" =~ "| 2 |" ]] || false + [[ "$output" =~ "Query OK, 2 rows affected" ]] || false run dolt sql -q "select * from test" [[ "$output" =~ "7" ]] || false [[ "$output" =~ "8" ]] || false run dolt sql -q "insert into test (pk,c1,c3,c5) values (3,9,9,9)" [ "$status" -eq 0 ] - [[ "$output" =~ "| 1 |" ]] || false + [[ "$output" =~ "Query OK, 1 row affected" ]] || false run dolt sql -q "select * from test" [[ "$output" =~ "9" ]] || false run dolt sql -q "insert into test (c1,c3,c5) values (50,55,60)" @@ -148,7 +148,7 @@ if rows[2] != "9,8,7,6,5,4".split(","): @test "dolt sql insert no columns specified" { run dolt sql -q "insert into test values (0,0,0,0,0,0)" [ "$status" -eq 0 ] - [[ "$output" =~ "| 1 |" ]] || false + [[ "$output" =~ "Query OK, 1 row affected" ]] || false run dolt sql -q "select * from test" [[ "$output" =~ "0" ]] || false run dolt sql -q "insert into test values (4,1,2)" @@ -171,21 +171,23 @@ if rows[2] != "9,8,7,6,5,4".split(","): dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,6,6,6,6,6)" run dolt sql -q "replace into test (pk,c1,c2,c3,c4,c5) values (0,7,7,7,7,7),(1,8,8,8,8,8)" [ "$status" -eq 0 ] - [[ "${lines[1]}" =~ "updated" ]] || false + # No skip, but this is a bug in the output. Query produces the right result, but counts it incorrectly + [[ "$output" =~ "Query OK, 4 rows affected" ]] || false ## No skip, but this should report 3 but is reporting 4 [[ "${lines[3]}" =~ "3" ]] || false run dolt sql -q "select * from test" [[ "$output" =~ "7" ]] || false [[ "$output" =~ "8" ]] || false [[ ! "$output" =~ "6" ]] || false + skip "replace into output is incorrect" } @test "dolt sql insert and dolt sql select" { run dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5)" [ "$status" -eq 0 ] - [[ "$output" =~ "| 1 |" ]] || false + [[ "$output" =~ "Query OK, 1 row affected" ]] || false run dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (101,102,103,104,105,106),(1,6,7,8,9,10)" [ "$status" -eq 0 ] - [[ "$output" =~ "| 2 |" ]] || false + [[ "$output" =~ "Query OK, 2 rows affected" ]] || false run dolt sql -q "select * from test" [ "$status" -eq 0 ] [[ "$output" =~ \|[[:space:]]+c5 ]] || false @@ -287,22 +289,26 @@ if rows[2] != "9,8,7,6,5,4".split(","): dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)" run dolt sql -q "update test set c1=6,c2=7,c3=8,c4=9,c5=10 where pk=0" [ "$status" -eq 0 ] - [[ "$output" =~ "| 1 | 1 |" ]] || false + [[ "$output" =~ "Query OK, 1 row affected" ]] || false + [[ "$output" =~ "Rows matched: 1 Changed: 1 Warnings: 0" ]] || false run dolt sql -q "select * from test where pk=0" [ "$status" -eq 0 ] [[ "$output" =~ "10" ]] || false [[ ! "$output" =~ "|5" ]] || false dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (4,11,12,13,14,15)" run dolt sql -q "update test set c2=11,c3=11,c4=11,c5=11 where c1=11" + [[ "$output" =~ "Query OK, 2 rows affected" ]] || false + [[ "$output" =~ "Rows matched: 2 Changed: 2 Warnings: 0" ]] || false [ "$status" -eq 0 ] - [[ "$output" =~ "| 2 | 2 |" ]] || false + [[ "$output" =~ "" ]] || false run dolt sql -q "select * from test" [ "$status" -eq 0 ] [[ "$output" =~ "11" ]] || false [[ ! "$output" =~ "12" ]] || false run dolt sql -q "update test set c2=50,c3=50,c4=50,c5=50 where c1=50" [ "$status" -eq 0 ] - [[ "$output" =~ "| 0 | 0 |" ]] || false + [[ "$output" =~ "Query OK, 0 rows affected" ]] || false + [[ "$output" =~ "Rows matched: 0 Changed: 0 Warnings: 0" ]] || false run dolt sql -q "select * from test" [ "$status" -eq 0 ] [[ ! "$output" =~ "50" ]] || false @@ -314,7 +320,8 @@ if rows[2] != "9,8,7,6,5,4".split(","): [ "$output" = "unable to cast \"foo\" of type string to int64" ] run dolt sql -q "update test set c1=100,c2=100,c3=100,c4=100,c5=100 where pk>0" [ "$status" -eq 0 ] - [[ "$output" =~ "| 3 | 3 |" ]] || false + [[ "$output" =~ "Query OK, 3 rows affected" ]] || false + [[ "$output" =~ "Rows matched: 3 Changed: 3 Warnings: 0" ]] || false run dolt sql -q "select * from test" [ "$status" -eq 0 ] [[ "$output" =~ "100" ]] || false @@ -326,24 +333,24 @@ if rows[2] != "9,8,7,6,5,4".split(","): dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)" run dolt sql -q "delete from test where pk=2" [ "$status" -eq 0 ] - [[ "$output" =~ "| 1 |" ]] || false + [[ "$output" =~ "Query OK, 1 row affected" ]] || false run dolt sql -q "delete from test" [ "$status" -eq 0 ] - [[ "$output" =~ "| 2 |" ]] || false + [[ "$output" =~ "Query OK, 2 rows affected" ]] || false dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)" run dolt sql -q "delete from test where pk>0" [ "$status" -eq 0 ] - [[ "$output" =~ "| 2 |" ]] || false + [[ "$output" =~ "Query OK, 2 rows affected" ]] || false run dolt sql -q "delete from test where c1=1" [ "$status" -eq 0 ] - [[ "$output" =~ "| 1 |" ]] || false + [[ "$output" =~ "Query OK, 1 row affected" ]] || false dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)" run dolt sql -q "delete from test where c10=1" [ "$status" -eq 1 ] [ "$output" = "column \"c10\" could not be found in any table in scope" ] run dolt sql -q "delete from test where c1='foo'" [ "$status" -eq 0 ] - [[ "$output" =~ "| 0 |" ]] || false + [[ "$output" =~ "Query OK, 0 rows affected" ]] || false } @test "dolt checkout to put a table back to its checked in state" { diff --git a/bats/1pk5col-strings.bats b/bats/1pk5col-strings.bats index 497e2e5572..285861cdf8 100755 --- a/bats/1pk5col-strings.bats +++ b/bats/1pk5col-strings.bats @@ -78,7 +78,7 @@ teardown() { @test "interact with a strings type table with sql" { run dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values ('tim','is','super','duper','rad','fo sho')" [ "$status" -eq 0 ] - [[ "$output" =~ "| 1 |" ]] || false + [[ "$output" =~ "Query OK, 1 row affected" ]] || false run dolt sql -q "select * from test" [ "$status" -eq 0 ] [[ "$output" =~ "c5" ]] || false @@ -124,4 +124,4 @@ teardown() { run dolt sql <<< "insert into test (pk,c1) values ('test3', 'this \\\\'' should \\\\'' work')" [ "$status" -eq 0 ] [[ "$output" =~ "Rows inserted: 1" ]] -} \ No newline at end of file +} diff --git a/bats/2pk5cols-ints.bats b/bats/2pk5cols-ints.bats index c47e655916..170786a052 100755 --- a/bats/2pk5cols-ints.bats +++ b/bats/2pk5cols-ints.bats @@ -76,14 +76,14 @@ teardown() { @test "interact with a multiple primary key table with sql" { run dolt sql -q "insert into test (pk1,pk2,c1,c2,c3,c4,c5) values (0,0,6,6,6,6,6)" [ "$status" -eq 0 ] - [[ "$output" =~ "| 1 |" ]] || false + [[ "$output" =~ "Query OK, 1 row affected" ]] || false run dolt sql -q "select * from test" [ "$status" -eq 0 ] [[ "$output" =~ "c5" ]] || false [[ "$output" =~ "6" ]] || false run dolt sql -q "insert into test (pk1,pk2,c1,c2,c3,c4,c5) values (0,1,7,7,7,7,7),(1,0,8,8,8,8,8)" [ "$status" -eq 0 ] - [[ "$output" =~ "| 2 |" ]] || false + [[ "$output" =~ "Query OK, 2 rows affected" ]] || false run dolt sql -q "select * from test" [ "$status" -eq 0 ] [[ "$output" =~ "c5" ]] || false @@ -103,4 +103,4 @@ teardown() { run dolt sql -q "insert into test (c1,c2,c3,c4,c5) values (6,6,6,6,6)" [ "$status" -eq 1 ] [ "$output" = "column name 'pk1' is non-nullable but attempted to set default value of null" ] || false -} \ No newline at end of file +} diff --git a/bats/git-dolt.bats b/bats/git-dolt.bats index 0d291f8652..1293fe7a0e 100755 --- a/bats/git-dolt.bats +++ b/bats/git-dolt.bats @@ -24,6 +24,7 @@ teardown() { @test "git dolt install sets up a smudge filter in the current git repository" { init_git_repo + run git dolt install [ "$status" -eq 0 ] [[ "${lines[0]}" =~ "Installed git-dolt smudge filter" ]] || false diff --git a/bats/sql-multi-db.bats b/bats/sql-multi-db.bats index 3a4cff0ca4..bddc6f2b11 100644 --- a/bats/sql-multi-db.bats +++ b/bats/sql-multi-db.bats @@ -29,7 +29,7 @@ seed_repos_with_tables_with_use_statements() { } @test "sql multi-db test show databases" { - EXPECTED=$(echo -e "Database\nrepo1\nrepo2") + EXPECTED=$(echo -e "Database\ninformation_schema\nrepo1\nrepo2") run dolt sql -r csv --multi-db-dir ./ -q "SHOW DATABASES" [ "$status" -eq 0 ] [[ "$output" =~ "$EXPECTED" ]] || false @@ -82,4 +82,4 @@ seed_repos_with_tables_with_use_statements() { echo \"\"\"$output\"\"\" [ "$status" -eq 0 ] [[ "$output" =~ "$EXPECTED" ]] || false -} \ No newline at end of file +} diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index 03eb89ed93..ec116e7456 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -1299,7 +1299,7 @@ func printOKResult(ctx context.Context, iter sql.RowIter) error { if okResult, ok := row[0].(sql.OkResult); ok { rowNoun := "row" - if okResult.RowsAffected > 1 { + if okResult.RowsAffected != 1 { rowNoun = "rows" } cli.Printf("Query OK, %d %s affected\n", okResult.RowsAffected, rowNoun) From ac4c6f9bca09a206c215614a8253d26aef8c77b1 Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Tue, 14 Apr 2020 11:41:23 -0700 Subject: [PATCH 44/60] Updated to latest go-msyql-server, vitess Signed-off-by: Zach Musgrave --- go/go.mod | 6 ++---- go/go.sum | 14 ++++---------- 2 files changed, 6 insertions(+), 14 deletions(-) diff --git a/go/go.mod b/go/go.mod index 225af120c8..9307c94824 100644 --- a/go/go.mod +++ b/go/go.mod @@ -87,10 +87,8 @@ require ( replace github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi => ./gen/proto/dolt/services/eventsapi -replace github.com/src-d/go-mysql-server => github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200407175239-21fb18d4d9fd +replace github.com/src-d/go-mysql-server => github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414183459-0b63a0868140 -//replace github.com/src-d/go-mysql-server => ../../go-mysql-server - -replace vitess.io/vitess => github.com/liquidata-inc/vitess v0.0.0-20200407071440-54a487aaf7d9 +replace vitess.io/vitess => github.com/liquidata-inc/vitess v0.0.0-20200413233505-a88cc54bd1ee go 1.13 diff --git a/go/go.sum b/go/go.sum index c7dc307bfe..88a5a8fe68 100644 --- a/go/go.sum +++ b/go/go.sum @@ -359,22 +359,16 @@ github.com/krishicks/yaml-patch v0.0.10/go.mod h1:Sm5TchwZS6sm7RJoyg87tzxm2ZcKzd github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.3.0 h1:/qkRGz8zljWiDcFvgpwUpwIAPu3r07TDvs3Rws+o/pU= github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200330231002-2ac5a85cf8d6 h1:iKET+xfMh3NaiiIbrMBLi+MJ9hwmm++7DBtPGfarf50= -github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200330231002-2ac5a85cf8d6/go.mod h1:TCTrDbzIA05e8zV3SW+nsjc1LCR58GRSOIcF32lJ+Qc= -github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200403150612-34c67410dcd4 h1:UIksBT7bRENT38ErKSz+auGLc7a5tDpCHwNhuajoJbU= -github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200403150612-34c67410dcd4/go.mod h1:TCTrDbzIA05e8zV3SW+nsjc1LCR58GRSOIcF32lJ+Qc= -github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200403171307-83ac7e7158e2 h1:l4mXLvgHMoihWuEqcmcJKEvQtAccHxhsKwkVn/okoxc= -github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200403171307-83ac7e7158e2/go.mod h1:TCTrDbzIA05e8zV3SW+nsjc1LCR58GRSOIcF32lJ+Qc= -github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200407175239-21fb18d4d9fd h1:SGGh7+XPqPYw3LaIK4VUvy/81Za1Y3p29lh4WDMtXh0= -github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200407175239-21fb18d4d9fd/go.mod h1:xu1cUi3vfWVJZ/9mQl9f8sdfJGobnS7kIucM3lfWIPk= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414183459-0b63a0868140 h1:rxT0Pkt2ZLS0P4m8scQ3TATRjKYcntF6F0X5/yHcIDg= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414183459-0b63a0868140/go.mod h1:tK/saWoda2x+KXyGsdVariMdfVOsjmRgQF2pbl4Mr1E= github.com/liquidata-inc/ishell v0.0.0-20190514193646-693241f1f2a0 h1:phMgajKClMUiIr+hF2LGt8KRuUa2Vd2GI1sNgHgSXoU= github.com/liquidata-inc/ishell v0.0.0-20190514193646-693241f1f2a0/go.mod h1:YC1rI9k5gx8D02ljlbxDfZe80s/iq8bGvaaQsvR+qxs= github.com/liquidata-inc/mmap-go v1.0.3 h1:2LndAeAtup9rpvUmu4wZSYCsjCQ0Zpc+NqE+6+PnT7g= github.com/liquidata-inc/mmap-go v1.0.3/go.mod h1:w0doE7jfkuDEZyxb/zD3VWnRaQBYx1uDTS816kH8HoY= github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15 h1:H3RwcYfzkdW4kFh7znTUopcX3XZqnFXm6pcmxSy0mNo= github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15/go.mod h1:kKRVtyuomkqz15YFRpS0OT8kpsU8y/F3jyiZtvALdKU= -github.com/liquidata-inc/vitess v0.0.0-20200407071440-54a487aaf7d9 h1:eaE6IFxMviaDSNFaKlTbNPA/+0Vhj/XgV6lG2SaoAWM= -github.com/liquidata-inc/vitess v0.0.0-20200407071440-54a487aaf7d9/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY= +github.com/liquidata-inc/vitess v0.0.0-20200413233505-a88cc54bd1ee h1:r8ApUMNHHEyzRhPbuIHrWbr7FOTW4Yo5Sm1HpOEzPrQ= +github.com/liquidata-inc/vitess v0.0.0-20200413233505-a88cc54bd1ee/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= From c8436c172144565dd85efb4462ce46a443d6a3ab Mon Sep 17 00:00:00 2001 From: Brian Hendriks Date: Tue, 14 Apr 2020 13:18:27 -0700 Subject: [PATCH 45/60] Help Fix (#576) --- bats/no-repo.bats | 4 +- go/cmd/dolt/cli/command.go | 13 +-- go/cmd/dolt/cli/command_test.go | 13 ++- go/libraries/utils/argparser/parser.go | 22 +++-- go/libraries/utils/argparser/parser_test.go | 92 +++++++++++++++++++++ 5 files changed, 129 insertions(+), 15 deletions(-) create mode 100644 go/libraries/utils/argparser/parser_test.go diff --git a/bats/no-repo.bats b/bats/no-repo.bats index 7d008248f8..713ecac498 100755 --- a/bats/no-repo.bats +++ b/bats/no-repo.bats @@ -226,6 +226,6 @@ NOT_VALID_REPO_ERROR="The current directory is not a valid dolt repository." @test "all versions of help work outside a repository" { dolt checkout --help dolt checkout -help - skip "No dashes in front of help segfaults right now" - dolt checkout help + run dolt checkout help + [ "$status" -ne 0 ] } diff --git a/go/cmd/dolt/cli/command.go b/go/cmd/dolt/cli/command.go index 6023c83b38..d3e948fd8d 100644 --- a/go/cmd/dolt/cli/command.go +++ b/go/cmd/dolt/cli/command.go @@ -27,14 +27,15 @@ import ( ) func isHelp(str string) bool { - switch { - case str == "-h": - return true - case strings.TrimLeft(str, "- ") == "help": - return true + str = strings.TrimSpace(str) + + if str[0] != '-' { + return false } - return false + str = strings.ToLower(strings.TrimLeft(str, "- ")) + + return str == "h" || str == "help" } func hasHelpFlag(args []string) bool { diff --git a/go/cmd/dolt/cli/command_test.go b/go/cmd/dolt/cli/command_test.go index d3f89db4d7..b7c36e7cd5 100644 --- a/go/cmd/dolt/cli/command_test.go +++ b/go/cmd/dolt/cli/command_test.go @@ -20,9 +20,10 @@ import ( "strings" "testing" - "github.com/liquidata-inc/dolt/go/libraries/utils/filesys" + "github.com/stretchr/testify/assert" "github.com/liquidata-inc/dolt/go/libraries/doltcore/env" + "github.com/liquidata-inc/dolt/go/libraries/utils/filesys" ) const ( @@ -128,3 +129,13 @@ func runCommand(root Command, commandLine string) int { return root.Exec(context.Background(), appName, tokens[1:], nil) } + +func TestHasHelpFlag(t *testing.T) { + assert.False(t, hasHelpFlag([]string{})) + assert.False(t, hasHelpFlag([]string{"help"})) + assert.True(t, hasHelpFlag([]string{"--help"})) + assert.True(t, hasHelpFlag([]string{"-h"})) + assert.False(t, hasHelpFlag([]string{"--param", "value", "--flag", "help", "arg2", "arg3"})) + assert.True(t, hasHelpFlag([]string{"--param", "value", "-f", "--help", "arg1", "arg2"})) + assert.True(t, hasHelpFlag([]string{"--param", "value", "--flag", "-h", "arg1", "arg2"})) +} diff --git a/go/libraries/utils/argparser/parser.go b/go/libraries/utils/argparser/parser.go index 0f3e9016a0..fc0633312a 100644 --- a/go/libraries/utils/argparser/parser.go +++ b/go/libraries/utils/argparser/parser.go @@ -84,32 +84,42 @@ func (ap *ArgParser) SupportOption(opt *Option) { } // Adds support for a new flag (argument with no value). See SupportOpt for details on params. -func (ap *ArgParser) SupportsFlag(name, abbrev, desc string) { +func (ap *ArgParser) SupportsFlag(name, abbrev, desc string) *ArgParser { opt := &Option{name, abbrev, "", OptionalFlag, desc, nil} ap.SupportOption(opt) + + return ap } // Adds support for a new string argument with the description given. See SupportOpt for details on params. -func (ap *ArgParser) SupportsString(name, abbrev, valDesc, desc string) { +func (ap *ArgParser) SupportsString(name, abbrev, valDesc, desc string) *ArgParser { opt := &Option{name, abbrev, valDesc, OptionalValue, desc, nil} ap.SupportOption(opt) + + return ap } -func (ap *ArgParser) SupportsValidatedString(name, abbrev, valDesc, desc string, validator ValidationFunc) { +func (ap *ArgParser) SupportsValidatedString(name, abbrev, valDesc, desc string, validator ValidationFunc) *ArgParser { opt := &Option{name, abbrev, valDesc, OptionalValue, desc, validator} ap.SupportOption(opt) + + return ap } // Adds support for a new uint argument with the description given. See SupportOpt for details on params. -func (ap *ArgParser) SupportsUint(name, abbrev, valDesc, desc string) { +func (ap *ArgParser) SupportsUint(name, abbrev, valDesc, desc string) *ArgParser { opt := &Option{name, abbrev, valDesc, OptionalValue, desc, isUintStr} ap.SupportOption(opt) + + return ap } // Adds support for a new int argument with the description given. See SupportOpt for details on params. -func (ap *ArgParser) SupportsInt(name, abbrev, valDesc, desc string) { +func (ap *ArgParser) SupportsInt(name, abbrev, valDesc, desc string) *ArgParser { opt := &Option{name, abbrev, valDesc, OptionalValue, desc, isIntStr} ap.SupportOption(opt) + + return ap } func splitOption(optStr string) (string, *string) { @@ -135,7 +145,7 @@ func splitOption(optStr string) (string, *string) { // methods. Any unrecognized arguments or incorrect types will result in an appropriate error being returned. If the // universal --help or -h flag is found, an ErrHelp error is returned. func (ap *ArgParser) Parse(args []string) (*ArgParseResults, error) { - var list []string + list := make([]string, 0, 16) results := make(map[string]string) i := 0 diff --git a/go/libraries/utils/argparser/parser_test.go b/go/libraries/utils/argparser/parser_test.go new file mode 100644 index 0000000000..3a0a6cd0d0 --- /dev/null +++ b/go/libraries/utils/argparser/parser_test.go @@ -0,0 +1,92 @@ +// Copyright 2020 Liquidata, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package argparser + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestArgParser(t *testing.T) { + tests := []struct { + ap *ArgParser + args []string + expectedErr error + expectedOptions map[string]string + expectedArgs []string + }{ + { + NewArgParser(), + []string{}, + nil, + map[string]string{}, + []string{}, + }, + { + NewArgParser(), + []string{"arg1", "arg2"}, + nil, + map[string]string{}, + []string{"arg1", "arg2"}, + }, + { + NewArgParser(), + []string{"--unknown_flag"}, + UnknownArgumentParam{"unknown_flag"}, + map[string]string{}, + []string{}, + }, + { + NewArgParser(), + []string{"--help"}, + ErrHelp, + map[string]string{}, + []string{}, + }, + { + NewArgParser(), + []string{"-h"}, + ErrHelp, + map[string]string{}, + []string{}, + }, + { + NewArgParser(), + []string{"help"}, + nil, + map[string]string{}, + []string{"help"}, + }, + { + NewArgParser().SupportsString("param", "p", "", ""), + []string{"--param", "value", "arg1"}, + nil, + map[string]string{"param": "value"}, + []string{"arg1"}, + }, + } + + for _, test := range tests { + apr, err := test.ap.Parse(test.args) + require.Equal(t, test.expectedErr, err) + + if err == nil { + assert.Equal(t, test.expectedOptions, apr.options) + assert.Equal(t, test.expectedArgs, apr.args) + } + } +} From 3a3958c44107bda2fe1a5b4cf4b0ffd9a641727b Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Tue, 14 Apr 2020 13:28:32 -0700 Subject: [PATCH 46/60] code review changes --- go/cmd/dolt/commands/migrate.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/go/cmd/dolt/commands/migrate.go b/go/cmd/dolt/commands/migrate.go index c6b84b1057..88844e3c8e 100644 --- a/go/cmd/dolt/commands/migrate.go +++ b/go/cmd/dolt/commands/migrate.go @@ -123,7 +123,7 @@ func migrateLocalRepo(ctx context.Context, dEnv *env.DoltEnv) error { if !remoteMigrated { cli.Println(fmt.Sprintf("Remote %s has not been migrated", remoteName)) - cli.Println(fmt.Sprintf("Run 'dolt migrate --push' %s to update remote", remoteName)) + cli.Println(fmt.Sprintf("Run 'dolt migrate --push %s' to update remote", remoteName)) } else { cli.Println(fmt.Sprintf("Remote %s has been migrated", remoteName)) cli.Println(fmt.Sprintf("Run 'dolt migrate --pull %s' to update refs", remoteName)) @@ -191,7 +191,7 @@ func pushMigratedRepo(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.Arg return err } - cli.Println(color.YellowString(fmt.Sprintf("Pushing migrated branch %s to %s", branch.String(), remoteName))) + cli.Println(color.BlueString(fmt.Sprintf("Pushing migrated branch %s to %s", branch.String(), remoteName))) mode := ref.RefUpdateMode{Force: true} err = pushToRemoteBranch(ctx, dEnv, mode, src, dest, remoteRef, dEnv.DoltDB, destDB, remote) From b1efc94769ed2bbef7b6e1c05bf6f28602a239a1 Mon Sep 17 00:00:00 2001 From: Zach Musgrave Date: Tue, 14 Apr 2020 13:57:28 -0700 Subject: [PATCH 47/60] Fixed a typo Signed-off-by: Zach Musgrave --- bats/1pk5col-ints.bats | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bats/1pk5col-ints.bats b/bats/1pk5col-ints.bats index cc5c5acddf..fac09b3853 100755 --- a/bats/1pk5col-ints.bats +++ b/bats/1pk5col-ints.bats @@ -297,10 +297,9 @@ if rows[2] != "9,8,7,6,5,4".split(","): [[ ! "$output" =~ "|5" ]] || false dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (4,11,12,13,14,15)" run dolt sql -q "update test set c2=11,c3=11,c4=11,c5=11 where c1=11" + [ "$status" -eq 0 ] [[ "$output" =~ "Query OK, 2 rows affected" ]] || false [[ "$output" =~ "Rows matched: 2 Changed: 2 Warnings: 0" ]] || false - [ "$status" -eq 0 ] - [[ "$output" =~ "" ]] || false run dolt sql -q "select * from test" [ "$status" -eq 0 ] [[ "$output" =~ "11" ]] || false From a40ecccf4110e08e3798d7ee6df3acb733419005 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Tue, 14 Apr 2020 14:13:20 -0700 Subject: [PATCH 48/60] streaming map edits --- bats/compatibility/corona-virus-test.sh | 4 +-- go/libraries/doltcore/rebase/rebase_tag.go | 25 ++++++++----- .../table/typed/noms/noms_map_updater.go | 36 +++++++++++++++++++ 3 files changed, 55 insertions(+), 10 deletions(-) diff --git a/bats/compatibility/corona-virus-test.sh b/bats/compatibility/corona-virus-test.sh index 68b7c60ec8..b42aa07d34 100755 --- a/bats/compatibility/corona-virus-test.sh +++ b/bats/compatibility/corona-virus-test.sh @@ -60,7 +60,7 @@ function export_tables() { places do dolt table export "$table" "$table$1.csv" - dolt sql -r csv -q "select * from $table" > "$table$1.sql.csv" + dolt sql -r csv -q "select * from $table" | sed 's///g' > "$table$1.sql.csv" done } @@ -99,7 +99,7 @@ local_bin="`pwd`"/"$bin" PATH="$local_bin":"$PATH" dolt clone Liquidata/corona-virus pushd "corona-virus" PATH="$local_bin":"$PATH" export_tables "-pre" -dolt migrate +time dolt migrate export_tables "-post" diff_tables echo "success!" diff --git a/go/libraries/doltcore/rebase/rebase_tag.go b/go/libraries/doltcore/rebase/rebase_tag.go index 9fb20a28c5..371c16490a 100644 --- a/go/libraries/doltcore/rebase/rebase_tag.go +++ b/go/libraries/doltcore/rebase/rebase_tag.go @@ -17,6 +17,7 @@ package rebase import ( "context" "fmt" + "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed/noms" "time" "github.com/liquidata-inc/dolt/go/libraries/doltcore/diff" @@ -404,7 +405,7 @@ func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParent return nil, err } - rebasedRows, err := replayRowDiffs(ctx, rebasedSch, rows, parentRows, rebasedParentRows, tableMapping) + rebasedRows, err := replayRowDiffs(ctx, rebasedParentRoot.VRW(), rebasedSch, rows, parentRows, rebasedParentRows, tableMapping) if err != nil { return nil, err @@ -442,7 +443,7 @@ func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParent return newRoot, nil } -func replayRowDiffs(ctx context.Context, rSch schema.Schema, rows, parentRows, rebasedParentRows types.Map, tagMapping map[uint64]uint64) (types.Map, error) { +func replayRowDiffs(ctx context.Context, vrw types.ValueReadWriter, rSch schema.Schema, rows, parentRows, rebasedParentRows types.Map, tagMapping map[uint64]uint64) (types.Map, error) { unmappedTags := set.NewUint64Set(rSch.GetAllCols().Tags) tm := make(map[uint64]uint64) @@ -454,8 +455,7 @@ func replayRowDiffs(ctx context.Context, rSch schema.Schema, rows, parentRows, r tm[t] = t } - // we will apply modified differences to the rebasedParent - rebasedRowEditor := rebasedParentRows.Edit() + nmu := noms.NewNomsMapUpdater(ctx, vrw, rebasedParentRows, rSch, func(stats types.AppliedEditStats) {}) ad := diff.NewAsyncDiffer(diffBufSize) // get all differences (including merges) between original commit and its parent @@ -486,16 +486,25 @@ func replayRowDiffs(ctx context.Context, rSch schema.Schema, rows, parentRows, r switch d.ChangeType { case types.DiffChangeAdded: - rebasedRowEditor.Set(key, newVal) + err = nmu.WriteEdit(ctx, key, newVal) case types.DiffChangeRemoved: - rebasedRowEditor.Remove(key) + err = nmu.WriteEdit(ctx, key, nil) case types.DiffChangeModified: - rebasedRowEditor.Set(key, newVal) + err = nmu.WriteEdit(ctx, key, newVal) + } + + if err != nil { + return types.EmptyMap, err } } } - return rebasedRowEditor.Map(ctx) + err := nmu.Close(ctx) + if err != nil { + return types.EmptyMap, err + } + + return *nmu.GetMap(), nil } func dropValsForDeletedColumns(ctx context.Context, nbf *types.NomsBinFormat, rows types.Map, sch, parentSch schema.Schema) (types.Map, error) { diff --git a/go/libraries/doltcore/table/typed/noms/noms_map_updater.go b/go/libraries/doltcore/table/typed/noms/noms_map_updater.go index ab5cd9a933..77cbc8ced6 100644 --- a/go/libraries/doltcore/table/typed/noms/noms_map_updater.go +++ b/go/libraries/doltcore/table/typed/noms/noms_map_updater.go @@ -132,6 +132,42 @@ func (nmu *NomsMapUpdater) WriteRow(ctx context.Context, r row.Row) error { return nil } + +// WriteRow will write a row to a table +func (nmu *NomsMapUpdater) WriteEdit(ctx context.Context, pk types.LesserValuable, fieldVals types.Valuable) error { + if nmu.acc == nil { + return errors.New("Attempting to write after closing.") + } + + if err := nmu.ae.Get(); err != nil { + return err + } + + err := func() error { + nmu.acc.AddEdit(pk, fieldVals) + nmu.count++ + + if nmu.count%maxEdits == 0 { + edits, err := nmu.acc.FinishedEditing() + + if err != nil { + return err + } + + nmu.mapChan <- edits + nmu.acc = types.CreateEditAccForMapEdits(nmu.vrw.Format()) + } + + return nil + }() + + if err != nil { + return err + } + + return nil +} + // Close should flush all writes, release resources being held func (nmu *NomsMapUpdater) Close(ctx context.Context) error { if nmu.result != nil { From 03c353335f7b585ad207cfbeddcbe3d8a976131e Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Tue, 14 Apr 2020 14:28:00 -0700 Subject: [PATCH 49/60] repo fmt --- go/libraries/doltcore/rebase/rebase_tag.go | 2 +- go/libraries/doltcore/table/typed/noms/noms_map_updater.go | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/go/libraries/doltcore/rebase/rebase_tag.go b/go/libraries/doltcore/rebase/rebase_tag.go index 371c16490a..42091c6ab1 100644 --- a/go/libraries/doltcore/rebase/rebase_tag.go +++ b/go/libraries/doltcore/rebase/rebase_tag.go @@ -17,7 +17,6 @@ package rebase import ( "context" "fmt" - "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed/noms" "time" "github.com/liquidata-inc/dolt/go/libraries/doltcore/diff" @@ -28,6 +27,7 @@ import ( "github.com/liquidata-inc/dolt/go/libraries/doltcore/schema" "github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/encoding" "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed" + "github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed/noms" "github.com/liquidata-inc/dolt/go/libraries/utils/set" ndiff "github.com/liquidata-inc/dolt/go/store/diff" "github.com/liquidata-inc/dolt/go/store/hash" diff --git a/go/libraries/doltcore/table/typed/noms/noms_map_updater.go b/go/libraries/doltcore/table/typed/noms/noms_map_updater.go index 77cbc8ced6..d0b0e03179 100644 --- a/go/libraries/doltcore/table/typed/noms/noms_map_updater.go +++ b/go/libraries/doltcore/table/typed/noms/noms_map_updater.go @@ -132,7 +132,6 @@ func (nmu *NomsMapUpdater) WriteRow(ctx context.Context, r row.Row) error { return nil } - // WriteRow will write a row to a table func (nmu *NomsMapUpdater) WriteEdit(ctx context.Context, pk types.LesserValuable, fieldVals types.Valuable) error { if nmu.acc == nil { From 75fb59d0a018e7647aa96de1cd63186b50dc7597 Mon Sep 17 00:00:00 2001 From: Daylon Wilkins Date: Tue, 14 Apr 2020 14:59:18 -0700 Subject: [PATCH 50/60] Implemented DECIMAL, TIME, ENUM, and SET, along with missing type aliases --- bats/types.bats | 533 ++++++++++++---- go/go.mod | 2 +- go/go.sum | 32 + .../doltcore/rowconv/row_converter.go | 2 +- .../schema/encoding/schema_marshaling_test.go | 24 +- .../doltcore/schema/typeinfo/common_test.go | 23 + .../doltcore/schema/typeinfo/decimal.go | 29 +- .../doltcore/schema/typeinfo/decimal_test.go | 595 ++++++++++++++++++ go/libraries/doltcore/schema/typeinfo/enum.go | 21 +- .../doltcore/schema/typeinfo/enum_test.go | 258 ++++++++ go/libraries/doltcore/schema/typeinfo/set.go | 23 +- .../doltcore/schema/typeinfo/set_test.go | 270 ++++++++ go/libraries/doltcore/schema/typeinfo/time.go | 36 +- .../doltcore/schema/typeinfo/time_test.go | 230 +++++++ .../doltcore/schema/typeinfo/typeinfo.go | 16 - .../doltcore/schema/typeinfo/typeinfo_test.go | 33 +- .../schema/typeinfo/varstring_test.go | 24 - go/libraries/doltcore/sqle/indexes.go | 78 +-- go/store/types/decimal.go | 109 ++++ go/store/types/decimal_test.go | 34 + go/store/types/map_test.go | 25 + go/store/types/noms_kind.go | 3 + go/store/types/type_test.go | 2 + 23 files changed, 2059 insertions(+), 343 deletions(-) create mode 100644 go/libraries/doltcore/schema/typeinfo/decimal_test.go create mode 100644 go/libraries/doltcore/schema/typeinfo/enum_test.go create mode 100644 go/libraries/doltcore/schema/typeinfo/set_test.go create mode 100644 go/libraries/doltcore/schema/typeinfo/time_test.go create mode 100644 go/store/types/decimal.go create mode 100644 go/store/types/decimal_test.go diff --git a/bats/types.bats b/bats/types.bats index ee1900f43a..8e2e5ca98f 100644 --- a/bats/types.bats +++ b/bats/types.bats @@ -12,14 +12,14 @@ teardown() { @test "types: BIGINT" { dolt sql < ./gen/proto/dolt/services/eventsapi -replace github.com/src-d/go-mysql-server => github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414183459-0b63a0868140 +replace github.com/src-d/go-mysql-server => github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414214346-0c65dac7ca1e replace vitess.io/vitess => github.com/liquidata-inc/vitess v0.0.0-20200413233505-a88cc54bd1ee diff --git a/go/go.sum b/go/go.sum index 88a5a8fe68..3fa561762b 100644 --- a/go/go.sum +++ b/go/go.sum @@ -359,14 +359,46 @@ github.com/krishicks/yaml-patch v0.0.10/go.mod h1:Sm5TchwZS6sm7RJoyg87tzxm2ZcKzd github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.3.0 h1:/qkRGz8zljWiDcFvgpwUpwIAPu3r07TDvs3Rws+o/pU= github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/liquidata-inc/go-mysql-server v0.4.1-0.20200311072715-b12ae9d0cc97 h1:Zr78cjOfa0bM4X5JA692xhx3QvFPTsJiM0bD0xl/22Q= +github.com/liquidata-inc/go-mysql-server v0.4.1-0.20200311072715-b12ae9d0cc97/go.mod h1:Lh0pg7jnO08HxFm6oj6gtcSTUeeOTu4Spt50Aeo2mes= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200330231002-2ac5a85cf8d6 h1:iKET+xfMh3NaiiIbrMBLi+MJ9hwmm++7DBtPGfarf50= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200330231002-2ac5a85cf8d6 h1:iKET+xfMh3NaiiIbrMBLi+MJ9hwmm++7DBtPGfarf50= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200330231002-2ac5a85cf8d6/go.mod h1:TCTrDbzIA05e8zV3SW+nsjc1LCR58GRSOIcF32lJ+Qc= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200330231002-2ac5a85cf8d6/go.mod h1:TCTrDbzIA05e8zV3SW+nsjc1LCR58GRSOIcF32lJ+Qc= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200403150612-34c67410dcd4 h1:UIksBT7bRENT38ErKSz+auGLc7a5tDpCHwNhuajoJbU= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200403150612-34c67410dcd4/go.mod h1:TCTrDbzIA05e8zV3SW+nsjc1LCR58GRSOIcF32lJ+Qc= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200403171307-83ac7e7158e2 h1:l4mXLvgHMoihWuEqcmcJKEvQtAccHxhsKwkVn/okoxc= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200403171307-83ac7e7158e2/go.mod h1:TCTrDbzIA05e8zV3SW+nsjc1LCR58GRSOIcF32lJ+Qc= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200407175239-21fb18d4d9fd h1:SGGh7+XPqPYw3LaIK4VUvy/81Za1Y3p29lh4WDMtXh0= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200407175239-21fb18d4d9fd/go.mod h1:xu1cUi3vfWVJZ/9mQl9f8sdfJGobnS7kIucM3lfWIPk= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200412072052-a6583959dafb/go.mod h1:n8M6jtUZ5myu3O9kfcLqRnXPBVkqbkoBPe45mnYCKd0= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200412232521-1e406e8056fb/go.mod h1:n8M6jtUZ5myu3O9kfcLqRnXPBVkqbkoBPe45mnYCKd0= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414052025-88d3eff3f7f5 h1:Liiz/stNuLoWg1j1A/yGChITW4H/IbEyFwPGZPk+B8M= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414052025-88d3eff3f7f5/go.mod h1:n8M6jtUZ5myu3O9kfcLqRnXPBVkqbkoBPe45mnYCKd0= github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414183459-0b63a0868140 h1:rxT0Pkt2ZLS0P4m8scQ3TATRjKYcntF6F0X5/yHcIDg= github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414183459-0b63a0868140/go.mod h1:tK/saWoda2x+KXyGsdVariMdfVOsjmRgQF2pbl4Mr1E= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414214346-0c65dac7ca1e h1:cYKHqocy3oNkPmfayDwIswVy14Dcp8q5FFSYLS4FvIA= +github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414214346-0c65dac7ca1e/go.mod h1:tK/saWoda2x+KXyGsdVariMdfVOsjmRgQF2pbl4Mr1E= github.com/liquidata-inc/ishell v0.0.0-20190514193646-693241f1f2a0 h1:phMgajKClMUiIr+hF2LGt8KRuUa2Vd2GI1sNgHgSXoU= github.com/liquidata-inc/ishell v0.0.0-20190514193646-693241f1f2a0/go.mod h1:YC1rI9k5gx8D02ljlbxDfZe80s/iq8bGvaaQsvR+qxs= github.com/liquidata-inc/mmap-go v1.0.3 h1:2LndAeAtup9rpvUmu4wZSYCsjCQ0Zpc+NqE+6+PnT7g= github.com/liquidata-inc/mmap-go v1.0.3/go.mod h1:w0doE7jfkuDEZyxb/zD3VWnRaQBYx1uDTS816kH8HoY= +github.com/liquidata-inc/sqllogictest/go v0.0.0-20200225183643-358992a611e1 h1:BDpmbvQ9I8npWe7TOzQcGkrn7EYHrW1hJtTd9h8MNZA= +github.com/liquidata-inc/sqllogictest/go v0.0.0-20200225183643-358992a611e1/go.mod h1:kKRVtyuomkqz15YFRpS0OT8kpsU8y/F3jyiZtvALdKU= +github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15 h1:H3RwcYfzkdW4kFh7znTUopcX3XZqnFXm6pcmxSy0mNo= github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15 h1:H3RwcYfzkdW4kFh7znTUopcX3XZqnFXm6pcmxSy0mNo= github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15/go.mod h1:kKRVtyuomkqz15YFRpS0OT8kpsU8y/F3jyiZtvALdKU= +github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15/go.mod h1:kKRVtyuomkqz15YFRpS0OT8kpsU8y/F3jyiZtvALdKU= +github.com/liquidata-inc/vitess v0.0.0-20200102230944-f3410911d61f h1:fqsJy7h3D3esm9tYSzU7LV6h2tfifdYTanPuDL5LJ1A= +github.com/liquidata-inc/vitess v0.0.0-20200102230944-f3410911d61f/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY= +github.com/liquidata-inc/vitess v0.0.0-20200318153456-e0b079da3f54 h1:LR/OEhgIYVQuo5a/lxr8Ps76AZ1FNWUgNANfKCA0XSQ= +github.com/liquidata-inc/vitess v0.0.0-20200318153456-e0b079da3f54 h1:LR/OEhgIYVQuo5a/lxr8Ps76AZ1FNWUgNANfKCA0XSQ= +github.com/liquidata-inc/vitess v0.0.0-20200318153456-e0b079da3f54/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY= +github.com/liquidata-inc/vitess v0.0.0-20200318153456-e0b079da3f54/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY= +github.com/liquidata-inc/vitess v0.0.0-20200407071440-54a487aaf7d9 h1:eaE6IFxMviaDSNFaKlTbNPA/+0Vhj/XgV6lG2SaoAWM= +github.com/liquidata-inc/vitess v0.0.0-20200407071440-54a487aaf7d9/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY= +github.com/liquidata-inc/vitess v0.0.0-20200410001601-55d11bea14ca h1:m09m0bRpTa3PCxMNcnRf5AiVK7ME0PVIci1vwuciZ5w= +github.com/liquidata-inc/vitess v0.0.0-20200410001601-55d11bea14ca/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY= github.com/liquidata-inc/vitess v0.0.0-20200413233505-a88cc54bd1ee h1:r8ApUMNHHEyzRhPbuIHrWbr7FOTW4Yo5Sm1HpOEzPrQ= github.com/liquidata-inc/vitess v0.0.0-20200413233505-a88cc54bd1ee/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= diff --git a/go/libraries/doltcore/rowconv/row_converter.go b/go/libraries/doltcore/rowconv/row_converter.go index f85ceac3bb..bf9f5d39b1 100644 --- a/go/libraries/doltcore/rowconv/row_converter.go +++ b/go/libraries/doltcore/rowconv/row_converter.go @@ -190,7 +190,7 @@ func isNecessary(srcSch, destSch schema.Schema, destToSrc map[uint64]uint64) (bo return true, nil } - if srcCol.Kind != destCol.Kind { + if !srcCol.TypeInfo.Equals(destCol.TypeInfo) { return true, nil } } diff --git a/go/libraries/doltcore/schema/encoding/schema_marshaling_test.go b/go/libraries/doltcore/schema/encoding/schema_marshaling_test.go index 59d83dcc3f..8de313f555 100644 --- a/go/libraries/doltcore/schema/encoding/schema_marshaling_test.go +++ b/go/libraries/doltcore/schema/encoding/schema_marshaling_test.go @@ -118,12 +118,8 @@ func TestJSONMarshalling(t *testing.T) { func TestTypeInfoMarshalling(t *testing.T) { //TODO: determine the storage format for BINARY //TODO: determine the storage format for BLOB - //TODO: determine the storage format for DECIMAL - //TODO: determine the storage format for ENUM //TODO: determine the storage format for LONGBLOB //TODO: determine the storage format for MEDIUMBLOB - //TODO: determine the storage format for SET - //TODO: determine the storage format for TIME //TODO: determine the storage format for TINYBLOB //TODO: determine the storage format for VARBINARY sqlTypes := []sql.Type{ @@ -134,11 +130,11 @@ func TestTypeInfoMarshalling(t *testing.T) { //sql.Blob, //BLOB sql.Boolean, //BOOLEAN sql.MustCreateStringWithDefaults(sqltypes.Char, 10), //CHAR(10) - sql.Date, //DATE - sql.Datetime, //DATETIME - //sql.MustCreateDecimalType(9, 5), //DECIMAL(9, 5) - sql.Float64, //DOUBLE - //sql.MustCreateEnumType([]string{"a", "b", "c"}, sql.Collation_Default), //ENUM('a','b','c') + sql.Date, //DATE + sql.Datetime, //DATETIME + sql.MustCreateDecimalType(9, 5), //DECIMAL(9, 5) + sql.Float64, //DOUBLE + sql.MustCreateEnumType([]string{"a", "b", "c"}, sql.Collation_Default), //ENUM('a','b','c') sql.Float32, //FLOAT sql.Int32, //INT sql.Uint32, //INT UNSIGNED @@ -148,11 +144,11 @@ func TestTypeInfoMarshalling(t *testing.T) { sql.Int24, //MEDIUMINT sql.Uint24, //MEDIUMINT UNSIGNED sql.MediumText, //MEDIUMTEXT - //sql.MustCreateSetType([]string{"a", "b", "c"}, sql.Collation_Default), //SET('a','b','c') - sql.Int16, //SMALLINT - sql.Uint16, //SMALLINT UNSIGNED - sql.Text, //TEXT - //sql.Time, //TIME + sql.MustCreateSetType([]string{"a", "b", "c"}, sql.Collation_Default), //SET('a','b','c') + sql.Int16, //SMALLINT + sql.Uint16, //SMALLINT UNSIGNED + sql.Text, //TEXT + sql.Time, //TIME sql.Timestamp, //TIMESTAMP //sql.TinyBlob, //TINYBLOB sql.Int8, //TINYINT diff --git a/go/libraries/doltcore/schema/typeinfo/common_test.go b/go/libraries/doltcore/schema/typeinfo/common_test.go index 351690710e..a973a9171d 100644 --- a/go/libraries/doltcore/schema/typeinfo/common_test.go +++ b/go/libraries/doltcore/schema/typeinfo/common_test.go @@ -135,6 +135,29 @@ func generateVarBinaryType(t *testing.T, length int64, pad bool) *varBinaryType return &varBinaryType{sql.MustCreateBinary(sqltypes.VarBinary, length)} } +func generateVarStringTypes(t *testing.T, numOfTypes uint16) []TypeInfo { + var res []TypeInfo + loop(t, 1, 500, numOfTypes, func(i int64) { + rts := false + if i%2 == 0 { + rts = true + } + res = append(res, generateVarStringType(t, i, rts)) + }) + return res +} + +func generateVarStringType(t *testing.T, length int64, rts bool) *varStringType { + require.True(t, length > 0) + if rts { + t, err := sql.CreateStringWithDefaults(sqltypes.Char, length) + if err == nil { + return &varStringType{t} + } + } + return &varStringType{sql.MustCreateStringWithDefaults(sqltypes.VarChar, length)} +} + func loop(t *testing.T, start int64, endInclusive int64, numOfSteps uint16, loopedFunc func(int64)) { require.True(t, endInclusive > start) maxNumOfSteps := endInclusive - start + 1 diff --git a/go/libraries/doltcore/schema/typeinfo/decimal.go b/go/libraries/doltcore/schema/typeinfo/decimal.go index 1c75fb4643..fa3841e267 100644 --- a/go/libraries/doltcore/schema/typeinfo/decimal.go +++ b/go/libraries/doltcore/schema/typeinfo/decimal.go @@ -18,6 +18,7 @@ import ( "fmt" "strconv" + "github.com/shopspring/decimal" "github.com/src-d/go-mysql-server/sql" "github.com/liquidata-inc/dolt/go/store/types" @@ -66,12 +67,8 @@ func CreateDecimalTypeFromParams(params map[string]string) (TypeInfo, error) { // ConvertNomsValueToValue implements TypeInfo interface. func (ti *decimalType) ConvertNomsValueToValue(v types.Value) (interface{}, error) { - if val, ok := v.(types.String); ok { - res, err := ti.sqlDecimalType.Convert(string(val)) - if err != nil { - return nil, fmt.Errorf(`"%v" cannot convert "%v" to value`, ti.String(), val) - } - return res, nil + if val, ok := v.(types.Decimal); ok { + return ti.sqlDecimalType.Convert(decimal.Decimal(val)) } if _, ok := v.(types.Null); ok || v == nil { return nil, nil @@ -84,15 +81,14 @@ func (ti *decimalType) ConvertValueToNomsValue(v interface{}) (types.Value, erro if v == nil { return types.NullValue, nil } - strVal, err := ti.sqlDecimalType.Convert(v) + decVal, err := ti.sqlDecimalType.ConvertToDecimal(v) if err != nil { return nil, err } - val, ok := strVal.(string) - if ok { - return types.String(val), nil + if !decVal.Valid { + return nil, fmt.Errorf(`"%v" has unexpectedly encountered a null value from embedded type`, ti.String()) } - return nil, fmt.Errorf(`"%v" has unexpectedly encountered a value of type "%T" from embedded type`, ti.String(), v) + return types.Decimal(decVal.Decimal), nil } // Equals implements TypeInfo interface. @@ -144,7 +140,7 @@ func (ti *decimalType) IsValid(v types.Value) bool { // NomsKind implements TypeInfo interface. func (ti *decimalType) NomsKind() types.NomsKind { - return types.StringKind + return types.DecimalKind } // ParseValue implements TypeInfo interface. @@ -152,14 +148,7 @@ func (ti *decimalType) ParseValue(str *string) (types.Value, error) { if str == nil || *str == "" { return types.NullValue, nil } - strVal, err := ti.sqlDecimalType.Convert(*str) - if err != nil { - return nil, err - } - if val, ok := strVal.(string); ok { - return types.String(val), nil - } - return nil, fmt.Errorf(`"%v" cannot convert the string "%v" to a value`, ti.String(), str) + return ti.ConvertValueToNomsValue(*str) } // String implements TypeInfo interface. diff --git a/go/libraries/doltcore/schema/typeinfo/decimal_test.go b/go/libraries/doltcore/schema/typeinfo/decimal_test.go new file mode 100644 index 0000000000..7ae6179bed --- /dev/null +++ b/go/libraries/doltcore/schema/typeinfo/decimal_test.go @@ -0,0 +1,595 @@ +// Copyright 2020 Liquidata, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package typeinfo + +import ( + "fmt" + "math/big" + "testing" + "time" + + "github.com/shopspring/decimal" + "github.com/src-d/go-mysql-server/sql" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/liquidata-inc/dolt/go/store/types" +) + +func TestDecimalConvertNomsValueToValue(t *testing.T) { + tests := []struct { + typ *decimalType + input types.Decimal + output string + expectedErr bool + }{ + { + generateDecimalType(t, 1, 0), + types.Decimal(decimal.RequireFromString("0")), + "0", + false, + }, + { + generateDecimalType(t, 1, 0), + types.Decimal(decimal.RequireFromString("-1.5")), + "-2", + false, + }, + { + generateDecimalType(t, 2, 1), + types.Decimal(decimal.RequireFromString("-1.5")), + "-1.5", + false, + }, + { + generateDecimalType(t, 5, 4), + types.Decimal(decimal.RequireFromString("-5.7159")), + "-5.7159", + false, + }, + { + generateDecimalType(t, 9, 2), + types.Decimal(decimal.RequireFromString("4723245")), + "4723245.00", + false, + }, + { + generateDecimalType(t, 9, 2), + types.Decimal(decimal.RequireFromString("4723245.01")), + "4723245.01", + false, + }, + { + generateDecimalType(t, 9, 2), + types.Decimal(decimal.RequireFromString("14723245.01")), + "", + true, + }, + { + generateDecimalType(t, 5, 4), + types.Decimal(decimal.RequireFromString("55.7159")), + "", + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) { + output, err := test.typ.ConvertNomsValueToValue(test.input) + if test.expectedErr { + assert.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, test.output, output) + } + }) + } +} + +func TestDecimalConvertValueToNomsValue(t *testing.T) { + tests := []struct { + typ *decimalType + input interface{} + output types.Decimal + expectedErr bool + }{ + { + generateDecimalType(t, 1, 0), + 7, + types.Decimal(decimal.RequireFromString("7")), + false, + }, + { + generateDecimalType(t, 5, 1), + -4.5, + types.Decimal(decimal.RequireFromString("-4.5")), + false, + }, + { + generateDecimalType(t, 10, 0), + "77", + types.Decimal(decimal.RequireFromString("77")), + false, + }, + { + generateDecimalType(t, 5, 0), + "dog", + types.Decimal{}, + true, + }, + { + generateDecimalType(t, 15, 7), + true, + types.Decimal{}, + true, + }, + { + generateDecimalType(t, 20, 5), + time.Unix(137849, 0), + types.Decimal{}, + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) { + output, err := test.typ.ConvertValueToNomsValue(test.input) + if !test.expectedErr { + require.NoError(t, err) + assert.True(t, test.output.Equals(output)) + } else { + assert.Error(t, err) + } + }) + } +} + +func TestDecimalFormatValue(t *testing.T) { + tests := []struct { + typ *decimalType + input types.Decimal + output string + expectedErr bool + }{ + { + generateDecimalType(t, 1, 0), + types.Decimal(decimal.RequireFromString("0")), + "0", + false, + }, + { + generateDecimalType(t, 1, 0), + types.Decimal(decimal.RequireFromString("-1.5")), + "-2", + false, + }, + { + generateDecimalType(t, 2, 1), + types.Decimal(decimal.RequireFromString("-1.5")), + "-1.5", + false, + }, + { + generateDecimalType(t, 5, 4), + types.Decimal(decimal.RequireFromString("-5.7159")), + "-5.7159", + false, + }, + { + generateDecimalType(t, 9, 2), + types.Decimal(decimal.RequireFromString("4723245")), + "4723245.00", + false, + }, + { + generateDecimalType(t, 9, 2), + types.Decimal(decimal.RequireFromString("4723245.01")), + "4723245.01", + false, + }, + { + generateDecimalType(t, 9, 2), + types.Decimal(decimal.RequireFromString("14723245.01")), + "", + true, + }, + { + generateDecimalType(t, 5, 4), + types.Decimal(decimal.RequireFromString("55.7159")), + "", + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) { + output, err := test.typ.FormatValue(test.input) + if test.expectedErr { + assert.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, test.output, *output) + } + }) + } +} + +func TestDecimalParseValue(t *testing.T) { + tests := []struct { + typ *decimalType + input string + output types.Decimal + expectedErr bool + }{ + { + generateDecimalType(t, 1, 0), + "0", + types.Decimal(decimal.RequireFromString("0")), + false, + }, + { + generateDecimalType(t, 1, 0), + "-1.5", + types.Decimal(decimal.RequireFromString("-2")), + false, + }, + { + generateDecimalType(t, 2, 1), + "-1.5", + types.Decimal(decimal.RequireFromString("-1.5")), + false, + }, + { + generateDecimalType(t, 5, 4), + "-5.7159", + types.Decimal(decimal.RequireFromString("-5.7159")), + false, + }, + { + generateDecimalType(t, 9, 2), + "4723245.00", + types.Decimal(decimal.RequireFromString("4723245.00")), + false, + }, + { + generateDecimalType(t, 13, 2), + "4723245.01", + types.Decimal(decimal.RequireFromString("4723245.01")), + false, + }, + { + generateDecimalType(t, 9, 2), + "24723245.01", + types.Decimal{}, + true, + }, + { + generateDecimalType(t, 5, 4), + "-44.2841", + types.Decimal{}, + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) { + output, err := test.typ.ParseValue(&test.input) + if !test.expectedErr { + require.NoError(t, err) + assert.True(t, test.output.Equals(output)) + } else { + assert.Error(t, err) + } + }) + } +} + +func TestDecimalMarshal(t *testing.T) { + tests := []struct { + precision uint8 + scale uint8 + val interface{} + expectedVal string + expectedErr bool + }{ + {1, 0, byte(0), "0", false}, + {1, 0, int8(3), "3", false}, + {1, 0, "-3.7e0", "-4", false}, + {1, 0, uint(4), "4", false}, + {1, 0, int16(9), "9", false}, + {1, 0, "0.00000000000000000003e20", "3", false}, + {1, 0, float64(-9.4), "-9", false}, + {1, 0, float32(9.5), "", true}, + {1, 0, int32(-10), "", true}, + + {1, 1, 0, "0.0", false}, + {1, 1, .01, "0.0", false}, + {1, 1, .1, "0.1", false}, + {1, 1, ".22", "0.2", false}, + {1, 1, .55, "0.6", false}, + {1, 1, "-.7863294659345624", "-0.8", false}, + {1, 1, "2634193746329327479.32030573792e-19", "0.3", false}, + {1, 1, 1, "", true}, + {1, 1, new(big.Rat).SetInt64(2), "", true}, + + {5, 0, 0, "0", false}, + {5, 0, -5, "-5", false}, + {5, 0, -99995, "-99995", false}, + {5, 0, 5000.2, "5000", false}, + {5, 0, "7742", "7742", false}, + {5, 0, new(big.Float).SetFloat64(-4723.875), "-4724", false}, + {5, 0, 99999, "99999", false}, + {5, 0, "0xf8e1", "63713", false}, + {5, 0, "0b1001110101100110", "40294", false}, + {5, 0, new(big.Rat).SetFrac64(999999, 10), "", true}, + {5, 0, 673927, "", true}, + + {10, 5, 0, "0.00000", false}, + {10, 5, "25.1", "25.10000", false}, + {10, 5, "-25.1", "-25.10000", false}, + {10, 5, "-99205.8572", "-99205.85720", false}, + {10, 5, "99999.999994", "99999.99999", false}, + {10, 5, "5.5729136e3", "5572.91360", false}, + {10, 5, "600e-2", "6.00000", false}, + {10, 5, new(big.Rat).SetFrac64(-22, 7), "-3.14286", false}, + {10, 5, "-99995.1", "-99995.10000", false}, + {10, 5, 100000, "", true}, + {10, 5, "-99999.999995", "", true}, + + {65, 0, "99999999999999999999999999999999999999999999999999999999999999999", + "99999999999999999999999999999999999999999999999999999999999999999", false}, + {65, 0, "99999999999999999999999999999999999999999999999999999999999999999.1", + "99999999999999999999999999999999999999999999999999999999999999999", false}, + {65, 0, "99999999999999999999999999999999999999999999999999999999999999999.99", "", true}, + + {65, 12, "16976349273982359874209023948672021737840592720387475.2719128737543572927374503832837350563300243035038234972093785", + "16976349273982359874209023948672021737840592720387475.271912873754", false}, + {65, 12, "99999999999999999999999999999999999999999999999999999.9999999999999", "", true}, + + {20, 10, []byte{32}, "", true}, + {20, 10, time.Date(2019, 12, 12, 12, 12, 12, 0, time.UTC), "", true}, + } + + for _, test := range tests { + t.Run(fmt.Sprintf("%v %v %v", test.precision, test.scale, test.val), func(t *testing.T) { + typ := &decimalType{sql.MustCreateDecimalType(test.precision, test.scale)} + val, err := typ.ConvertValueToNomsValue(test.val) + if test.expectedErr { + assert.Error(t, err) + } else { + require.NoError(t, err) + assert.Equal(t, test.expectedVal, typ.sqlDecimalType.MustConvert(decimal.Decimal(val.(types.Decimal)))) + umar, err := typ.ConvertNomsValueToValue(val) + require.NoError(t, err) + testVal := typ.sqlDecimalType.MustConvert(test.val) + cmp, err := typ.sqlDecimalType.Compare(testVal, umar) + require.NoError(t, err) + assert.Equal(t, 0, cmp) + } + }) + } +} + +func TestDecimalRoundTrip(t *testing.T) { + tests := []struct { + typ *decimalType + input string + output string + expectedErr bool + }{ + { + generateDecimalType(t, 1, 0), + "0", + "0", + false, + }, + { + generateDecimalType(t, 4, 1), + "0", + "0.0", + false, + }, + { + generateDecimalType(t, 9, 4), + "0", + "0.0000", + false, + }, + { + generateDecimalType(t, 26, 0), + "0", + "0", + false, + }, + { + generateDecimalType(t, 48, 22), + "0", + "0.0000000000000000000000", + false, + }, + { + generateDecimalType(t, 65, 30), + "0", + "0.000000000000000000000000000000", + false, + }, + { + generateDecimalType(t, 1, 0), + "-1.5", + "-2", + false, + }, + { + generateDecimalType(t, 4, 1), + "-1.5", + "-1.5", + false, + }, + { + generateDecimalType(t, 9, 4), + "-1.5", + "-1.5000", + false, + }, + { + generateDecimalType(t, 26, 0), + "-1.5", + "-2", + false, + }, + { + generateDecimalType(t, 48, 22), + "-1.5", + "-1.5000000000000000000000", + false, + }, + { + generateDecimalType(t, 65, 30), + "-1.5", + "-1.500000000000000000000000000000", + false, + }, + { + generateDecimalType(t, 1, 0), + "9351580", + "", + true, + }, + { + generateDecimalType(t, 4, 1), + "9351580", + "", + true, + }, + { + generateDecimalType(t, 9, 4), + "9351580", + "", + true, + }, + { + generateDecimalType(t, 26, 0), + "9351580", + "9351580", + false, + }, + { + generateDecimalType(t, 48, 22), + "9351580", + "9351580.0000000000000000000000", + false, + }, + { + generateDecimalType(t, 65, 30), + "9351580", + "9351580.000000000000000000000000000000", + false, + }, + { + generateDecimalType(t, 1, 0), + "-1076416.875", + "", + true, + }, + { + generateDecimalType(t, 4, 1), + "-1076416.875", + "", + true, + }, + { + generateDecimalType(t, 9, 4), + "-1076416.875", + "", + true, + }, + { + generateDecimalType(t, 26, 0), + "-1076416.875", + "-1076417", + false, + }, + { + generateDecimalType(t, 48, 22), + "-1076416.875", + "-1076416.8750000000000000000000", + false, + }, + { + generateDecimalType(t, 65, 30), + "-1076416.875", + "-1076416.875000000000000000000000000000", + false, + }, + { + generateDecimalType(t, 1, 0), + "198728394234798423466321.27349757", + "", + true, + }, + { + generateDecimalType(t, 4, 1), + "198728394234798423466321.27349757", + "", + true, + }, + { + generateDecimalType(t, 9, 4), + "198728394234798423466321.27349757", + "", + true, + }, + { + generateDecimalType(t, 26, 0), + "198728394234798423466321.27349757", + "198728394234798423466321", + false, + }, + { + generateDecimalType(t, 48, 22), + "198728394234798423466321.27349757", + "198728394234798423466321.2734975700000000000000", + false, + }, + { + generateDecimalType(t, 65, 30), + "198728394234798423466321.27349757", + "198728394234798423466321.273497570000000000000000000000", + false, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v %v`, test.typ.String(), test.input, test.output), func(t *testing.T) { + parsed, err := test.typ.ConvertValueToNomsValue(test.input) + if !test.expectedErr { + require.NoError(t, err) + output, err := test.typ.ConvertNomsValueToValue(parsed) + require.NoError(t, err) + assert.Equal(t, test.output, output) + parsed2, err := test.typ.ParseValue(&test.input) + require.NoError(t, err) + assert.Equal(t, parsed, parsed2) + output2, err := test.typ.FormatValue(parsed2) + require.NoError(t, err) + assert.Equal(t, test.output, *output2) + } else { + assert.Error(t, err) + _, err = test.typ.ParseValue(&test.input) + assert.Error(t, err) + } + }) + } +} diff --git a/go/libraries/doltcore/schema/typeinfo/enum.go b/go/libraries/doltcore/schema/typeinfo/enum.go index 6a699f6e8c..75c353972b 100644 --- a/go/libraries/doltcore/schema/typeinfo/enum.go +++ b/go/libraries/doltcore/schema/typeinfo/enum.go @@ -67,8 +67,8 @@ func CreateEnumTypeFromParams(params map[string]string) (TypeInfo, error) { // ConvertNomsValueToValue implements TypeInfo interface. func (ti *enumType) ConvertNomsValueToValue(v types.Value) (interface{}, error) { - if val, ok := v.(types.String); ok { - res, err := ti.sqlEnumType.Convert(string(val)) + if val, ok := v.(types.Uint); ok { + res, err := ti.sqlEnumType.Unmarshal(int64(val)) if err != nil { return nil, fmt.Errorf(`"%v" cannot convert "%v" to value`, ti.String(), val) } @@ -85,15 +85,11 @@ func (ti *enumType) ConvertValueToNomsValue(v interface{}) (types.Value, error) if v == nil { return types.NullValue, nil } - strVal, err := ti.sqlEnumType.Convert(v) + val, err := ti.sqlEnumType.Marshal(v) if err != nil { return nil, err } - val, ok := strVal.(string) - if ok { - return types.String(val), nil - } - return nil, fmt.Errorf(`"%v" has unexpectedly encountered a value of type "%T" from embedded type`, ti.String(), v) + return types.Uint(val), nil } // Equals implements TypeInfo interface. @@ -158,7 +154,7 @@ func (ti *enumType) IsValid(v types.Value) bool { // NomsKind implements TypeInfo interface. func (ti *enumType) NomsKind() types.NomsKind { - return types.StringKind + return types.UintKind } // ParseValue implements TypeInfo interface. @@ -166,14 +162,11 @@ func (ti *enumType) ParseValue(str *string) (types.Value, error) { if str == nil || *str == "" { return types.NullValue, nil } - strVal, err := ti.sqlEnumType.Convert(*str) + val, err := ti.sqlEnumType.Marshal(*str) if err != nil { return nil, err } - if val, ok := strVal.(string); ok { - return types.String(val), nil - } - return nil, fmt.Errorf(`"%v" cannot convert the string "%v" to a value`, ti.String(), str) + return types.Uint(val), nil } // String implements TypeInfo interface. diff --git a/go/libraries/doltcore/schema/typeinfo/enum_test.go b/go/libraries/doltcore/schema/typeinfo/enum_test.go new file mode 100644 index 0000000000..264ad85cf6 --- /dev/null +++ b/go/libraries/doltcore/schema/typeinfo/enum_test.go @@ -0,0 +1,258 @@ +// Copyright 2020 Liquidata, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package typeinfo + +import ( + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/liquidata-inc/dolt/go/store/types" +) + +func TestEnumConvertNomsValueToValue(t *testing.T) { + tests := []struct { + typ *enumType + input types.Uint + output string + expectedErr bool + }{ + { + generateEnumType(t, 3), + 1, + "aaaa", + false, + }, + { + generateEnumType(t, 5), + 2, + "aaab", + false, + }, + { + generateEnumType(t, 8), + 3, + "aaac", + false, + }, + { + generateEnumType(t, 7), + 7, + "aaag", + false, + }, + { + generateEnumType(t, 2), + 0, + "", + true, + }, + { + generateEnumType(t, 3), + 4, + "", + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) { + output, err := test.typ.ConvertNomsValueToValue(test.input) + if test.expectedErr { + assert.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, test.output, output) + } + }) + } +} + +func TestEnumConvertValueToNomsValue(t *testing.T) { + tests := []struct { + typ *enumType + input interface{} + output types.Uint + expectedErr bool + }{ + { + generateEnumType(t, 4), + "aaac", + 3, + false, + }, + { + generateEnumType(t, 7), + uint64(3), + 3, + false, + }, + { + generateEnumType(t, 4), + "dog", + 0, + true, + }, + { + generateEnumType(t, 3), + true, + 0, + true, + }, + { + generateEnumType(t, 10), + time.Unix(137849, 0), + 0, + true, + }, + { + generateEnumType(t, 5), + complex128(14i), + 0, + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) { + output, err := test.typ.ConvertValueToNomsValue(test.input) + if !test.expectedErr { + require.NoError(t, err) + assert.Equal(t, test.output, output) + } else { + assert.Error(t, err) + } + }) + } +} + +func TestEnumFormatValue(t *testing.T) { + tests := []struct { + typ *enumType + input types.Uint + output string + expectedErr bool + }{ + { + generateEnumType(t, 3), + 1, + "aaaa", + false, + }, + { + generateEnumType(t, 5), + 2, + "aaab", + false, + }, + { + generateEnumType(t, 8), + 3, + "aaac", + false, + }, + { + generateEnumType(t, 7), + 7, + "aaag", + false, + }, + { + generateEnumType(t, 2), + 0, + "", + true, + }, + { + generateEnumType(t, 3), + 4, + "", + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) { + output, err := test.typ.FormatValue(test.input) + if test.expectedErr { + assert.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, test.output, *output) + } + }) + } +} + +func TestEnumParseValue(t *testing.T) { + tests := []struct { + typ *enumType + input string + output types.Uint + expectedErr bool + }{ + { + generateEnumType(t, 3), + "aaaa", + 1, + false, + }, + { + generateEnumType(t, 5), + "aaab", + 2, + false, + }, + { + generateEnumType(t, 8), + "aaac", + 3, + false, + }, + { + generateEnumType(t, 7), + "aaag", + 7, + false, + }, + { + generateEnumType(t, 2), + "dog", + 0, + true, + }, + { + generateEnumType(t, 3), + "aaad", + 4, + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) { + output, err := test.typ.ParseValue(&test.input) + if !test.expectedErr { + require.NoError(t, err) + assert.Equal(t, test.output, output) + } else { + assert.Error(t, err) + } + }) + } +} diff --git a/go/libraries/doltcore/schema/typeinfo/set.go b/go/libraries/doltcore/schema/typeinfo/set.go index 4d845ac823..2a7cb9982d 100644 --- a/go/libraries/doltcore/schema/typeinfo/set.go +++ b/go/libraries/doltcore/schema/typeinfo/set.go @@ -67,8 +67,8 @@ func CreateSetTypeFromParams(params map[string]string) (TypeInfo, error) { // ConvertNomsValueToValue implements TypeInfo interface. func (ti *setType) ConvertNomsValueToValue(v types.Value) (interface{}, error) { - if val, ok := v.(types.String); ok { - res, err := ti.sqlSetType.Convert(string(val)) + if val, ok := v.(types.Uint); ok { + res, err := ti.sqlSetType.Unmarshal(uint64(val)) if err != nil { return nil, fmt.Errorf(`"%v" cannot convert "%v" to value`, ti.String(), val) } @@ -85,15 +85,11 @@ func (ti *setType) ConvertValueToNomsValue(v interface{}) (types.Value, error) { if v == nil { return types.NullValue, nil } - strVal, err := ti.sqlSetType.Convert(v) + val, err := ti.sqlSetType.Marshal(v) if err != nil { return nil, err } - val, ok := strVal.(string) - if ok { - return types.String(val), nil - } - return nil, fmt.Errorf(`"%v" cannot convert value "%v" of type "%T" as it is invalid`, ti.String(), v, v) + return types.Uint(val), nil } // Equals implements TypeInfo interface. @@ -158,22 +154,19 @@ func (ti *setType) IsValid(v types.Value) bool { // NomsKind implements TypeInfo interface. func (ti *setType) NomsKind() types.NomsKind { - return types.StringKind + return types.UintKind } // ParseValue implements TypeInfo interface. func (ti *setType) ParseValue(str *string) (types.Value, error) { - if str == nil || *str == "" { + if str == nil { return types.NullValue, nil } - strVal, err := ti.sqlSetType.Convert(*str) + val, err := ti.sqlSetType.Marshal(*str) if err != nil { return nil, err } - if val, ok := strVal.(string); ok { - return types.String(val), nil - } - return nil, fmt.Errorf(`"%v" cannot convert the string "%v" to a value`, ti.String(), str) + return types.Uint(val), nil } // String implements TypeInfo interface. diff --git a/go/libraries/doltcore/schema/typeinfo/set_test.go b/go/libraries/doltcore/schema/typeinfo/set_test.go new file mode 100644 index 0000000000..c7120675a1 --- /dev/null +++ b/go/libraries/doltcore/schema/typeinfo/set_test.go @@ -0,0 +1,270 @@ +// Copyright 2020 Liquidata, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package typeinfo + +import ( + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/liquidata-inc/dolt/go/store/types" +) + +func TestSetConvertNomsValueToValue(t *testing.T) { + tests := []struct { + typ *setType + input types.Uint + output string + expectedErr bool + }{ + { + generateSetType(t, 2), + 0, + "", + false, + }, + { + generateSetType(t, 3), + 1, + "aa", + false, + }, + { + generateSetType(t, 5), + 2, + "ab", + false, + }, + { + generateSetType(t, 8), + 3, + "aa,ab", + false, + }, + { + generateSetType(t, 7), + 4, + "ac", + false, + }, + { + generateSetType(t, 4), + 7, + "aa,ab,ac", + false, + }, + { + generateSetType(t, 3), + 8, + "", + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) { + output, err := test.typ.ConvertNomsValueToValue(test.input) + if test.expectedErr { + assert.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, test.output, output) + } + }) + } +} + +func TestSetConvertValueToNomsValue(t *testing.T) { + tests := []struct { + typ *setType + input interface{} + output types.Uint + expectedErr bool + }{ + { + generateSetType(t, 4), + "aa,ab", + 3, + false, + }, + { + generateSetType(t, 7), + uint64(3), + 3, + false, + }, + { + generateSetType(t, 3), + true, + 0, + true, + }, + { + generateSetType(t, 10), + time.Unix(137849, 0), + 0, + true, + }, + { + generateSetType(t, 5), + complex128(14i), + 0, + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) { + output, err := test.typ.ConvertValueToNomsValue(test.input) + if !test.expectedErr { + require.NoError(t, err) + assert.Equal(t, test.output, output) + } else { + assert.Error(t, err) + } + }) + } +} + +func TestSetFormatValue(t *testing.T) { + tests := []struct { + typ *setType + input types.Uint + output string + expectedErr bool + }{ + { + generateSetType(t, 2), + 0, + "", + false, + }, + { + generateSetType(t, 3), + 1, + "aa", + false, + }, + { + generateSetType(t, 5), + 2, + "ab", + false, + }, + { + generateSetType(t, 8), + 3, + "aa,ab", + false, + }, + { + generateSetType(t, 7), + 4, + "ac", + false, + }, + { + generateSetType(t, 4), + 7, + "aa,ab,ac", + false, + }, + { + generateSetType(t, 3), + 8, + "", + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) { + output, err := test.typ.FormatValue(test.input) + if test.expectedErr { + assert.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, test.output, *output) + } + }) + } +} + +func TestSetParseValue(t *testing.T) { + tests := []struct { + typ *setType + input string + output types.Uint + expectedErr bool + }{ + { + generateSetType(t, 2), + "", + 0, + false, + }, + { + generateSetType(t, 3), + "aa", + 1, + false, + }, + { + generateSetType(t, 5), + "ab", + 2, + false, + }, + { + generateSetType(t, 8), + "aa,ab", + 3, + false, + }, + { + generateSetType(t, 7), + "ac", + 4, + false, + }, + { + generateSetType(t, 4), + "aa,ab,ac", + 7, + false, + }, + { + generateSetType(t, 3), + "ad", + 0, + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) { + output, err := test.typ.ParseValue(&test.input) + if !test.expectedErr { + require.NoError(t, err) + assert.Equal(t, test.output, output) + } else { + assert.Error(t, err) + } + }) + } +} diff --git a/go/libraries/doltcore/schema/typeinfo/time.go b/go/libraries/doltcore/schema/typeinfo/time.go index 4eb11508bf..a61d184aed 100644 --- a/go/libraries/doltcore/schema/typeinfo/time.go +++ b/go/libraries/doltcore/schema/typeinfo/time.go @@ -34,9 +34,8 @@ var TimeType = &timeType{sql.Time} // ConvertNomsValueToValue implements TypeInfo interface. func (ti *timeType) ConvertNomsValueToValue(v types.Value) (interface{}, error) { - //TODO: expose the MySQL type's microsecond implementation and persist that to disk? Enables sorting - if val, ok := v.(types.String); ok { - return string(val), nil + if val, ok := v.(types.Int); ok { + return ti.sqlTimeType.Unmarshal(int64(val)), nil } if _, ok := v.(types.Null); ok || v == nil { return nil, nil @@ -49,15 +48,11 @@ func (ti *timeType) ConvertValueToNomsValue(v interface{}) (types.Value, error) if v == nil { return types.NullValue, nil } - strVal, err := ti.sqlTimeType.Convert(v) + val, err := ti.sqlTimeType.Marshal(v) if err != nil { return nil, err } - val, ok := strVal.(string) - if ok { - return types.String(val), nil - } - return nil, fmt.Errorf(`"%v" cannot convert value "%v" of type "%T" as it is invalid`, ti.String(), v, v) + return types.Int(val), nil } // Equals implements TypeInfo interface. @@ -71,14 +66,18 @@ func (ti *timeType) Equals(other TypeInfo) bool { // FormatValue implements TypeInfo interface. func (ti *timeType) FormatValue(v types.Value) (*string, error) { - if val, ok := v.(types.String); ok { - res := string(val) - return &res, nil - } if _, ok := v.(types.Null); ok || v == nil { return nil, nil } - return nil, fmt.Errorf(`"%v" cannot convert NomsKind "%v" to a string`, ti.String(), v.Kind()) + strVal, err := ti.ConvertNomsValueToValue(v) + if err != nil { + return nil, err + } + val, ok := strVal.(string) + if !ok { + return nil, fmt.Errorf(`"%v" has unexpectedly encountered a value of type "%T" from embedded type`, ti.String(), v) + } + return &val, nil } // GetTypeIdentifier implements TypeInfo interface. @@ -99,7 +98,7 @@ func (ti *timeType) IsValid(v types.Value) bool { // NomsKind implements TypeInfo interface. func (ti *timeType) NomsKind() types.NomsKind { - return types.StringKind + return types.IntKind } // ParseValue implements TypeInfo interface. @@ -107,14 +106,11 @@ func (ti *timeType) ParseValue(str *string) (types.Value, error) { if str == nil || *str == "" { return types.NullValue, nil } - strVal, err := ti.sqlTimeType.Convert(*str) + val, err := ti.sqlTimeType.Marshal(*str) if err != nil { return nil, err } - if val, ok := strVal.(string); ok { - return types.String(val), nil - } - return nil, fmt.Errorf(`"%v" cannot convert the string "%v" to a value`, ti.String(), str) + return types.Int(val), nil } // String implements TypeInfo interface. diff --git a/go/libraries/doltcore/schema/typeinfo/time_test.go b/go/libraries/doltcore/schema/typeinfo/time_test.go new file mode 100644 index 0000000000..574fae9cd4 --- /dev/null +++ b/go/libraries/doltcore/schema/typeinfo/time_test.go @@ -0,0 +1,230 @@ +// Copyright 2020 Liquidata, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package typeinfo + +import ( + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/liquidata-inc/dolt/go/store/types" +) + +func TestTimeConvertNomsValueToValue(t *testing.T) { + tests := []struct { + input types.Int + output string + expectedErr bool + }{ + { + 1000000, + "00:00:01", + false, + }, + { + 113000000, + "00:01:53", + false, + }, + { + 247019000000, + "68:36:59", + false, + }, + { + 458830485214, + "127:27:10.485214", + false, + }, + { + -3020399000000, + "-838:59:59", + false, + }, + { // no integer can cause an error, values beyond the max/min are set equal to the max/min + 922337203685477580, + "838:59:59", + false, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v`, test.input), func(t *testing.T) { + output, err := TimeType.ConvertNomsValueToValue(test.input) + if test.expectedErr { + assert.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, test.output, output) + } + }) + } +} + +func TestTimeConvertValueToNomsValue(t *testing.T) { + tests := []struct { + input interface{} + output types.Int + expectedErr bool + }{ + { + 153, + 113000000, + false, + }, + { + 1.576, + 1576000, + false, + }, + { + "68:36:59", + 247019000000, + false, + }, + { + "683659", + 247019000000, + false, + }, + { + "dog", + 0, + true, + }, + { + true, + 0, + true, + }, + { + time.Unix(137849, 0), + 0, + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v`, test.input), func(t *testing.T) { + output, err := TimeType.ConvertValueToNomsValue(test.input) + if !test.expectedErr { + require.NoError(t, err) + assert.Equal(t, test.output, output) + } else { + assert.Error(t, err) + } + }) + } +} + +func TestTimeFormatValue(t *testing.T) { + tests := []struct { + input types.Int + output string + expectedErr bool + }{ + { + 1000000, + "00:00:01", + false, + }, + { + 113000000, + "00:01:53", + false, + }, + { + 247019000000, + "68:36:59", + false, + }, + { + 458830485214, + "127:27:10.485214", + false, + }, + { + -3020399000000, + "-838:59:59", + false, + }, + { + 922337203685477580, + "838:59:59", + false, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v`, test.input), func(t *testing.T) { + output, err := TimeType.FormatValue(test.input) + if test.expectedErr { + assert.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, test.output, *output) + } + }) + } +} + +func TestTimeParseValue(t *testing.T) { + tests := []struct { + input string + output types.Int + expectedErr bool + }{ + { + "683659", + 247019000000, + false, + }, + { + "127:27:10.485214", + 458830485214, + false, + }, + { + "-838:59:59", + -3020399000000, + false, + }, + { + "850:00:00", + 3020399000000, + false, + }, + { + "dog", + 0, + true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf(`%v`, test.input), func(t *testing.T) { + output, err := TimeType.ParseValue(&test.input) + if !test.expectedErr { + require.NoError(t, err) + assert.Equal(t, test.output, output) + } else { + assert.Error(t, err) + } + }) + } +} diff --git a/go/libraries/doltcore/schema/typeinfo/typeinfo.go b/go/libraries/doltcore/schema/typeinfo/typeinfo.go index 0932dfe178..39b1f621d8 100644 --- a/go/libraries/doltcore/schema/typeinfo/typeinfo.go +++ b/go/libraries/doltcore/schema/typeinfo/typeinfo.go @@ -139,20 +139,12 @@ func FromSqlType(sqlType sql.Type) (TypeInfo, error) { case sqltypes.Date: return DateType, nil case sqltypes.Time: - //TODO: determine the storage format - if fmt.Sprintf("a") != "" { // always evaluates to true, compiler won't complain about unreachable code - return nil, fmt.Errorf(`"%v" has not yet been implemented`, sqlType.String()) - } return TimeType, nil case sqltypes.Datetime: return DatetimeType, nil case sqltypes.Year: return YearType, nil case sqltypes.Decimal: - //TODO: determine the storage format - if fmt.Sprintf("a") != "" { // always evaluates to true, compiler won't complain about unreachable code - return nil, fmt.Errorf(`"%v" has not yet been implemented`, sqlType.String()) - } decimalSQLType, ok := sqlType.(sql.DecimalType) if !ok { return nil, fmt.Errorf(`expected "DecimalTypeIdentifier" from SQL basetype "Decimal"`) @@ -213,20 +205,12 @@ func FromSqlType(sqlType sql.Type) (TypeInfo, error) { } return &bitType{bitSQLType}, nil case sqltypes.Enum: - //TODO: determine the storage format - if fmt.Sprintf("a") != "" { // always evaluates to true, compiler won't complain about unreachable code - return nil, fmt.Errorf(`"%v" has not yet been implemented`, sqlType.String()) - } enumSQLType, ok := sqlType.(sql.EnumType) if !ok { return nil, fmt.Errorf(`expected "EnumTypeIdentifier" from SQL basetype "Enum"`) } return &enumType{enumSQLType}, nil case sqltypes.Set: - //TODO: determine the storage format - if fmt.Sprintf("a") != "" { // always evaluates to true, compiler won't complain about unreachable code - return nil, fmt.Errorf(`"%v" has not yet been implemented`, sqlType.String()) - } setSQLType, ok := sqlType.(sql.SetType) if !ok { return nil, fmt.Errorf(`expected "SetTypeIdentifier" from SQL basetype "Set"`) diff --git a/go/libraries/doltcore/schema/typeinfo/typeinfo_test.go b/go/libraries/doltcore/schema/typeinfo/typeinfo_test.go index f54cdb84b7..88abc02667 100644 --- a/go/libraries/doltcore/schema/typeinfo/typeinfo_test.go +++ b/go/libraries/doltcore/schema/typeinfo/typeinfo_test.go @@ -19,6 +19,7 @@ import ( "testing" "time" + "github.com/shopspring/decimal" "github.com/src-d/go-mysql-server/sql" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -68,8 +69,7 @@ func verifyTypeInfoArrays(t *testing.T, tiArrays [][]TypeInfo, vaArrays [][]type // delete any types that should not be tested delete(seenTypeInfos, UnknownTypeIdentifier) delete(seenTypeInfos, TupleTypeIdentifier) - //TODO: determine the storage format for DecimalType and VarBinaryType - delete(seenTypeInfos, DecimalTypeIdentifier) + //TODO: determine the storage format for VarBinaryType delete(seenTypeInfos, VarBinaryTypeIdentifier) for _, tiArray := range tiArrays { // no row should be empty @@ -120,7 +120,7 @@ func testTypeInfoConvertRoundTrip(t *testing.T, tiArrays [][]TypeInfo, vaArrays if ti == DateType { // Special case as DateType removes the hh:mm:ss val = types.Timestamp(time.Time(val.(types.Timestamp)).Truncate(24 * time.Hour)) require.True(t, val.Equals(outVal), "\"%v\"\n\"%v\"", val, outVal) - } else { + } else if ti.GetTypeIdentifier() != DecimalTypeIdentifier { // Any Decimal's on-disk representation varies by precision/scale require.True(t, val.Equals(outVal), "\"%v\"\n\"%v\"", val, outVal) } } else { @@ -220,7 +220,7 @@ func testTypeInfoFormatParseRoundTrip(t *testing.T, tiArrays [][]TypeInfo, vaArr if ti == DateType { // special case as DateType removes the hh:mm:ss val = types.Timestamp(time.Time(val.(types.Timestamp)).Truncate(24 * time.Hour)) require.True(t, val.Equals(outVal), "\"%v\"\n\"%v\"", val, outVal) - } else { + } else if ti.GetTypeIdentifier() != DecimalTypeIdentifier { // Any Decimal's on-disk representation varies by precision/scale require.True(t, val.Equals(outVal), "\"%v\"\n\"%v\"", val, outVal) } } else { @@ -330,7 +330,7 @@ func generateTypeInfoArrays(t *testing.T) ([][]TypeInfo, [][]types.Value) { generateBitTypes(t, 16), {BoolType}, {DateType, DatetimeType, TimestampType}, - //generateDecimalTypes(t, 16), + generateDecimalTypes(t, 16), generateEnumTypes(t, 16), {Float32Type, Float64Type}, {InlineBlobType}, @@ -355,16 +355,19 @@ func generateTypeInfoArrays(t *testing.T) ([][]TypeInfo, [][]types.Value) { types.Timestamp(time.Date(2000, 2, 28, 14, 38, 43, 583395000, time.UTC)), types.Timestamp(time.Date(2038, 1, 19, 3, 14, 7, 999999000, time.UTC)), types.Timestamp(time.Date(9999, 12, 31, 23, 59, 59, 999999000, time.UTC))}, - //{types.String("1"), types.String("-1.5"), types.String("4723245"), //Decimal - // types.String("8923583.125"), types.String("1198728394234798423466321.27349757")}, - {types.String("aaaa"), types.String("aaaa,aaac"), types.String("aaag"), types.String("aaab,aaad,aaaf"), types.String("aaag,aaah")}, //Enum - {types.Float(1.0), types.Float(65513.75), types.Float(4293902592), types.Float(4.58E71), types.Float(7.172E285)}, //Float - {types.InlineBlob{0}, types.InlineBlob{21}, types.InlineBlob{1, 17}, types.InlineBlob{72, 42}, types.InlineBlob{21, 122, 236}}, //InlineBlob - {types.Int(20), types.Int(215), types.Int(237493), types.Int(2035753568), types.Int(2384384576063)}, //Int - {types.String("aa"), types.String("aa,ac"), types.String("ag"), types.String("ab,ad,af"), types.String("ag,ah")}, //Set - {types.String("00:00:00"), types.String("00:00:01"), types.String("00:01:53"), types.String("68:36:59"), types.String("127:27:10.485214")}, //Time - {types.Uint(20), types.Uint(275), types.Uint(328395), types.Uint(630257298), types.Uint(93897259874)}, //Uint - {types.UUID{3}, types.UUID{3, 13}, types.UUID{128, 238, 82, 12}, types.UUID{31, 54, 23, 13, 63, 43}, types.UUID{83, 64, 21, 14, 42, 6, 35, 7, 54, 234, 6, 32, 1, 4, 2, 4}}, //Uuid + {types.Decimal(decimal.RequireFromString("0")), //Decimal + types.Decimal(decimal.RequireFromString("-1.5")), + types.Decimal(decimal.RequireFromString("4723245")), + types.Decimal(decimal.RequireFromString("-1076416.875")), + types.Decimal(decimal.RequireFromString("198728394234798423466321.27349757"))}, + {types.Uint(1), types.Uint(3), types.Uint(5), types.Uint(7), types.Uint(8)}, //Enum + {types.Float(1.0), types.Float(65513.75), types.Float(4293902592), types.Float(4.58E71), types.Float(7.172E285)}, //Float + {types.InlineBlob{0}, types.InlineBlob{21}, types.InlineBlob{1, 17}, types.InlineBlob{72, 42}, types.InlineBlob{21, 122, 236}}, //InlineBlob + {types.Int(20), types.Int(215), types.Int(237493), types.Int(2035753568), types.Int(2384384576063)}, //Int + {types.Uint(1), types.Uint(5), types.Uint(64), types.Uint(42), types.Uint(192)}, //Set + {types.Int(0), types.Int(1000000 /*"00:00:01"*/), types.Int(113000000 /*"00:01:53"*/), types.Int(247019000000 /*"68:36:59"*/), types.Int(458830485214 /*"127:27:10.485214"*/)}, //Time + {types.Uint(20), types.Uint(275), types.Uint(328395), types.Uint(630257298), types.Uint(93897259874)}, //Uint + {types.UUID{3}, types.UUID{3, 13}, types.UUID{128, 238, 82, 12}, types.UUID{31, 54, 23, 13, 63, 43}, types.UUID{83, 64, 21, 14, 42, 6, 35, 7, 54, 234, 6, 32, 1, 4, 2, 4}}, //Uuid //{types.String([]byte{1}), types.String([]byte{42, 52}), types.String([]byte{84, 32, 13, 63, 12, 86}), //VarBinary // types.String([]byte{1, 32, 235, 64, 32, 23, 45, 76}), types.String([]byte{123, 234, 34, 223, 76, 35, 32, 12, 84, 26, 15, 34, 65, 86, 45, 23, 43, 12, 76, 154, 234, 76, 34})}, {types.String(""), types.String("a"), types.String("abc"), //VarString diff --git a/go/libraries/doltcore/schema/typeinfo/varstring_test.go b/go/libraries/doltcore/schema/typeinfo/varstring_test.go index 9e682afbea..b6d3bcd921 100644 --- a/go/libraries/doltcore/schema/typeinfo/varstring_test.go +++ b/go/libraries/doltcore/schema/typeinfo/varstring_test.go @@ -22,7 +22,6 @@ import ( "github.com/src-d/go-mysql-server/sql" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "vitess.io/vitess/go/sqltypes" "github.com/liquidata-inc/dolt/go/store/types" ) @@ -264,26 +263,3 @@ func TestVarStringParseValue(t *testing.T) { }) } } - -func generateVarStringTypes(t *testing.T, numOfTypes uint16) []TypeInfo { - var res []TypeInfo - loop(t, 1, 500, numOfTypes, func(i int64) { - rts := false - if i%2 == 0 { - rts = true - } - res = append(res, generateVarStringType(t, i, rts)) - }) - return res -} - -func generateVarStringType(t *testing.T, length int64, rts bool) *varStringType { - require.True(t, length > 0) - if rts { - t, err := sql.CreateStringWithDefaults(sqltypes.Char, length) - if err == nil { - return &varStringType{t} - } - } - return &varStringType{sql.MustCreateStringWithDefaults(sqltypes.VarChar, length)} -} diff --git a/go/libraries/doltcore/sqle/indexes.go b/go/libraries/doltcore/sqle/indexes.go index 95b11647fa..48c4c19c73 100644 --- a/go/libraries/doltcore/sqle/indexes.go +++ b/go/libraries/doltcore/sqle/indexes.go @@ -23,7 +23,6 @@ import ( "github.com/liquidata-inc/dolt/go/libraries/doltcore/row" "github.com/liquidata-inc/dolt/go/libraries/doltcore/schema" - "github.com/liquidata-inc/dolt/go/store/types" ) // IndexDriver implementation. Not ready for prime time. @@ -96,23 +95,18 @@ type doltIndex struct { } func (di *doltIndex) Get(key ...interface{}) (sql.IndexLookup, error) { - taggedVals, err := keyColsToTuple(di.sch, key) - if err != nil { - return nil, err - } - - return &doltIndexLookup{di, taggedVals}, nil -} - -func keyColsToTuple(sch schema.Schema, key []interface{}) (row.TaggedValues, error) { - if sch.GetPKCols().Size() != len(key) { + if di.sch.GetPKCols().Size() != len(key) { return nil, errors.New("key must specify all columns") } var i int taggedVals := make(row.TaggedValues) - err := sch.GetPKCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) { - taggedVals[tag] = keyColToValue(key[i], col) + err := di.sch.GetPKCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) { + val, err := col.TypeInfo.ConvertValueToNomsValue(key[i]) + if err != nil { + return true, err + } + taggedVals[tag] = val i++ return false, nil }) @@ -121,63 +115,7 @@ func keyColsToTuple(sch schema.Schema, key []interface{}) (row.TaggedValues, err return nil, err } - return taggedVals, nil -} - -func keyColToValue(v interface{}, column schema.Column) types.Value { - // TODO: type conversion - switch column.Kind { - case types.BoolKind: - return types.Bool(v.(bool)) - case types.IntKind: - switch i := v.(type) { - case int: - return types.Int(i) - case int8: - return types.Int(i) - case int16: - return types.Int(i) - case int32: - return types.Int(i) - case int64: - return types.Int(i) - default: - panic(fmt.Sprintf("unhandled type %T", i)) - } - case types.FloatKind: - return types.Float(v.(float64)) - case types.UintKind: - switch i := v.(type) { - case int: - return types.Uint(i) - case int8: - return types.Uint(i) - case int16: - return types.Uint(i) - case int32: - return types.Uint(i) - case int64: - return types.Uint(i) - case uint: - return types.Uint(i) - case uint8: - return types.Uint(i) - case uint16: - return types.Uint(i) - case uint32: - return types.Uint(i) - case uint64: - return types.Uint(i) - default: - panic(fmt.Sprintf("unhandled type %T", i)) - } - case types.UUIDKind: - panic("Implement me") - case types.StringKind: - return types.String(v.(string)) - default: - panic(fmt.Sprintf("unhandled type %T", v)) - } + return &doltIndexLookup{di, taggedVals}, nil } func (*doltIndex) Has(partition sql.Partition, key ...interface{}) (bool, error) { diff --git a/go/store/types/decimal.go b/go/store/types/decimal.go new file mode 100644 index 0000000000..c43b0ac069 --- /dev/null +++ b/go/store/types/decimal.go @@ -0,0 +1,109 @@ +// Copyright 2020 Liquidata, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "context" + + "github.com/shopspring/decimal" + + "github.com/liquidata-inc/dolt/go/store/hash" +) + +type Decimal decimal.Decimal + +func (v Decimal) Value(ctx context.Context) (Value, error) { + return v, nil +} + +func (v Decimal) Equals(other Value) bool { + v2, ok := other.(Decimal) + if !ok { + return false + } + + return decimal.Decimal(v).Equal(decimal.Decimal(v2)) +} + +func (v Decimal) Less(nbf *NomsBinFormat, other LesserValuable) (bool, error) { + if v2, ok := other.(Decimal); ok { + return decimal.Decimal(v).LessThan(decimal.Decimal(v2)), nil + } + return DecimalKind < other.Kind(), nil +} + +func (v Decimal) Hash(nbf *NomsBinFormat) (hash.Hash, error) { + return getHash(v, nbf) +} + +func (v Decimal) isPrimitive() bool { + return true +} + +func (v Decimal) WalkValues(ctx context.Context, cb ValueCallback) error { + return nil +} + +func (v Decimal) WalkRefs(nbf *NomsBinFormat, cb RefCallback) error { + return nil +} + +func (v Decimal) typeOf() (*Type, error) { + return PrimitiveTypeMap[DecimalKind], nil +} + +func (v Decimal) Kind() NomsKind { + return DecimalKind +} + +func (v Decimal) valueReadWriter() ValueReadWriter { + return nil +} + +func (v Decimal) writeTo(w nomsWriter, nbf *NomsBinFormat) error { + encodedDecimal, err := decimal.Decimal(v).GobEncode() + if err != nil { + return err + } + + err = DecimalKind.writeTo(w, nbf) + if err != nil { + return err + } + + w.writeUint16(uint16(len(encodedDecimal))) + w.writeRaw(encodedDecimal) + return nil +} + +func (v Decimal) readFrom(nbf *NomsBinFormat, b *binaryNomsReader) (Value, error) { + size := uint32(b.readUint16()) + db := b.readBytes(size) + dec := decimal.Decimal{} + err := dec.GobDecode(db) + if err != nil { + return nil, err + } + return Decimal(dec), nil +} + +func (v Decimal) skip(nbf *NomsBinFormat, b *binaryNomsReader) { + size := uint32(b.readUint16()) + b.skipBytes(size) +} + +func (v Decimal) HumanReadableString() string { + return decimal.Decimal(v).String() +} diff --git a/go/store/types/decimal_test.go b/go/store/types/decimal_test.go new file mode 100644 index 0000000000..fd8b1eb5a2 --- /dev/null +++ b/go/store/types/decimal_test.go @@ -0,0 +1,34 @@ +// Copyright 2020 Liquidata, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "testing" + + "github.com/shopspring/decimal" + "github.com/stretchr/testify/require" +) + +func TestDecimalLibraryEncoding(t *testing.T) { + expectedBytes := []byte{255, 255, 255, 250, 3, 25, 222, 110, 95, 84, 132} + dec := decimal.RequireFromString("-28443125.175428") + bytes, err := dec.GobEncode() + require.NoError(t, err) + require.Equal(t, expectedBytes, bytes) + expectedDec := decimal.Decimal{} + err = expectedDec.GobDecode(expectedBytes) + require.NoError(t, err) + require.True(t, expectedDec.Equal(dec)) +} diff --git a/go/store/types/map_test.go b/go/store/types/map_test.go index 615bb52c68..c40ecf3877 100644 --- a/go/store/types/map_test.go +++ b/go/store/types/map_test.go @@ -31,6 +31,7 @@ import ( "time" "github.com/google/uuid" + "github.com/shopspring/decimal" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" @@ -1668,6 +1669,30 @@ func TestMapOrdering(t *testing.T) { Timestamp(time.Unix(9000, 0).UTC()), }, ) + + testMapOrder(assert, vrw, + PrimitiveTypeMap[DecimalKind], PrimitiveTypeMap[StringKind], + []Value{ + Decimal(decimal.RequireFromString("-99.125434")), String("unused"), + Decimal(decimal.RequireFromString("482.124")), String("unused"), + Decimal(decimal.RequireFromString("858093.12654")), String("unused"), + Decimal(decimal.RequireFromString("1")), String("unused"), + Decimal(decimal.RequireFromString("-99.125432")), String("unused"), + Decimal(decimal.RequireFromString("0")), String("unused"), + Decimal(decimal.RequireFromString("-123845")), String("unused"), + Decimal(decimal.RequireFromString("-99.125433")), String("unused"), + }, + []Value{ + Decimal(decimal.RequireFromString("-123845")), + Decimal(decimal.RequireFromString("-99.125434")), + Decimal(decimal.RequireFromString("-99.125433")), + Decimal(decimal.RequireFromString("-99.125432")), + Decimal(decimal.RequireFromString("0")), + Decimal(decimal.RequireFromString("1")), + Decimal(decimal.RequireFromString("482.124")), + Decimal(decimal.RequireFromString("858093.12654")), + }, + ) } func TestMapEmpty(t *testing.T) { diff --git a/go/store/types/noms_kind.go b/go/store/types/noms_kind.go index 5c05db2ee1..af26427f3b 100644 --- a/go/store/types/noms_kind.go +++ b/go/store/types/noms_kind.go @@ -54,6 +54,7 @@ const ( TupleKind InlineBlobKind TimestampKind + DecimalKind UnknownKind NomsKind = 255 ) @@ -79,6 +80,7 @@ var KindToType = map[NomsKind]Value{ TupleKind: EmptyTuple(Format_7_18), InlineBlobKind: InlineBlob{}, TimestampKind: Timestamp{}, + DecimalKind: Decimal{}, } var KindToTypeSlice []Value @@ -105,6 +107,7 @@ var KindToString = map[NomsKind]string{ TupleKind: "Tuple", InlineBlobKind: "InlineBlob", TimestampKind: "Timestamp", + DecimalKind: "Decimal", } // String returns the name of the kind. diff --git a/go/store/types/type_test.go b/go/store/types/type_test.go index 5db3484659..8f1dbfbd03 100644 --- a/go/store/types/type_test.go +++ b/go/store/types/type_test.go @@ -73,6 +73,7 @@ func TestTypeRefDescribe(t *testing.T) { assert.Equal("Int", mustString(PrimitiveTypeMap[IntKind].Describe(context.Background()))) assert.Equal("Uint", mustString(PrimitiveTypeMap[UintKind].Describe(context.Background()))) assert.Equal("InlineBlob", mustString(PrimitiveTypeMap[InlineBlobKind].Describe(context.Background()))) + assert.Equal("Decimal", mustString(PrimitiveTypeMap[DecimalKind].Describe(context.Background()))) assert.Equal("Map", mustString(mapType.Describe(context.Background()))) assert.Equal("Set", mustString(setType.Describe(context.Background()))) @@ -93,6 +94,7 @@ func TestTypeOrdered(t *testing.T) { assert.True(isKindOrderedByValue(PrimitiveTypeMap[IntKind].TargetKind())) assert.True(isKindOrderedByValue(PrimitiveTypeMap[UintKind].TargetKind())) assert.True(isKindOrderedByValue(PrimitiveTypeMap[InlineBlobKind].TargetKind())) + assert.True(isKindOrderedByValue(PrimitiveTypeMap[DecimalKind].TargetKind())) assert.True(isKindOrderedByValue(TupleKind)) assert.False(isKindOrderedByValue(PrimitiveTypeMap[BlobKind].TargetKind())) From 30c315d6a058998e2a909a7e708633080f861a09 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Tue, 14 Apr 2020 15:24:44 -0700 Subject: [PATCH 51/60] code review feedback --- .../table/typed/noms/noms_map_updater.go | 37 +------------------ 1 file changed, 2 insertions(+), 35 deletions(-) diff --git a/go/libraries/doltcore/table/typed/noms/noms_map_updater.go b/go/libraries/doltcore/table/typed/noms/noms_map_updater.go index d0b0e03179..678ca0ca28 100644 --- a/go/libraries/doltcore/table/typed/noms/noms_map_updater.go +++ b/go/libraries/doltcore/table/typed/noms/noms_map_updater.go @@ -96,43 +96,10 @@ func (nmu *NomsMapUpdater) GetSchema() schema.Schema { // WriteRow will write a row to a table func (nmu *NomsMapUpdater) WriteRow(ctx context.Context, r row.Row) error { - if nmu.acc == nil { - return errors.New("Attempting to write after closing.") - } - - if err := nmu.ae.Get(); err != nil { - return err - } - - err := func() error { - pk := r.NomsMapKey(nmu.sch) - fieldVals := r.NomsMapValue(nmu.sch) - - nmu.acc.AddEdit(pk, fieldVals) - nmu.count++ - - if nmu.count%maxEdits == 0 { - edits, err := nmu.acc.FinishedEditing() - - if err != nil { - return err - } - - nmu.mapChan <- edits - nmu.acc = types.CreateEditAccForMapEdits(nmu.vrw.Format()) - } - - return nil - }() - - if err != nil { - return err - } - - return nil + return nmu.WriteEdit(ctx, r.NomsMapKey(nmu.sch), r.NomsMapValue(nmu.sch)) } -// WriteRow will write a row to a table +// WriteEdit will write an edit to a table's edit accumulator func (nmu *NomsMapUpdater) WriteEdit(ctx context.Context, pk types.LesserValuable, fieldVals types.Valuable) error { if nmu.acc == nil { return errors.New("Attempting to write after closing.") From 3babdc7bca62ac67219e219dcffca5bad33fd3ad Mon Sep 17 00:00:00 2001 From: Oscar Batori Date: Wed, 15 Apr 2020 01:11:18 -0700 Subject: [PATCH 52/60] bumped version for 0.16.0 release --- go/cmd/dolt/dolt.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go/cmd/dolt/dolt.go b/go/cmd/dolt/dolt.go index fc0b418ec6..9242978c10 100644 --- a/go/cmd/dolt/dolt.go +++ b/go/cmd/dolt/dolt.go @@ -41,7 +41,7 @@ import ( ) const ( - Version = "0.15.2" + Version = "0.16.0" ) var dumpDocsCommand = &commands.DumpDocsCmd{} From ea7e0e159083eaac71bb13960217fceef82ba453 Mon Sep 17 00:00:00 2001 From: Aaron Son Date: Wed, 15 Apr 2020 11:28:24 -0700 Subject: [PATCH 53/60] go/utils/publishrelease: Run the builds in a docker container to a get managed toolchain. --- go/.gitignore | 1 + go/utils/publishrelease/publishrelease.sh | 8 +++++++- 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 go/.gitignore diff --git a/go/.gitignore b/go/.gitignore new file mode 100644 index 0000000000..1fcb1529f8 --- /dev/null +++ b/go/.gitignore @@ -0,0 +1 @@ +out diff --git a/go/utils/publishrelease/publishrelease.sh b/go/utils/publishrelease/publishrelease.sh index d05f0709b3..202072ac63 100755 --- a/go/utils/publishrelease/publishrelease.sh +++ b/go/utils/publishrelease/publishrelease.sh @@ -6,6 +6,11 @@ set -o pipefail script_dir=$(dirname "$0") cd $script_dir/../.. +docker run --rm -v `pwd`:/src golang:1.14.2-buster /bin/bash -c ' +set -e +set -o pipefail +apt-get update && apt-get install -y zip +cd /src BINS="dolt git-dolt git-dolt-smudge" OSES="windows linux darwin" ARCHS="386 amd64" @@ -34,8 +39,9 @@ done render_install_sh() { local parsed=(`grep "Version = " ./cmd/dolt/dolt.go`) local DOLT_VERSION=`eval echo ${parsed[2]}` - sed 's|__DOLT_VERSION__|'"$DOLT_VERSION"'|' utils/publishrelease/install.sh + sed '\''s|__DOLT_VERSION__|'\''"$DOLT_VERSION"'\''|'\'' utils/publishrelease/install.sh } render_install_sh > out/install.sh chmod 755 out/install.sh +' From fd097b388905ced05c257485fdf9969cfbad4a6c Mon Sep 17 00:00:00 2001 From: Aaron Son Date: Wed, 15 Apr 2020 11:38:34 -0700 Subject: [PATCH 54/60] bats/creds.bats: Another pass at working on Windows. --- bats/creds.bats | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/bats/creds.bats b/bats/creds.bats index c184cdb815..80a6a60200 100644 --- a/bats/creds.bats +++ b/bats/creds.bats @@ -89,26 +89,26 @@ teardown() { } @test "can import cred from good jwk file" { - dolt creds import `batshelper creds/known-good.jwk` + dolt creds import "$BATS_TEST_DIRNAME/helper/creds/known-good.jwk" } @test "can import cred from good jwk stdin" { - dolt creds import <`batshelper creds/known-good.jwk` + dolt creds import < "$BATS_TEST_DIRNAME/helper/creds/known-good.jwk" } @test "import cred of corrupted jwk from file fails" { - run dolt creds import `batshelper creds/known-truncated.jwk` + run dolt creds import "$BATS_TEST_DIRNAME/helper/creds/known-truncated.jwk" [ "$status" -eq 1 ] - run dolt creds import `batshelper creds/known-decapitated.jwk` + run dolt creds import "$BATS_TEST_DIRNAME/helper/creds/known-decapitated.jwk" [ "$status" -eq 1 ] run dolt creds import does-not-exist [ "$status" -eq 1 ] } @test "import cred of corrupted jwk from stdin fails" { - run dolt creds import <`batshelper creds/known-truncated.jwk` + run dolt creds import <"$BATS_TEST_DIRNAME/helper/creds/known-truncated.jwk" [ "$status" -eq 1 ] - run dolt creds import <`batshelper creds/known-decapitated.jwk` + run dolt creds import <"$BATS_TEST_DIRNAME/helper/creds/known-decapitated.jwk" [ "$status" -eq 1 ] run dolt creds import Date: Wed, 15 Apr 2020 11:55:08 -0700 Subject: [PATCH 55/60] Added skipped bats test for DATETIME support in schema import --- bats/helper/1pk-datetime.csv | 4 ++++ bats/schema-import.bats | 10 +++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) create mode 100644 bats/helper/1pk-datetime.csv diff --git a/bats/helper/1pk-datetime.csv b/bats/helper/1pk-datetime.csv new file mode 100644 index 0000000000..2144cbe595 --- /dev/null +++ b/bats/helper/1pk-datetime.csv @@ -0,0 +1,4 @@ +pk, test_date +0, 2013-09-24 00:01:35 +1, "2011-10-24 13:17:42" +2, 2018-04-13 \ No newline at end of file diff --git a/bats/schema-import.bats b/bats/schema-import.bats index 6da5f6f826..e11dc0382c 100755 --- a/bats/schema-import.bats +++ b/bats/schema-import.bats @@ -148,7 +148,7 @@ teardown() { } @test "schema import with strings in csv" { - # This CSV has queoted integers for the primary key ie "0","foo",... and + # This CSV has quoted integers for the primary key ie "0","foo",... and # "1","bar",... run dolt schema import -r --keep-types --pks=pk test `batshelper 1pk5col-strings.csv` [ "$status" -eq 0 ] @@ -163,3 +163,11 @@ teardown() { [[ "$output" =~ "\`c6\` LONGTEXT" ]] || false [[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false } + +@test "schema import supports dates andf times" { + run dolt schema import -c --pks=pk test `batshelper 1pk-datetime.csv` + [ "$status" -eq 0 ] + [ "${#lines[@]}" -eq 6 ] + skip "schema import does not support datetime" + [[ "$output" =~ "DATETIME" ]] || false; +} From 082c95db22bab9cb1279a0c1c78e3f8e3a46c289 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Wed, 15 Apr 2020 11:59:49 -0700 Subject: [PATCH 56/60] speacial case init commits --- go/libraries/doltcore/rebase/rebase_tag.go | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/go/libraries/doltcore/rebase/rebase_tag.go b/go/libraries/doltcore/rebase/rebase_tag.go index e2830e12d7..0dd24dd886 100644 --- a/go/libraries/doltcore/rebase/rebase_tag.go +++ b/go/libraries/doltcore/rebase/rebase_tag.go @@ -39,7 +39,7 @@ const diffBufSize = 4096 // { tableName -> { oldTag -> newTag }} type TagMapping map[string]map[uint64]uint64 -// NeedsUniqueTagMigration checks if a repo was created before the unique tags constraint and migrates it if necessary. +// NeedsUniqueTagMigration checks if a repo needs a unique tags migration func NeedsUniqueTagMigration(ctx context.Context, ddb *doltdb.DoltDB) (bool, error) { bb, err := ddb.GetBranches(ctx) @@ -60,6 +60,16 @@ func NeedsUniqueTagMigration(ctx context.Context, ddb *doltdb.DoltDB) (bool, err return false, err } + // check if this head commit is an init commit + n, err := c.NumParents() + if err != nil { + return false, err + } + if n == 0 { + // init commits don't need migration + continue + } + r, err := c.GetRootValue() if err != nil { From c102795f947b7858367ec10643ff50295ff0dc12 Mon Sep 17 00:00:00 2001 From: Andy Arthur Date: Wed, 15 Apr 2020 12:06:06 -0700 Subject: [PATCH 57/60] added test for init commits --- .../test_files/bats/compatibility.bats | 14 ++++++++++++++ bats/compatibility/test_files/setup_repo.sh | 2 ++ 2 files changed, 16 insertions(+) diff --git a/bats/compatibility/test_files/bats/compatibility.bats b/bats/compatibility/test_files/bats/compatibility.bats index aec3e5f6ee..896f9138ff 100755 --- a/bats/compatibility/test_files/bats/compatibility.bats +++ b/bats/compatibility/test_files/bats/compatibility.bats @@ -163,3 +163,17 @@ teardown() { dolt sql -q 'drop table abc2' } + + +@test "dolt migrate no-data" { + # this will fail for older dolt versions but BATS will swallow the error + run dolt migrate + + dolt checkout no-data + run dolt sql -q 'show tables;' + [ "$status" -eq 0 ] + [[ "$output" =~ "+-------+" ]] || false + [[ "$output" =~ "| Table |" ]] || false + [[ "$output" =~ "+-------+" ]] || false + [[ "$output" =~ "+-------+" ]] || false +} \ No newline at end of file diff --git a/bats/compatibility/test_files/setup_repo.sh b/bats/compatibility/test_files/setup_repo.sh index 9ff3645981..42a2022034 100755 --- a/bats/compatibility/test_files/setup_repo.sh +++ b/bats/compatibility/test_files/setup_repo.sh @@ -7,6 +7,8 @@ cd "$1" dolt init +dolt branch no-data + dolt sql < Date: Wed, 15 Apr 2020 12:40:41 -0700 Subject: [PATCH 58/60] Revert "bats/creds.bats: Another pass at working on Windows." This reverts commit fd097b388905ced05c257485fdf9969cfbad4a6c. --- bats/creds.bats | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/bats/creds.bats b/bats/creds.bats index 80a6a60200..c184cdb815 100644 --- a/bats/creds.bats +++ b/bats/creds.bats @@ -89,26 +89,26 @@ teardown() { } @test "can import cred from good jwk file" { - dolt creds import "$BATS_TEST_DIRNAME/helper/creds/known-good.jwk" + dolt creds import `batshelper creds/known-good.jwk` } @test "can import cred from good jwk stdin" { - dolt creds import < "$BATS_TEST_DIRNAME/helper/creds/known-good.jwk" + dolt creds import <`batshelper creds/known-good.jwk` } @test "import cred of corrupted jwk from file fails" { - run dolt creds import "$BATS_TEST_DIRNAME/helper/creds/known-truncated.jwk" + run dolt creds import `batshelper creds/known-truncated.jwk` [ "$status" -eq 1 ] - run dolt creds import "$BATS_TEST_DIRNAME/helper/creds/known-decapitated.jwk" + run dolt creds import `batshelper creds/known-decapitated.jwk` [ "$status" -eq 1 ] run dolt creds import does-not-exist [ "$status" -eq 1 ] } @test "import cred of corrupted jwk from stdin fails" { - run dolt creds import <"$BATS_TEST_DIRNAME/helper/creds/known-truncated.jwk" + run dolt creds import <`batshelper creds/known-truncated.jwk` [ "$status" -eq 1 ] - run dolt creds import <"$BATS_TEST_DIRNAME/helper/creds/known-decapitated.jwk" + run dolt creds import <`batshelper creds/known-decapitated.jwk` [ "$status" -eq 1 ] run dolt creds import Date: Wed, 15 Apr 2020 12:41:40 -0700 Subject: [PATCH 59/60] another pass at windows. --- bats/creds.bats | 14 +++++++------- bats/helper/{creds => }/known-decapitated.jwk | 0 bats/helper/{creds => }/known-good.jwk | 0 bats/helper/{creds => }/known-truncated.jwk | 0 4 files changed, 7 insertions(+), 7 deletions(-) rename bats/helper/{creds => }/known-decapitated.jwk (100%) rename bats/helper/{creds => }/known-good.jwk (100%) rename bats/helper/{creds => }/known-truncated.jwk (100%) diff --git a/bats/creds.bats b/bats/creds.bats index c184cdb815..6daa511c5a 100644 --- a/bats/creds.bats +++ b/bats/creds.bats @@ -89,26 +89,26 @@ teardown() { } @test "can import cred from good jwk file" { - dolt creds import `batshelper creds/known-good.jwk` + dolt creds import `batshelper known-good.jwk` } @test "can import cred from good jwk stdin" { - dolt creds import <`batshelper creds/known-good.jwk` + dolt creds import <"$BATS_TEST_DIRNAME/helper/known-good.jwk" } @test "import cred of corrupted jwk from file fails" { - run dolt creds import `batshelper creds/known-truncated.jwk` + run dolt creds import `batshelper known-truncated.jwk` [ "$status" -eq 1 ] - run dolt creds import `batshelper creds/known-decapitated.jwk` + run dolt creds import `batshelper known-decapitated.jwk` [ "$status" -eq 1 ] run dolt creds import does-not-exist [ "$status" -eq 1 ] } @test "import cred of corrupted jwk from stdin fails" { - run dolt creds import <`batshelper creds/known-truncated.jwk` + run dolt creds import <"$BATS_TEST_DIRNAME/helper/known-truncated.jwk" [ "$status" -eq 1 ] - run dolt creds import <`batshelper creds/known-decapitated.jwk` + run dolt creds import <"$BATS_TEST_DIRNAME/helper/known-decapitated.jwk" [ "$status" -eq 1 ] run dolt creds import Date: Wed, 15 Apr 2020 13:24:31 -0700 Subject: [PATCH 60/60] Add json to supported output types in dolt sql --help --- go/cmd/dolt/commands/sql.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/go/cmd/dolt/commands/sql.go b/go/cmd/dolt/commands/sql.go index ec116e7456..89bcb2d66b 100644 --- a/go/cmd/dolt/commands/sql.go +++ b/go/cmd/dolt/commands/sql.go @@ -131,7 +131,7 @@ func (cmd SqlCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) er func (cmd SqlCmd) createArgParser() *argparser.ArgParser { ap := argparser.NewArgParser() ap.SupportsString(queryFlag, "q", "SQL query to run", "Runs a single query and exits") - ap.SupportsString(formatFlag, "r", "result output format", "How to format result output. Valid values are tabular, csv. Defaults to tabular. ") + ap.SupportsString(formatFlag, "r", "result output format", "How to format result output. Valid values are tabular, csv, json. Defaults to tabular. ") ap.SupportsString(saveFlag, "s", "saved query name", "Used with --query, save the query to the query catalog with the name provided. Saved queries can be examined in the dolt_query_catalog system table.") ap.SupportsString(executeFlag, "x", "saved query name", "Executes a saved query with the given name") ap.SupportsFlag(listSavedFlag, "l", "Lists all saved queries") @@ -464,7 +464,7 @@ func getFormat(format string) (resultFormat, errhand.VerboseError) { case "json": return formatJson, nil default: - return formatTabular, errhand.BuildDError("Invalid argument for --result-format. Valid values are tabular,csv").Build() + return formatTabular, errhand.BuildDError("Invalid argument for --result-format. Valid values are tabular, csv, json").Build() } }