merged master and resolved conflicts

This commit is contained in:
Andy Arthur
2020-03-24 01:00:12 -07:00
parent ed7c10d760
commit f6b1e832ea
92 changed files with 2613 additions and 942 deletions
+1
View File
@@ -0,0 +1 @@
helper/__pycache__
+41
View File
@@ -680,3 +680,44 @@ DELIM
[ "${#lines[@]}" -eq 6 ]
[[ "$output" =~ "<NULL>" ]] || false
}
@test "display correct merge stats" {
dolt checkout -b test-branch
dolt add test
dolt commit -m "added test table"
dolt checkout master
dolt branch test-branch-m
dolt branch test-branch-alt
dolt checkout test-branch-m
dolt merge test-branch
dolt checkout test-branch-alt
dolt sql -q "CREATE TABLE test_alt (pk BIGINT NOT NULL COMMENT 'tag:0', c1 BIGINT COMMENT 'tag:1', PRIMARY KEY (pk));"
dolt add test_alt
dolt commit -m 'add test_alt'
dolt checkout test-branch-m
dolt merge test-branch-alt
dolt add test_alt
dolt commit -m 'merge test_alt'
dolt checkout test-branch
dolt sql -q "insert into test values (0, 1, 2, 3, 4, 5)"
dolt add test
dolt commit -m "added row to test"
dolt checkout test-branch-m
run dolt merge test-branch
[ "$status" -eq 0 ]
[ "${lines[1]}" = "test | 0 " ]
skip "Row addition not totalled correctly" [ "${lines[2]}" = "1 tables changed, 1 rows added(+), 0 rows modified(*), 0 rows deleted(-)" ]
}
@test "checkout table with branch of same name" {
dolt checkout -b test
dolt add .
dolt commit -m "added test table"
dolt sql -q "insert into test values (0, 1, 2, 3, 4, 5)"
run dolt checkout test
skip "Should distinguish between branch name and table name" [ "$status" -eq 0 ]
[ "${lines[0]}" != "Already on branch 'test'" ]
run dolt status
[ "$status" -eq 0 ]
[ "${lines[4]}" != " modified: test" ]
}
@@ -130,4 +130,4 @@ teardown() {
[[ "$output" =~ "Import completed successfully." ]] || false
dolt sql -q 'drop table abc2'
}
}
+99
View File
@@ -0,0 +1,99 @@
import csv
import sys
import time
import mysql.connector
from io import StringIO
from multiprocessing import Process
def _connect(user, host, port, database):
    # Open a raw mysql.connector connection; callers are responsible for
    # handling/translating any connection errors.
    return mysql.connector.connect(user=user, host=host, port=port, database=database)
def _print_err_and_exit(e):
print(e, file=sys.stderr)
sys.exit(1)
def csv_to_row_maps(csv_str):
    """Parse a CSV string into a list of dicts keyed by the header row.

    Literal backslash-n sequences in the input are converted to real
    newlines first, so multi-line CSV can be passed through single-line
    shell arguments.
    """
    normalized = csv_str.replace('\\n', '\n')
    return list(csv.DictReader(StringIO(normalized)))
class DoltConnection(object):
    """Thin wrapper around a mysql.connector connection to a Dolt SQL server."""

    def __init__(self, user='root', host='127.0.0.1', port=3306, database='dolt'):
        # Connection parameters only; the actual connection is made in connect().
        self.user = user
        self.host = host
        self.port = port
        self.database = database
        self.cnx = None

    def connect(self):
        """Open the connection; on failure print the error and exit(1)."""
        try:
            self.cnx = _connect(self.user, self.host, self.port, self.database)
        except BaseException as e:
            _print_err_and_exit(e)

    def close(self):
        """Close the underlying connection."""
        self.cnx.close()

    def query(self, query_str):
        """Execute query_str, returning rows as a list of {column: str(value)} dicts.

        Statements that produce no result set return []. Any failure prints
        the error and exits the process with status 1.
        """
        try:
            cursor = self.cnx.cursor()
            cursor.execute(query_str)
            if cursor.description is None:
                return []
            return [
                {col: str(row[i]) for i, col in enumerate(cursor.column_names)}
                for row in cursor.fetchall()
            ]
        except BaseException as e:
            _print_err_and_exit(e)
class InfiniteRetryConnection(DoltConnection):
    """DoltConnection whose connect() retries forever until the server answers.

    Intended to run as a subprocess target: the method returns (letting the
    process exit 0) only after one connection has been accepted; that probe
    connection is immediately closed again.
    """

    def connect(self):
        while True:
            try:
                cnx = _connect(user=self.user, host=self.host, port=self.port, database=self.database)
            except BaseException:
                # Server not up yet; keep retrying.
                continue
            self.cnx = cnx
            try:
                cnx.close()
            except BaseException:
                # Best-effort close of the probe connection.
                pass
            return
def wait_for_connection(user='root', host='127.0.0.1', port=3306, database='dolt', timeout_ms=5000):
    """Block until a MySQL connection to the given server can be established.

    Spawns a child process that retries connecting forever. If the child has
    not exited (i.e. connected) within timeout_ms, it is terminated and this
    process prints an error and exits with status 1. A non-zero child exit
    code is also treated as failure.
    """
    timeoutf = timeout_ms / 1000.0
    exit_zero_on_connect = InfiniteRetryConnection(user=user, host=host, port=port, database=database)
    cnx_proc = Process(target=exit_zero_on_connect.connect)
    cnx_proc.start()
    # join() returns after the child exits or the timeout elapses.
    cnx_proc.join(timeout=timeoutf)
    if cnx_proc.exitcode is None:
        # join timed out: the child is still retrying, so give up.
        cnx_proc.terminate()
        _print_err_and_exit(Exception("Failed to establish connection in time."))
    elif cnx_proc.exitcode != 0:
        _print_err_and_exit(Exception("Connection process exited with exit code %d." % cnx_proc.exitcode))
+90
View File
@@ -0,0 +1,90 @@
# SERVER_REQS_INSTALLED records whether the python requirements for the SQL
# server tests are available ("TRUE"/"FALSE"); SERVER_PID holds the PID of
# the background dolt sql-server started by start_sql_server.
SERVER_REQS_INSTALLED="FALSE"
SERVER_PID=""

# Sets SERVER_REQS_INSTALLED to "TRUE" when python3 can import
# mysql.connector, "FALSE" otherwise.
set_server_reqs_installed() {
    SERVER_REQS_INSTALLED=$(python3 -c "
requirements_installed = True
try:
    import mysql.connector
except:
    requirements_installed = False
print(str(requirements_installed).upper())
")
}
# Blocks until a connection to the dolt sql-server succeeds, delegating to
# the python helper's wait_for_connection.
# * param1 is the server port
# * param2 is the timeout in milliseconds
wait_for_connection() {
    PYTEST_DIR="$BATS_TEST_DIRNAME/helper"
    python3 -c "
import os
import sys
args = sys.argv[sys.argv.index('--') + 1:]
working_dir, port_str, timeout_ms = args
os.chdir(working_dir)
from pytest import wait_for_connection
wait_for_connection(port=int(port_str), timeout_ms=int(timeout_ms))
" -- $PYTEST_DIR $1 $2
}
# Starts a dolt sql-server in the background on a port derived from the
# current shell's PID (so concurrent test runs get distinct ports), records
# its PID in SERVER_PID, and waits up to 5 seconds for it to accept
# connections.
start_sql_server() {
    let PORT="$$ % (65536-1024) + 1024"
    dolt sql-server --port=$PORT &
    SERVER_PID=$!
    wait_for_connection $PORT 5000
}
# Stops the background sql-server started by start_sql_server.
stop_sql_server() {
    # The original recomputed PORT here but never used it; only the PID is
    # needed. Guard the kill so a setup failure (empty SERVER_PID) does not
    # kill an arbitrary process.
    if [ -n "$SERVER_PID" ]; then
        kill $SERVER_PID
        SERVER_PID=""
    fi
}
# server_query connects to a running mysql server, executes a query and compares the results against what is expected.
# In the event that the results do not match expectations, the python process will exit with an exit code of 1
# * param1 is the query_str
# * param2 is a csv representing the expected result set. If a query is not expected to have a result set "" should
#   be passed.
server_query() {
    # Must derive the same port as start_sql_server (same PID formula).
    let PORT="$$ % (65536-1024) + 1024"
    PYTEST_DIR="$BATS_TEST_DIRNAME/helper"
    python3 -c "
import os
import sys
args = sys.argv[sys.argv.index('--') + 1:]
print(args)
working_dir, port_str, query_str, query_results = args
os.chdir(working_dir)
from pytest import DoltConnection, csv_to_row_maps
expected_rows = csv_to_row_maps(query_results)
dc = DoltConnection(port=int(port_str))
dc.connect()
print('executing:', query_str)
actual_rows = dc.query(query_str)
print('expected:', expected_rows, '\n actual:', actual_rows)
if expected_rows != actual_rows:
    print('expected:', expected_rows, '\n actual:', actual_rows)
    sys.exit(1)
" -- $PYTEST_DIR $PORT "$1" "$2"
}
# update_query runs an update query and should be called with 2 parameters
# * param1 is the query string
# * param2 is the expected number of rows affected
update_query() {
    # The server reports an UPDATE as a matched/updated result row.
    server_query "$1" "matched,updated\n$2,$2"
}

# insert_query runs an insert query and should be called with 2 parameters
# * param1 is the query string
# * param2 is the expected number of rows inserted
insert_query() {
    # The server reports an INSERT as a single updated-count row.
    server_query "$1" "updated\n$2"
}
+119
View File
@@ -0,0 +1,119 @@
#!/usr/bin/env bats
load $BATS_TEST_DIRNAME/helper/common.bash

# Creates two identically-shaped tables (test1, test2) and commits them so
# every test starts from a clean master with both tables present.
setup() {
    setup_common
    dolt sql <<SQL
CREATE TABLE test1 (
  pk BIGINT NOT NULL,
  c1 BIGINT,
  c2 BIGINT,
  PRIMARY KEY (pk)
);
SQL
    dolt sql <<SQL
CREATE TABLE test2 (
  pk BIGINT NOT NULL,
  c1 BIGINT,
  c2 BIGINT,
  PRIMARY KEY (pk)
);
SQL
    dolt add .
    dolt commit -m "added tables"
}

teardown() {
    teardown_common
}
@test "3way merge doesn't stomp working changes" {
dolt checkout -b merge_branch
dolt SQL -q "INSERT INTO test1 values (0,1,2)"
dolt add test1
dolt commit -m "add pk 0 to test1"
dolt checkout master
dolt SQL -q "INSERT INTO test1 values (1,2,3)"
dolt add test1
dolt commit -m "add pk 1 to test1"
dolt SQL -q "INSERT INTO test2 values (0,1,2)"
run dolt status
[ "$status" -eq 0 ]
[[ "$output" =~ "test2" ]] || false
[[ ! "$output" =~ "test1" ]] || false
run dolt merge merge_branch
[ "$status" -eq 0 ]
[[ ! "$output" =~ "Fast-forward" ]] || false
run dolt status
echo -e "\n\noutput: " $output "\n\n"
[ "$status" -eq 0 ]
[[ "$output" =~ "test2" ]] || false
[[ "$output" =~ "test1" ]] || false
}
@test "ff merge doesn't stomp working changes" {
dolt checkout -b merge_branch
dolt SQL -q "INSERT INTO test1 values (0,1,2)"
dolt add test1
dolt commit -m "modify test1"
dolt checkout master
dolt SQL -q "INSERT INTO test2 values (0,1,2)"
run dolt status
[ "$status" -eq 0 ]
[[ "$output" =~ "test2" ]] || false
[[ ! "$output" =~ "test1" ]] || false
run dolt merge merge_branch
[ "$status" -eq 0 ]
[[ "$output" =~ "Fast-forward" ]] || false
run dolt status
[ "$status" -eq 0 ]
[[ "$output" =~ "test2" ]] || false
[[ ! "$output" =~ "test1" ]] || false
}
@test "3way merge rejected when working changes touch same tables" {
dolt checkout -b merge_branch
dolt SQL -q "INSERT INTO test1 values (0,1,2)"
dolt add test1
dolt commit -m "add pk 0 to test1"
dolt checkout master
dolt SQL -q "INSERT INTO test2 values (0,1,2)"
dolt add test2
dolt commit -m "add pk 0 to test2"
dolt SQL -q "INSERT INTO test1 values (1,2,3)"
run dolt status
[ "$status" -eq 0 ]
[[ "$output" =~ "test1" ]] || false
[[ ! "$output" =~ "test2" ]] || false
run dolt merge merge_branch
[ "$status" -eq 1 ]
}
@test "ff merge rejected when working changes touch same tables" {
dolt checkout -b merge_branch
dolt SQL -q "INSERT INTO test1 values (0,1,2)"
dolt add test1
dolt commit -m "modify test1"
dolt checkout master
dolt ls
dolt SQL -q "INSERT INTO test1 values (1,2,3)"
run dolt status
[ "$status" -eq 0 ]
[[ "$output" =~ "test1" ]] || false
[[ ! "$output" =~ "test2" ]] || false
run dolt merge merge_branch
[ "$status" -eq 1 ]
}
+41
View File
@@ -96,3 +96,44 @@ teardown() {
[[ "$output" =~ "name_a" ]] || false
[[ "$output" =~ "name_b" ]] || false
}
@test "executed saved" {
Q1="select pk, pk1, pk2 from one_pk,two_pk where one_pk.c1=two_pk.c1"
Q1_UPDATED="select pk, pk1, pk2 from one_pk,two_pk where one_pk.c1=two_pk.c1 and pk < 3"
Q2="select pk from one_pk"
dolt sql -q "$Q1" -s name1
dolt sql -q "$Q2" -s name2
# executed Q1 and verify output
EXPECTED=$(echo -e "pk,pk1,pk2\n0,0,0\n1,0,1\n2,1,0\n3,1,1")
run dolt sql -r csv -x name1
[ "$status" -eq 0 ]
[[ "$output" =~ "$EXPECTED" ]] || false
# executed Q2 and verify output
EXPECTED=$(echo -e "pk\n0\n1\n2\n3")
run dolt sql -r csv -x name2
echo $output
[ "$status" -eq 0 ]
[[ "$output" =~ "$EXPECTED" ]] || false
# execute list-saved and verify output
EXPECTED=$(echo -e "id,display_order,name,query,description\nname1,1,name1,\"$Q1\",\nname2,2,name2,$Q2,")
run dolt sql --list-saved -r csv
[ "$status" -eq 0 ]
[[ "$output" =~ "$EXPECTED" ]] || false
# update an existing verify output, and verify query catalog is updated
dolt sql -q "$Q1_UPDATED" -s name1
EXPECTED=$(echo -e "pk,pk1,pk2\n0,0,0\n1,0,1\n2,1,0")
run dolt sql -r csv -x name1
[ "$status" -eq 0 ]
[[ "$output" =~ "$EXPECTED" ]] || false
EXPECTED=$(echo -e "id,display_order,name,query,description\nname1,1,name1,\"$Q1_UPDATED\",\nname2,2,name2,$Q2,")
run dolt sql --list-saved -r csv
[ "$status" -eq 0 ]
echo $output
echo $EXPECTED
[[ "$output" =~ "$EXPECTED" ]] || false
}
+36
View File
@@ -0,0 +1,36 @@
#!/usr/bin/env bats
load $BATS_TEST_DIRNAME/helper/common.bash
load $BATS_TEST_DIRNAME/helper/query-server-common.bash

# Starts a dolt sql-server for each test; torn down afterwards.
setup() {
    skiponwindows "Has dependencies that are missing on the Jenkins Windows installation."
    setup_common
    start_sql_server
}

teardown() {
    skiponwindows "Has dependencies that are missing on the Jenkins Windows installation."
    stop_sql_server
    teardown_common
}

# End-to-end smoke test: DDL, inserts, selects, and updates over the MySQL
# wire protocol. NULLs surface as the string "None" because the python helper
# stringifies every cell with str().
@test "test basic querying via dolt sql-server" {
    skiponwindows "Has dependencies that are missing on the Jenkins Windows installation."
    server_query "SHOW tables" "Table"
    server_query "CREATE TABLE one_pk (
        pk BIGINT NOT NULL COMMENT 'tag:0',
        c1 BIGINT COMMENT 'tag:1',
        c2 BIGINT COMMENT 'tag:2',
        PRIMARY KEY (pk)
    )" ""
    server_query "SHOW tables" "Table\none_pk"
    insert_query "INSERT INTO one_pk (pk) values (0)" 1
    server_query "SELECT * from one_pk ORDER BY pk" "pk,c1,c2\n0,None,None"
    insert_query "INSERT INTO one_pk (pk,c1) values (1,1)" 1
    insert_query "INSERT INTO one_pk (pk,c1,c2) values (2,2,2),(3,3,3)" 2
    server_query "SELECT * from one_pk ORDER by pk" "pk,c1,c2\n0,None,None\n1,1,None\n2,2,2\n3,3,3"
    update_query "UPDATE one_pk SET c2=c1 WHERE c2 is NULL and c1 IS NOT NULL" 1
}
+113 -1
View File
@@ -56,6 +56,110 @@ teardown() {
[ "${#lines[@]}" -eq 8 ]
}
@test "sql AS OF queries" {
dolt add .
dolt commit -m "Initial master commit" --date "2020-03-01T12:00:00Z"
master_commit=`dolt log | head -n1 | cut -d' ' -f2`
dolt sql -q "update one_pk set c1 = c1 + 1"
dolt sql -q "drop table two_pk"
dolt checkout -b new_branch
dolt add .
dolt commit -m "Updated a table, dropped a table" --date "2020-03-01T13:00:00Z"
new_commit=`dolt log | head -n1 | cut -d' ' -f2`
run dolt sql -r csv -q "select pk,c1 from one_pk order by c1"
[ $status -eq 0 ]
[[ "$output" =~ "0,1" ]] || false
[[ "$output" =~ "1,11" ]] || false
[[ "$output" =~ "2,21" ]] || false
[[ "$output" =~ "3,31" ]] || false
run dolt sql -r csv -q "select pk,c1 from one_pk as of 'master' order by c1"
[ $status -eq 0 ]
[[ "$output" =~ "0,0" ]] || false
[[ "$output" =~ "1,10" ]] || false
[[ "$output" =~ "2,20" ]] || false
[[ "$output" =~ "3,30" ]] || false
run dolt sql -r csv -q "select pk,c1 from one_pk as of '$master_commit' order by c1"
[ $status -eq 0 ]
[[ "$output" =~ "0,0" ]] || false
[[ "$output" =~ "1,10" ]] || false
[[ "$output" =~ "2,20" ]] || false
[[ "$output" =~ "3,30" ]] || false
run dolt sql -r csv -q "select count(*) from two_pk as of 'master'"
[ $status -eq 0 ]
[[ "$output" =~ "4" ]] || false
run dolt sql -r csv -q "select count(*) from two_pk as of '$master_commit'"
[ $status -eq 0 ]
[[ "$output" =~ "4" ]] || false
run dolt sql -r csv -q "select pk,c1 from one_pk as of 'HEAD~' order by c1"
[ $status -eq 0 ]
[[ "$output" =~ "0,0" ]] || false
[[ "$output" =~ "1,10" ]] || false
[[ "$output" =~ "2,20" ]] || false
[[ "$output" =~ "3,30" ]] || false
run dolt sql -r csv -q "select pk,c1 from one_pk as of 'new_branch^' order by c1"
[ $status -eq 0 ]
[[ "$output" =~ "0,0" ]] || false
[[ "$output" =~ "1,10" ]] || false
[[ "$output" =~ "2,20" ]] || false
[[ "$output" =~ "3,30" ]] || false
dolt checkout master
run dolt sql -r csv -q "select pk,c1 from one_pk as of 'new_branch' order by c1"
[ $status -eq 0 ]
[[ "$output" =~ "0,1" ]] || false
[[ "$output" =~ "1,11" ]] || false
[[ "$output" =~ "2,21" ]] || false
[[ "$output" =~ "3,31" ]] || false
run dolt sql -r csv -q "select pk,c1 from one_pk as of '$new_commit' order by c1"
[ $status -eq 0 ]
[[ "$output" =~ "0,1" ]] || false
[[ "$output" =~ "1,11" ]] || false
[[ "$output" =~ "2,21" ]] || false
[[ "$output" =~ "3,31" ]] || false
dolt checkout new_branch
run dolt sql -r csv -q "select pk,c1 from one_pk as of CONVERT('2020-03-01 12:00:00', DATETIME) order by c1"
[ $status -eq 0 ]
[[ "$output" =~ "0,0" ]] || false
[[ "$output" =~ "1,10" ]] || false
[[ "$output" =~ "2,20" ]] || false
[[ "$output" =~ "3,30" ]] || false
run dolt sql -r csv -q "select pk,c1 from one_pk as of CONVERT('2020-03-01 12:15:00', DATETIME) order by c1"
[ $status -eq 0 ]
[[ "$output" =~ "0,0" ]] || false
[[ "$output" =~ "1,10" ]] || false
[[ "$output" =~ "2,20" ]] || false
[[ "$output" =~ "3,30" ]] || false
run dolt sql -r csv -q "select pk,c1 from one_pk as of CONVERT('2020-03-01 13:00:00', DATETIME) order by c1"
[ $status -eq 0 ]
[[ "$output" =~ "0,1" ]] || false
[[ "$output" =~ "1,11" ]] || false
[[ "$output" =~ "2,21" ]] || false
[[ "$output" =~ "3,31" ]] || false
run dolt sql -r csv -q "select pk,c1 from one_pk as of CONVERT('2020-03-01 13:15:00', DATETIME) order by c1"
[ $status -eq 0 ]
[[ "$output" =~ "0,1" ]] || false
[[ "$output" =~ "1,11" ]] || false
[[ "$output" =~ "2,21" ]] || false
[[ "$output" =~ "3,31" ]] || false
run dolt sql -r csv -q "select pk,c1 from one_pk as of CONVERT('2020-03-01 11:59:59', DATETIME) order by c1"
[ $status -eq 1 ]
[[ "$output" =~ "not found" ]] || false
}
@test "sql ambiguous column name" {
run dolt sql -q "select pk,pk1,pk2 from one_pk,two_pk where c1=0"
[ "$status" -eq 1 ]
@@ -426,7 +530,6 @@ teardown() {
@test "sql update a datetime column" {
dolt sql -q "insert into has_datetimes (pk) values (1)"
run dolt sql -q "update has_datetimes set date_created='2020-02-11 00:00:00' where pk=1"
skip "Can't use update on a datetime field"
[ $status -eq 0 ]
[[ ! "$output" =~ "Expected GetField expression" ]] || false
}
@@ -468,6 +571,15 @@ teardown() {
[[ ! "$output" =~ "panic: " ]] || false
}
@test "sql delete all rows in table" {
run dolt sql <<SQL
DELETE FROM one_pk;
SELECT count(*) FROM one_pk;
SQL
[ $status -eq 0 ]
[[ "$output" =~ "0" ]] || false
}
@test "sql shell works after failing query" {
skiponwindows "Need to install expect and make this script work on windows."
$BATS_TEST_DIRNAME/sql-works-after-failing-query.expect
+7 -3
View File
@@ -50,11 +50,15 @@ func ParseArgs(ap *argparser.ArgParser, args []string, usagePrinter UsagePrinter
return apr
}
func HelpAndUsagePrinters(commandStr, shortDesc, longDesc string, synopsis []string, ap *argparser.ArgParser) (UsagePrinter, UsagePrinter) {
func HelpAndUsagePrinters(cmdDoc CommandDocumentation) (UsagePrinter, UsagePrinter) {
// TODO handle error states
longDesc, _ := cmdDoc.GetLongDesc(CliFormat)
synopsis, _ := cmdDoc.GetSynopsis(CliFormat)
return func() {
PrintHelpText(commandStr, shortDesc, longDesc, synopsis, ap)
PrintHelpText(cmdDoc.CommandStr, cmdDoc.GetShortDesc(), longDesc, synopsis, cmdDoc.ArgParser)
}, func() {
PrintUsage(commandStr, synopsis, ap)
PrintUsage(cmdDoc.CommandStr, synopsis, cmdDoc.ArgParser)
}
}
+10
View File
@@ -58,6 +58,16 @@ type Command interface {
CreateMarkdown(fs filesys.Filesys, path, commandStr string) error
}
// CommandDocumentationContent stores the static documentation content of a
// command; elsewhere it is transformed into the structs used to generate
// documentation at the command line and in markdown files.
type CommandDocumentationContent struct {
	// ShortDesc is a one-line summary of the command.
	ShortDesc string
	// LongDesc is the full description; it may contain {{.LessThan}}-style
	// placeholders that are templated per output format.
	LongDesc string
	// Synopsis lists usage lines, each without the leading command string.
	Synopsis []string
}
// RepoNotRequiredCommand is an optional interface that commands can implement if the command can be run without
// the current directory being a valid Dolt data repository. Any commands not implementing this interface are
// assumed to require that they be run from a directory containing a Dolt data repository.
+280
View File
@@ -0,0 +1,280 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cli
import (
"bytes"
"fmt"
"strings"
"text/template"
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
)
// commandDocumentForMarkdown holds the pre-rendered sections bound into
// cmdMdDocTempl to produce a command's markdown documentation page.
type commandDocumentForMarkdown struct {
	// Command is the bare command string; used as the page title.
	Command string
	// CommandAndShortDesc is the "`cmd` - short description" line.
	CommandAndShortDesc string
	// Synopsis is the raw-HTML synopsis block.
	Synopsis string
	// Description is the long description rendered for markdown.
	Description string
	// Options is the accumulated, templated options/arguments section.
	Options string
}

// cmdMdDocTempl is the top-level layout of a generated markdown doc page.
var cmdMdDocTempl = `---
title: {{.Command}}
---
## Command
{{.CommandAndShortDesc}}
## Synopsis
{{.Synopsis}}
## Description
{{.Description}}
## Options
{{.Options}}
`
// CmdDocToMd renders the receiver's documentation as a complete markdown
// page using cmdMdDocTempl. Every argument and supported-option description
// is templated for markdown and accumulated into the Options section; an
// error is returned if any description or the page template fails to parse
// or execute.
func (cmdDoc CommandDocumentation) CmdDocToMd() (string, error) {
	// Accumulate the options and args in a string
	options := ""
	if len(cmdDoc.ArgParser.Supported) > 0 || len(cmdDoc.ArgParser.ArgListHelp) > 0 {
		// Iterate across arguments and template them
		for _, kvTuple := range cmdDoc.ArgParser.ArgListHelp {
			arg, desc := kvTuple[0], kvTuple[1]
			templatedDesc, err := templateDocStringHelper(desc, MarkdownFormat)
			if err != nil {
				return "", err
			}
			argStruct := argument{arg, templatedDesc}
			outputStr, err := templateArgument(argStruct)
			if err != nil {
				return "", err
			}
			options += outputStr
		}
		// Iterate across supported options, templating each one of them
		for _, supOpt := range cmdDoc.ArgParser.Supported {
			templatedDesc, err := templateDocStringHelper(supOpt.Desc, MarkdownFormat)
			if err != nil {
				return "", err
			}
			argStruct := supported{supOpt.Abbrev, supOpt.Name, templatedDesc}
			outputStr, err := templateSupported(argStruct)
			if err != nil {
				return "", err
			}
			options += outputStr
		}
	} else {
		options = `No options for this command.`
	}
	cmdMdDoc, cmdMdDocErr := cmdDoc.cmdDocToCmdDocMd(options)
	if cmdMdDocErr != nil {
		// Bug fix: previously returned ("", nil), silently swallowing the error.
		return "", cmdMdDocErr
	}
	templ, templErr := template.New("shortDesc").Parse(cmdMdDocTempl)
	if templErr != nil {
		return "", templErr
	}
	var templBuffer bytes.Buffer
	if err := templ.Execute(&templBuffer, cmdMdDoc); err != nil {
		return "", err
	}
	return templBuffer.String(), nil
}
// CommandDocumentation represents all the data structures required to create
// the documentation for a command, in both CLI help and markdown forms.
type CommandDocumentation struct {
	// The command/sub-command string passed to a command by the caller
	CommandStr string
	// The short description of the command
	ShortDesc string
	// The long description of the command
	LongDesc string
	// The synopsis, an array of strings showing how to use the command
	Synopsis []string
	// The command's argument parser, used to document its options and arguments
	ArgParser *argparser.ArgParser
}
// cmdDocToCmdDocMd assembles the template data for the markdown page,
// rendering the long description and synopsis for markdown output. The
// caller supplies the already-accumulated options section.
func (cmdDoc CommandDocumentation) cmdDocToCmdDocMd(options string) (commandDocumentForMarkdown, error) {
	longDesc, err := cmdDoc.GetLongDesc(MarkdownFormat)
	if err != nil {
		return commandDocumentForMarkdown{}, err
	}
	synopsis, synErr := cmdDoc.GetSynopsis(SynopsisMarkdownFormat)
	if synErr != nil {
		return commandDocumentForMarkdown{}, synErr
	}
	doc := commandDocumentForMarkdown{
		Command:             cmdDoc.CommandStr,
		CommandAndShortDesc: fmt.Sprintf("`%s` - %s\n\n", cmdDoc.CommandStr, cmdDoc.GetShortDesc()),
		Synopsis:            transformSynopsisToHtml(cmdDoc.CommandStr, synopsis),
		Description:         longDesc,
		Options:             options,
	}
	return doc, nil
}
// GetCommandDocumentation builds a CommandDocumentation from a command
// string, its static documentation content, and its argument parser.
func GetCommandDocumentation(commandStr string, cmdDoc CommandDocumentationContent, argParser *argparser.ArgParser) CommandDocumentation {
	doc := CommandDocumentation{
		CommandStr: commandStr,
		ArgParser:  argParser,
	}
	doc.ShortDesc = cmdDoc.ShortDesc
	doc.LongDesc = cmdDoc.LongDesc
	doc.Synopsis = cmdDoc.Synopsis
	return doc
}
// GetShortDesc returns the ShortDesc field of the receiver
// CommandDocumentation. Unlike GetLongDesc/GetSynopsis it is not templated
// with a docFormat.
func (cmdDoc CommandDocumentation) GetShortDesc() string {
	return cmdDoc.ShortDesc
}
// GetLongDesc returns the LongDesc field of the receiver CommandDocumentation
// with the passed docFormat injected into the template placeholders.
func (cmdDoc CommandDocumentation) GetLongDesc(format docFormat) (string, error) {
	return templateDocStringHelper(cmdDoc.LongDesc, format)
}
// templateDocStringHelper parses docString as a text/template and executes
// it with docFormat as the data, replacing placeholders such as
// {{.LessThan}} with the format's marker strings.
func templateDocStringHelper(docString string, docFormat docFormat) (string, error) {
	t, err := template.New("description").Parse(docString)
	if err != nil {
		return "", err
	}
	var rendered bytes.Buffer
	if execErr := t.Execute(&rendered, docFormat); execErr != nil {
		return "", execErr
	}
	return rendered.String(), nil
}
// GetSynopsis returns the synopsis lines with the supplied docFormat
// injected into each line's template placeholders.
//
// Bug fix: the previous implementation wrote the formatted lines back into
// the receiver's Synopsis slice, so a second call with a different format
// would re-template already-formatted text (and mutate shared state).
// Format into a fresh slice instead.
func (cmdDoc CommandDocumentation) GetSynopsis(format docFormat) ([]string, error) {
	lines := make([]string, len(cmdDoc.Synopsis))
	for i, line := range cmdDoc.Synopsis {
		formatted, err := templateDocStringHelper(line, format)
		if err != nil {
			return []string{}, err
		}
		lines[i] = formatted
	}
	return lines, nil
}
// docFormat holds the per-output-format replacement strings injected into
// documentation templates ({{.LessThan}}, {{.EmphasisLeft}}, etc.).
type docFormat struct {
	LessThan      string
	GreaterThan   string
	EmphasisLeft  string
	EmphasisRight string
}

// MarkdownFormat targets mdx output: angle brackets rendered inside code spans.
var MarkdownFormat = docFormat{"`<", ">`", "`", "`"}

// CliFormat targets shell help output.
var CliFormat = docFormat{"<", ">", "<b>", "</b>"}

// SynopsisMarkdownFormat is a special format for the synopsis, which is
// rendered inside raw HTML in markdown, so angle brackets are entity-escaped.
var SynopsisMarkdownFormat = docFormat{"&lt;", "&gt;", "`", "`"}
// transformSynopsisToHtml renders the synopsis lines as a raw-HTML code
// block for embedding in markdown. The first line is "command synopsis";
// subsequent lines are indented with tabs. Returns "" for an empty synopsis.
func transformSynopsisToHtml(commandStr string, synopsis []string) string {
	if len(synopsis) == 0 {
		return ""
	}
	var sb strings.Builder
	sb.WriteString(fmt.Sprintf("%s %s<br />\n", commandStr, synopsis[0]))
	for _, line := range synopsis[1:] {
		sb.WriteString(fmt.Sprintf("\t\t\t%s %s<br />\n", commandStr, line))
	}
	html := `
<div class="gatsby-highlight" data-language="text">
<pre class="language-text">
<code class="language-text">
%s
</code>
</pre>
</div>
`
	return fmt.Sprintf(html, sb.String())
}
// argument pairs an argument name with its (already templated) description.
type argument struct {
	Name        string
	Description string
}

// templateArgument renders one argument as a markdown snippet: the name in
// backticks and angle brackets, followed by the description when present.
func templateArgument(supportedArg argument) (string, error) {
	formatString := "`<{{.Name}}>`:\n\n{{.Description}}\n\n"
	if supportedArg.Description == "" {
		formatString = "`<{{.Name}}>`\n\n"
	}
	t, err := template.New("argString").Parse(formatString)
	if err != nil {
		return "", err
	}
	var rendered bytes.Buffer
	if execErr := t.Execute(&rendered, supportedArg); execErr != nil {
		return "", execErr
	}
	return rendered.String(), nil
}
// supported pairs an option's abbreviation and name with its description.
type supported struct {
	Abbreviation string
	Name         string
	Description  string
}

// templateSupported renders one supported option as a markdown snippet,
// including the single-letter abbreviation and/or the description only
// when they are non-empty.
func templateSupported(opt supported) (string, error) {
	var formatString string
	switch {
	case opt.Abbreviation == "" && opt.Description == "":
		formatString = "`--{{.Name}}`\n\n"
	case opt.Abbreviation == "":
		formatString = "`--{{.Name}}`:\n{{.Description}}\n\n"
	case opt.Description == "":
		formatString = "`-{{.Abbreviation}}`, `--{{.Name}}`\n\n"
	default:
		formatString = "`-{{.Abbreviation}}`, `--{{.Name}}`:\n{{.Description}}\n\n"
	}
	t, err := template.New("argString").Parse(formatString)
	if err != nil {
		return "", err
	}
	var rendered bytes.Buffer
	if execErr := t.Execute(&rendered, opt); execErr != nil {
		return "", execErr
	}
	return rendered.String(), nil
}
-64
View File
@@ -24,9 +24,7 @@ import (
"github.com/fatih/color"
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
"github.com/liquidata-inc/dolt/go/libraries/utils/funcitr"
"github.com/liquidata-inc/dolt/go/libraries/utils/iohelp"
)
var underline = color.New(color.Underline)
@@ -68,59 +66,6 @@ func PrintHelpText(commandStr, shortDesc, longDesc string, synopsis []string, pa
}
}
func CreateMarkdown(fs filesys.Filesys, path, commandStr, shortDesc, longDesc string, synopsis []string, parser *argparser.ArgParser) error {
wr, err := fs.OpenForWrite(path)
if err != nil {
return err
}
defer wr.Close()
err = iohelp.WriteIfNoErr(wr, []byte("## Command\n\n"), nil)
err = iohelp.WriteIfNoErr(wr, []byte(commandStr+" - "+shortDesc+"\n\n"), err)
if len(synopsis) > 0 {
err = iohelp.WriteIfNoErr(wr, []byte("## Synopsis\n\n"), err)
err = iohelp.WriteIfNoErr(wr, []byte("```sh\n"), err)
for _, synopsisLine := range synopsis {
err = iohelp.WriteIfNoErr(wr, []byte(commandStr+" "+synopsisLine+"\n"), err)
}
err = iohelp.WriteIfNoErr(wr, []byte("```\n\n"), err)
}
err = iohelp.WriteIfNoErr(wr, []byte("## Description\n\n"), err)
err = iohelp.WriteIfNoErr(wr, []byte(markdownEscape(longDesc)+"\n\n"), err)
if len(parser.Supported) > 0 || len(parser.ArgListHelp) > 0 {
err = iohelp.WriteIfNoErr(wr, []byte("## Options\n\n"), err)
for _, kvTuple := range parser.ArgListHelp {
k, v := kvTuple[0], kvTuple[1]
err = iohelp.WriteIfNoErr(wr, []byte("&lt;"+k+"&gt;\n"+v+"\n\n"), err)
}
for _, supOpt := range parser.Supported {
argHelpFmt := "--%[2]s"
if supOpt.Abbrev != "" && supOpt.ValDesc != "" {
argHelpFmt = "-%[1]s &lt;%[3]s&gt;, --%[2]s=&lt;%[3]s&gt;"
} else if supOpt.Abbrev != "" {
argHelpFmt = "-%[1]s, --%[2]s"
} else if supOpt.ValDesc != "" {
argHelpFmt = "--%[2]s=&lt;%[3]s&gt;"
}
argHelp := fmt.Sprintf(argHelpFmt, supOpt.Abbrev, supOpt.Name, supOpt.ValDesc)
err = iohelp.WriteIfNoErr(wr, []byte(argHelp+"\n"), err)
err = iohelp.WriteIfNoErr(wr, []byte(supOpt.Desc+"\n\n"), err)
}
}
return err
}
func PrintUsage(commandStr string, synopsis []string, parser *argparser.ArgParser) {
_, termWidth := terminalSize()
@@ -156,15 +101,6 @@ const (
var bold = color.New(color.Bold)
func markdownEscape(str string) string {
str = strings.ReplaceAll(str, "<b>", "**")
str = strings.ReplaceAll(str, "</b>", "**")
str = strings.ReplaceAll(str, "<", "&lt;")
str = strings.ReplaceAll(str, ">", "&gt;")
return str
}
func embolden(str string) string {
res := ""
curr := str
-30
View File
@@ -16,8 +16,6 @@ package cli
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestToIndentedParagraph(t *testing.T) {
@@ -62,31 +60,3 @@ func TestEmbolden(t *testing.T) {
}
}
}
func TestMarkdownEscape(t *testing.T) {
tests := []struct {
name string
str string
expected string
}{
{"Nothing to do", "some text no angle brackets", "some text no angle brackets"},
{"Open with no close", "x < y, ", "x &lt; y, "},
{"Close with no open", "x &gt; y, ", "x &gt; y, "},
{"Begin with open, no close", "<something", "&lt;something"},
{"End with close with no begin", "something&gt;", "something&gt;"},
{"Basic escape", "test <test> test", "test &lt;test&gt; test"},
{"Start", "<test> test", "&lt;test&gt; test"},
{"End", "test <test>", "test &lt;test&gt;"},
{"Start and end", "<test>", "&lt;test&gt;"},
{"Start after end", "this > that, <test>, that < this", "this &gt; that, &lt;test&gt;, that &lt; this"},
{"has spaces", "<has spaces>", "&lt;has spaces&gt;"},
{"bold tags", "regular text, <b>bolt text</b>, more regular", "regular text, **bolt text**, more regular"},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
res := markdownEscape(test.str)
assert.Equal(t, test.expected, res)
})
}
}
+10 -7
View File
@@ -31,14 +31,17 @@ const (
allParam = "all"
)
var addShortDesc = `Add table contents to the list of staged tables`
var addLongDesc = `This command updates the list of tables using the current content found in the working root, to prepare the content staged for the next commit. It adds the current content of existing tables as a whole or remove tables that do not exist in the working root anymore.
var addDocs = cli.CommandDocumentationContent{
ShortDesc: `Add table contents to the list of staged tables`,
LongDesc: `
This command updates the list of tables using the current content found in the working root, to prepare the content staged for the next commit. It adds the current content of existing tables as a whole or remove tables that do not exist in the working root anymore.
This command can be performed multiple times before a commit. It only adds the content of the specified table(s) at the time the add command is run; if you want subsequent changes included in the next commit, then you must run dolt add again to add the new content to the index.
The dolt status command can be used to obtain a summary of which tables have changes that are staged for the next commit.`
var addSynopsis = []string{
`[<table>...]`,
The dolt status command can be used to obtain a summary of which tables have changes that are staged for the next commit.`,
Synopsis: []string{
`[{{.LessThan}}table{{.GreaterThan}}...]`,
},
}
type AddCmd struct{}
@@ -56,7 +59,7 @@ func (cmd AddCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd AddCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, addShortDesc, addLongDesc, addSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, addDocs, ap))
}
func (cmd AddCmd) createArgParser() *argparser.ArgParser {
@@ -69,7 +72,7 @@ func (cmd AddCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd AddCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
helpPr, _ := cli.HelpAndUsagePrinters(commandStr, addShortDesc, addLongDesc, addSynopsis, ap)
helpPr, _ := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, addDocs, ap))
apr := cli.ParseArgs(ap, args, helpPr)
if apr.ContainsArg(doltdb.DocTableName) {
+8 -7
View File
@@ -35,11 +35,12 @@ import (
"github.com/liquidata-inc/dolt/go/store/types"
)
var blameShortDesc = `Show what revision and author last modified each row of a table`
var blameLongDesc = `Annotates each row in the given table with information from the revision which last modified the row. Optionally, start annotating from the given revision.`
var blameSynopsis = []string{
`[<rev>] <tablename>`,
var blameDocs = cli.CommandDocumentationContent{
ShortDesc: `Show what revision and author last modified each row of a table`,
LongDesc: `Annotates each row in the given table with information from the revision which last modified the row. Optionally, start annotating from the given revision.`,
Synopsis: []string{
`[{{.LessThan}}rev{{.GreaterThan}}] {{.LessThan}}tablename{{.GreaterThan}}`,
},
}
// blameInfo contains blame information for a row
@@ -88,7 +89,7 @@ func (cmd BlameCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd BlameCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, blameShortDesc, blameLongDesc, blameSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, blameDocs, ap))
}
func (cmd BlameCmd) createArgParser() *argparser.ArgParser {
@@ -119,7 +120,7 @@ func (cmd BlameCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd BlameCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, blameShortDesc, blameLongDesc, blameSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, blameDocs, ap))
apr := cli.ParseArgs(ap, args, help)
if apr.NArg() == 0 || apr.NArg() > 2 {
+19 -21
View File
@@ -34,30 +34,28 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/set"
)
var branchShortDesc = `List, create, or delete branches`
var branchLongDesc = `If <b>--list</b> is given, or if there are no non-option arguments, existing branches are listed; the current branch will be highlighted with an asterisk.
var branchForceFlagDesc = "Reset {{.LessThan}}branchname{{.GreaterThan}} to {{.LessThan}}startpoint{{.GreaterThan}}, even if {{.LessThan}}branchname{{.GreaterThan}} exists already. Without {{.EmphasisLeft}}-f{{.EmphasisRight}}, {{.EmphasisLeft}}dolt branch{{.EmphasisRight}} refuses to change an existing branch. In combination with {{.EmphasisLeft}}-d{{.EmphasisRight}} (or {{.EmphasisLeft}}--delete{{.EmphasisRight}}), allow deleting the branch irrespective of its merged status. In combination with -m (or {{.EmphasisLeft}}--move{{.EmphasisRight}}), allow renaming the branch even if the new branch name already exists, the same applies for {{.EmphasisLeft}}-c{{.EmphasisRight}} (or {{.EmphasisLeft}}--copy{{.EmphasisRight}})."
The command's second form creates a new branch head named <branchname> which points to the current <b>HEAD</b>, or <start-point> if given.
var branchDocs = cli.CommandDocumentationContent{
ShortDesc: `List, create, or delete branches`,
LongDesc: `If {{.EmphasisLeft}}--list{{.EmphasisRight}} is given, or if there are no non-option arguments, existing branches are listed; the current branch will be highlighted with an asterisk.
Note that this will create the new branch, but it will not switch the working tree to it; use "dolt checkout <newbranch>" to switch to the new branch.
The command's second form creates a new branch head named {{.LessThan}}branchname{{.GreaterThan}} which points to the current {{.EmphasisLeft}}HEAD{{.EmphasisRight}}, or {{.LessThan}}start-point{{.GreaterThan}} if given.
With a <b>-m</b>, <oldbranch> will be renamed to <newbranch>. If <newbranch> exists, -f must be used to force the rename to happen.
Note that this will create the new branch, but it will not switch the working tree to it; use {{.EmphasisLeft}}dolt checkout <newbranch>{{.EmphasisRight}} to switch to the new branch.
The <b>-c</b> options have the exact same semantics as <b>-m</b>, except instead of the branch being renamed it will be copied to a new name.
With a {{.EmphasisLeft}}-m{{.EmphasisRight}}, {{.LessThan}}oldbranch{{.GreaterThan}} will be renamed to {{.LessThan}}newbranch{{.GreaterThan}}. If {{.LessThan}}newbranch{{.GreaterThan}} exists, -f must be used to force the rename to happen.
With a <b>-d</b>, <branchname> will be deleted. You may specify more than one branch for deletion.`
The {{.EmphasisLeft}}-c{{.EmphasisRight}} options have the exact same semantics as {{.EmphasisLeft}}-m{{.EmphasisRight}}, except instead of the branch being renamed it will be copied to a new name.
var branchForceFlagDesc = "Reset <branchname> to <startpoint>, even if <branchname> exists already. Without -f, dolt branch " +
"refuses to change an existing branch. In combination with -d (or --delete), allow deleting the branch irrespective " +
"of its merged status. In combination with -m (or --move), allow renaming the branch even if the new branch name " +
"already exists, the same applies for -c (or --copy)."
var branchSynopsis = []string{
`[--list] [-v] [-a]`,
`[-f] <branchname> [<start-point>]`,
`-m [-f] [<oldbranch>] <newbranch>`,
`-c [-f] [<oldbranch>] <newbranch>`,
`-d [-f] <branchname>...`,
With a {{.EmphasisLeft}}-d{{.EmphasisRight}}, {{.LessThan}}branchname{{.GreaterThan}} will be deleted. You may specify more than one branch for deletion.`,
Synopsis: []string{
`[--list] [-v] [-a]`,
`[-f] {{.LessThan}}branchname{{.GreaterThan}} [{{.LessThan}}start-point{{.GreaterThan}}]`,
`-m [-f] [{{.LessThan}}oldbranch{{.GreaterThan}}] {{.LessThan}}newbranch{{.GreaterThan}}`,
`-c [-f] [{{.LessThan}}oldbranch{{.GreaterThan}}] {{.LessThan}}newbranch{{.GreaterThan}}`,
`-d [-f] {{.LessThan}}branchname{{.GreaterThan}}...`,
},
}
const (
@@ -86,7 +84,7 @@ func (cmd BranchCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd BranchCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, branchShortDesc, branchLongDesc, branchSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, branchDocs, ap))
}
func (cmd BranchCmd) createArgParser() *argparser.ArgParser {
@@ -97,7 +95,7 @@ func (cmd BranchCmd) createArgParser() *argparser.ArgParser {
ap.SupportsFlag(copyFlag, "c", "Create a copy of a branch.")
ap.SupportsFlag(moveFlag, "m", "Move/rename a branch")
ap.SupportsFlag(deleteFlag, "d", "Delete a branch. The branch must be fully merged in its upstream branch.")
ap.SupportsFlag(deleteForceFlag, "", "Shortcut for --delete --force.")
ap.SupportsFlag(deleteForceFlag, "", "Shortcut for {{.EmphasisLeft}}--delete --force{{.EmphasisRight}}.")
ap.SupportsFlag(verboseFlag, "v", "When in list mode, show the hash and commit subject line for each head")
ap.SupportsFlag(allFlag, "a", "When in list mode, shows remote tracked branches")
return ap
@@ -111,7 +109,7 @@ func (cmd BranchCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd BranchCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, branchShortDesc, branchLongDesc, branchSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, branchDocs, ap))
apr := cli.ParseArgs(ap, args, help)
switch {
+18 -16
View File
@@ -28,23 +28,25 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)
var coShortDesc = `Switch branches or restore working tree tables`
var coLongDesc = `Updates tables in the working set to match the staged versions. If no paths are given, dolt checkout will also update HEAD to set the specified branch as the current branch.
var checkoutDocs = cli.CommandDocumentationContent{
ShortDesc: `Switch branches or restore working tree tables`,
LongDesc: `
Updates tables in the working set to match the staged versions. If no paths are given, dolt checkout will also update HEAD to set the specified branch as the current branch.
dolt checkout <branch>
To prepare for working on <branch>, switch to it by updating the index and the tables in the working tree, and by pointing HEAD at the branch. Local modifications to the tables in the working
tree are kept, so that they can be committed to the <branch>.
dolt checkout {{.LessThan}}branch{{.GreaterThan}}
To prepare for working on {{.LessThan}}branch{{.GreaterThan}}, switch to it by updating the index and the tables in the working tree, and by pointing HEAD at the branch. Local modifications to the tables in the working
tree are kept, so that they can be committed to the {{.LessThan}}branch{{.GreaterThan}}.
dolt checkout -b <new_branch> [<start_point>]
dolt checkout -b {{.LessThan}}new_branch{{.GreaterThan}} [{{.LessThan}}start_point{{.GreaterThan}}]
Specifying -b causes a new branch to be created as if dolt branch were called and then checked out.
dolt checkout <table>...
To update table(s) with their values in HEAD `
var coSynopsis = []string{
`<branch>`,
`<table>...`,
`-b <new-branch> [<start-point>]`,
dolt checkout {{.LessThan}}table{{.GreaterThan}}...
To update table(s) with their values in HEAD `,
Synopsis: []string{
`{{.LessThan}}branch{{.GreaterThan}}`,
`{{.LessThan}}table{{.GreaterThan}}...`,
`-b {{.LessThan}}new-branch{{.GreaterThan}} [{{.LessThan}}start-point{{.GreaterThan}}]`,
},
}
const coBranchArg = "b"
@@ -64,12 +66,12 @@ func (cmd CheckoutCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd CheckoutCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, coShortDesc, coLongDesc, coSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, checkoutDocs, ap))
}
func (cmd CheckoutCmd) createArgParser() *argparser.ArgParser {
ap := argparser.NewArgParser()
ap.SupportsString(coBranchArg, "", "branch", "Create a new branch named <new_branch> and start it at <start_point>.")
ap.SupportsString(coBranchArg, "", "branch", "Create a new branch named {{.LessThan}}new_branch{{.GreaterThan}} and start it at {{.LessThan}}start_point{{.GreaterThan}}.")
return ap
}
@@ -81,7 +83,7 @@ func (cmd CheckoutCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd CheckoutCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
helpPrt, usagePrt := cli.HelpAndUsagePrinters(commandStr, coShortDesc, coLongDesc, coSynopsis, ap)
helpPrt, usagePrt := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, checkoutDocs, ap))
apr := cli.ParseArgs(ap, args, helpPrt)
if (apr.Contains(coBranchArg) && apr.NArg() > 1) || (!apr.Contains(coBranchArg) && apr.NArg() == 0) {
+13 -14
View File
@@ -45,18 +45,17 @@ const (
branchParam = "branch"
)
var cloneShortDesc = "Clone a data repository into a new directory"
var cloneLongDesc = "Clones a repository into a newly created directory, creates remote-tracking branches for each " +
"branch in the cloned repository (visible using dolt branch -a), and creates and checks out an initial branch that " +
"is forked from the cloned repository's currently active branch.\n" +
"\n" +
"After the clone, a plain <b>dolt fetch</b> without arguments will update all the remote-tracking branches, and a <b>dolt " +
"pull</b> without arguments will in addition merge the remote branch into the current branch\n" +
"\n" +
"This default configuration is achieved by creating references to the remote branch heads under refs/remotes/origin " +
"and by creating a remote named 'origin'."
var cloneSynopsis = []string{
"[-remote <remote>] [-branch <branch>] [--aws-region <region>] [--aws-creds-type <creds-type>] [--aws-creds-file <file>] [--aws-creds-profile <profile>] <remote-url> <new-dir>",
var cloneDocs = cli.CommandDocumentationContent{
ShortDesc: "Clone a data repository into a new directory",
LongDesc: `Clones a repository into a newly created directory, creates remote-tracking branches for each branch in the cloned repository (visible using {{.LessThan}}dolt branch -a{{.GreaterThan}}), and creates and checks out an initial branch that is forked from the cloned repository's currently active branch.
After the clone, a plain {{.EmphasisLeft}}dolt fetch{{.EmphasisRight}} without arguments will update all the remote-tracking branches, and a {{.EmphasisLeft}}dolt pull{{.EmphasisRight}} without arguments will in addition merge the remote branch into the current branch.
This default configuration is achieved by creating references to the remote branch heads under {{.LessThan}}refs/remotes/origin{{.GreaterThan}} and by creating a remote named 'origin'.
`,
Synopsis: []string{
"[-remote {{.LessThan}}remote{{.GreaterThan}}] [-branch {{.LessThan}}branch{{.GreaterThan}}] [--aws-region {{.LessThan}}region{{.GreaterThan}}] [--aws-creds-type {{.LessThan}}creds-type{{.GreaterThan}}] [--aws-creds-file {{.LessThan}}file{{.GreaterThan}}] [--aws-creds-profile {{.LessThan}}profile{{.GreaterThan}}] {{.LessThan}}remote-url{{.GreaterThan}} {{.LessThan}}new-dir{{.GreaterThan}}",
},
}
type CloneCmd struct{}
@@ -80,7 +79,7 @@ func (cmd CloneCmd) RequiresRepo() bool {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd CloneCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, cloneShortDesc, cloneLongDesc, cloneSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, cloneDocs, ap))
}
func (cmd CloneCmd) createArgParser() *argparser.ArgParser {
@@ -102,7 +101,7 @@ func (cmd CloneCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd CloneCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, cloneShortDesc, cloneLongDesc, cloneSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, cloneDocs, ap))
apr := cli.ParseArgs(ap, args, help)
remoteName := apr.GetValueOrDefault(remoteParam, "origin")
+8 -6
View File
@@ -36,10 +36,12 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/iohelp"
)
var catShortDesc = "print conflicts"
var catLongDesc = `The dolt conflicts cat command reads table conflicts and writes them to the standard output.`
var catSynopsis = []string{
"[<commit>] <table>...",
var catDocs = cli.CommandDocumentationContent{
ShortDesc: "print conflicts",
LongDesc: `The dolt conflicts cat command reads table conflicts and writes them to the standard output.`,
Synopsis: []string{
"[{{.LessThan}}commit{{.GreaterThan}}] {{.LessThan}}table{{.GreaterThan}}...",
},
}
type CatCmd struct{}
@@ -57,7 +59,7 @@ func (cmd CatCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd CatCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, catShortDesc, catLongDesc, catSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, catDocs, ap))
}
// EventType returns the type of the event to log
@@ -75,7 +77,7 @@ func (cmd CatCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd CatCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, catShortDesc, catLongDesc, catSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, catDocs, ap))
apr := cli.ParseArgs(ap, args, help)
args = apr.Args()
+15 -15
View File
@@ -30,19 +30,19 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
)
var resShortDesc = "Removes rows from list of conflicts"
var resLongDesc = "When a merge operation finds conflicting changes, the rows with the conflicts are added to list " +
"of conflicts that must be resolved. Once the value for the row is resolved in the working set of tables, then " +
"the conflict should be resolved.\n" +
"\n" +
"In it's first form <b>dolt conflicts resolve <table> <key>...</b>, resolve runs in manual merge mode resolving " +
"the conflicts whose keys are provided.\n" +
"\n" +
"In it's second form <b>dolt conflicts resolve --ours|--theirs <table>...</b>, resolve runs in auto resolve mode. " +
"where conflicts are resolved using a rule to determine which version of a row should be used."
var resSynopsis = []string{
"<table> [<key_definition>] <key>...",
"--ours|--theirs <table>...",
var resDocumentation = cli.CommandDocumentationContent{
ShortDesc: "Removes rows from list of conflicts",
LongDesc: `
When a merge operation finds conflicting changes, the rows with the conflicts are added to list of conflicts that must be resolved. Once the value for the row is resolved in the working set of tables, then the conflict should be resolved.
In it's first form {{.EmphasisLeft}}dolt conflicts resolve <table> <key>...{{.EmphasisRight}}, resolve runs in manual merge mode resolving the conflicts whose keys are provided.
In it's second form {{.EmphasisLeft}}dolt conflicts resolve --ours|--theirs <table>...{{.EmphasisRight}}, resolve runs in auto resolve mode. Where conflicts are resolved using a rule to determine which version of a row should be used.
`,
Synopsis: []string{
`{{.LessThan}}table{{.GreaterThan}} [{{.LessThan}}key_definition{{.GreaterThan}}] {{.LessThan}}key{{.GreaterThan}}...`,
`--ours|--theirs {{.LessThan}}table{{.GreaterThan}}...`,
},
}
const (
@@ -79,7 +79,7 @@ func (cmd ResolveCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd ResolveCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, resShortDesc, resLongDesc, resSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, resDocumentation, ap))
}
// EventType returns the type of the event to log
@@ -100,7 +100,7 @@ func (cmd ResolveCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd ResolveCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, resShortDesc, resLongDesc, resSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, resDocumentation, ap))
apr := cli.ParseArgs(ap, args, help)
var verr errhand.VerboseError
+17 -17
View File
@@ -41,20 +41,20 @@ const (
commitMessageArg = "message"
)
var commitShortDesc = `Record changes to the repository`
var commitLongDesc = "Stores the current contents of the staged tables in a new commit along with a log message from the " +
"user describing the changes.\n" +
"\n" +
"The content to be added can be specified by using dolt add to incrementally \"add\" changes to the staged tables " +
"before using the commit command (Note: even modified files must be \"added\");" +
"\n" +
"The log message can be added with the parameter -m <msg>. If the -m parameter is not provided an editor will be " +
"opened where you can review the commit and provide a log message.\n" +
"\n" +
"The commit timestamp can be modified using the --date parameter. Dates can be specified in the formats YYYY-MM-DD " +
"YYYY-MM-DDTHH:MM:SS, or YYYY-MM-DDTHH:MM:SSZ07:00 (where 07:00 is the time zone offset)."
var commitSynopsis = []string{
"[options]",
var commitDocs = cli.CommandDocumentationContent{
ShortDesc: "Record changes to the repository",
LongDesc: `
Stores the current contents of the staged tables in a new commit along with a log message from the user describing the changes.
The content to be added can be specified by using dolt add to incrementally "add" changes to the staged tables before using the commit command (Note: even modified files must be "added").
The log message can be added with the parameter {{.EmphasisLeft}}-m <msg>{{.EmphasisRight}}. If the {{.LessThan}}-m{{.GreaterThan}} parameter is not provided an editor will be opened where you can review the commit and provide a log message.
The commit timestamp can be modified using the --date parameter. Dates can be specified in the formats {{.LessThan}}YYYY-MM-DD{{.GreaterThan}}, {{.LessThan}}YYYY-MM-DDTHH:MM:SS{{.GreaterThan}}, or {{.LessThan}}YYYY-MM-DDTHH:MM:SSZ07:00{{.GreaterThan}} (where {{.LessThan}}07:00{{.GreaterThan}} is the time zone offset).
`,
Synopsis: []string{
"[options]",
},
}
type CommitCmd struct{}
@@ -72,12 +72,12 @@ func (cmd CommitCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd CommitCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, commitShortDesc, commitLongDesc, commitSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, commitDocs, ap))
}
func (cmd CommitCmd) createArgParser() *argparser.ArgParser {
ap := argparser.NewArgParser()
ap.SupportsString(commitMessageArg, "m", "msg", "Use the given <msg> as the commit message.")
ap.SupportsString(commitMessageArg, "m", "msg", "Use the given {{.LessThan}}msg{{.GreaterThan}} as the commit message.")
ap.SupportsFlag(allowEmptyFlag, "", "Allow recording a commit that has the exact same data as its sole parent. This is usually a mistake, so it is disabled by default. This option bypasses that safety.")
ap.SupportsString(dateParam, "", "date", "Specify the date used in the commit. If not specified the current system time is used.")
return ap
@@ -86,7 +86,7 @@ func (cmd CommitCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd CommitCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, commitShortDesc, commitLongDesc, commitSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, commitDocs, ap))
apr := cli.ParseArgs(ap, args, help)
msg, msgOk := apr.GetValue(commitMessageArg)
+16 -12
View File
@@ -38,17 +38,21 @@ const (
unsetOperationStr = "unset"
)
var cfgShortDesc = `Get and set repository or global options`
var cfgLongDesc = `You can query/set/replace/unset options with this command.
var cfgDocs = cli.CommandDocumentationContent{
ShortDesc: `Get and set repository or global options`,
LongDesc: `You can query/set/replace/unset options with this command.
When reading, the values are read from the global and repository local configuration files, and options {{.LessThan}}--global{{.GreaterThan}}, and {{.LessThan}}--local{{.GreaterThan}} can be used to tell the command to read from only that location.
When writing, the new value is written to the repository local configuration file by default, and options {{.LessThan}}--global{{.GreaterThan}}, can be used to tell the command to write to that location (you can say {{.LessThan}}--local{{.GreaterThan}} but that is the default).
`,
When reading, the values are read from the global and repository local configuration files, and options --global, and --local can be used to tell the command to read from only that location.
When writing, the new value is written to the repository local configuration file by default, and options --global, can be used to tell the command to write to that location (you can say --local but that is the default).`
var cfgSynopsis = []string{
"[--global|--local] --list",
"[--global|--local] --add <name> <value>",
"[--global|--local] --get <name>",
"[--global|--local] --unset <name>...",
Synopsis: []string{
`[--global|--local] --list`,
`[--global|--local] --add {{.LessThan}}name{{.GreaterThan}} {{.LessThan}}value{{.GreaterThan}}`,
`[--global|--local] --get {{.LessThan}}name{{.GreaterThan}}`,
`[--global|--local] --unset {{.LessThan}}name{{.GreaterThan}}...`,
},
}
type ConfigCmd struct{}
@@ -72,7 +76,7 @@ func (cmd ConfigCmd) RequiresRepo() bool {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd ConfigCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, cfgShortDesc, cfgLongDesc, cfgSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, cfgDocs, ap))
}
func (cmd ConfigCmd) createArgParser() *argparser.ArgParser {
@@ -90,7 +94,7 @@ func (cmd ConfigCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd ConfigCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, cfgShortDesc, cfgLongDesc, cfgSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, cfgDocs, ap))
apr := cli.ParseArgs(ap, args, help)
cfgTypes := apr.FlagsEqualTo([]string{globalParamName, localParamName}, true)
+9 -3
View File
@@ -32,7 +32,13 @@ import (
var checkShortDesc = "Check authenticating with a credential keypair against a doltremoteapi."
var checkLongDesc = `Tests calling a doltremoteapi with dolt credentials and reports the authentication result.`
var checkSynopsis = []string{"[--endpoint doltremoteapi.dolthub.com:443] [--creds <eak95022q3vskvumn2fcrpibdnheq1dtr8t...>]"}
var checkSynopsis = []string{"[--endpoint doltremoteapi.dolthub.com:443] [--creds {{.LessThan}}eak95022q3vskvumn2fcrpibdnheq1dtr8t...{{.GreaterThan}}]"}
var checkDocs = cli.CommandDocumentationContent{
ShortDesc: "Check authenticating with a credential keypair against a doltremoteapi.",
LongDesc: `Tests calling a doltremoteapi with dolt credentials and reports the authentication result.`,
Synopsis: []string{"[--endpoint doltremoteapi.dolthub.com:443] [--creds {{.LessThan}}eak95022q3vskvumn2fcrpibdnheq1dtr8t...{{.GreaterThan}}]"},
}
type CheckCmd struct{}
@@ -49,7 +55,7 @@ func (cmd CheckCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd CheckCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, checkShortDesc, checkLongDesc, checkSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, checkDocs, ap))
}
// RequiresRepo should return false if this interface is implemented, and the command does not have the requirement
@@ -73,7 +79,7 @@ func (cmd CheckCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd CheckCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, lsShortDesc, lsLongDesc, lsSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, checkDocs, ap))
apr := cli.ParseArgs(ap, args, help)
endpoint := loadEndpoint(dEnv, apr)
+9 -7
View File
@@ -30,11 +30,13 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)
var lsShortDesc = "List keypairs available for authenticating with doltremoteapi."
var lsLongDesc = `Lists known public keys from keypairs for authenticating with doltremoteapi.
var lsDocs = cli.CommandDocumentationContent{
ShortDesc: "List keypairs available for authenticating with doltremoteapi.",
LongDesc: `Lists known public keys from keypairs for authenticating with doltremoteapi.
The currently selected keypair appears with a '*' next to it.`
var lsSynopsis = []string{"[-v | --verbose]"}
The currently selected keypair appears with a {{.EmphasisLeft}}*{{.EmphasisRight}} next to it.`,
Synopsis: []string{"[-v | --verbose]"},
}
var lsVerbose = false
@@ -47,13 +49,13 @@ func (cmd LsCmd) Name() string {
// Description returns a description of the command
func (cmd LsCmd) Description() string {
return lsShortDesc
return lsDocs.ShortDesc
}
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd LsCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, lsShortDesc, lsLongDesc, lsSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, lsDocs, ap))
}
// RequiresRepo should return false if this interface is implemented, and the command does not have the requirement
@@ -76,7 +78,7 @@ func (cmd LsCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd LsCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, lsShortDesc, lsLongDesc, lsSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, lsDocs, ap))
apr := cli.ParseArgs(ap, args, help)
if apr.Contains("verbose") {
+9 -8
View File
@@ -29,12 +29,13 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)
var newShortDesc = "Create a new public/private keypair for authenticating with doltremoteapi."
var newLongDesc = `Creates a new keypair for authenticating with doltremoteapi.
var newDocs = cli.CommandDocumentationContent{
ShortDesc: "Create a new public/private keypair for authenticating with doltremoteapi.",
LongDesc: `Creates a new keypair for authenticating with doltremoteapi.
Prints the public portion of the keypair, which can entered into the credentials
settings page of dolthub.`
var newSynopsis = []string{}
Prints the public portion of the keypair, which can entered into the credentials settings page of dolthub.`,
Synopsis: []string{},
}
type NewCmd struct{}
@@ -45,13 +46,13 @@ func (cmd NewCmd) Name() string {
// Description returns a description of the command
func (cmd NewCmd) Description() string {
return newShortDesc
return newDocs.ShortDesc
}
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd NewCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, newShortDesc, newLongDesc, newSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, newDocs, ap))
}
// RequiresRepo should return false if this interface is implemented, and the command does not have the requirement
@@ -73,7 +74,7 @@ func (cmd NewCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd NewCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, newShortDesc, newLongDesc, newSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, newDocs, ap))
cli.ParseArgs(ap, args, help)
_, newCreds, verr := actions.NewCredsFile(dEnv)
+8 -6
View File
@@ -28,9 +28,11 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)
var rmShortDesc = "Remove a stored public/private keypair."
var rmLongDesc = `Removes an existing keypair from dolt's credential storage.`
var rmSynopsis = []string{"<public_key_as_appears_in_ls>"}
var rmDocs = cli.CommandDocumentationContent{
ShortDesc: "Remove a stored public/private keypair.",
LongDesc: `Removes an existing keypair from dolt's credential storage.`,
Synopsis: []string{"{{.LessThan}}public_key_as_appears_in_ls{{.GreaterThan}}"},
}
type RmCmd struct{}
@@ -41,13 +43,13 @@ func (cmd RmCmd) Name() string {
// Description returns a description of the command
func (cmd RmCmd) Description() string {
return rmShortDesc
return rmDocs.ShortDesc
}
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd RmCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, rmShortDesc, rmLongDesc, rmSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, rmDocs, ap))
}
func (cmd RmCmd) createArgParser() *argparser.ArgParser {
@@ -69,7 +71,7 @@ func (cmd RmCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd RmCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, rmShortDesc, rmLongDesc, rmSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, rmDocs, ap))
apr := cli.ParseArgs(ap, args, help)
args = apr.Args()
+9 -7
View File
@@ -28,15 +28,17 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)
var useShortDesc = "Select an existing dolt credential for authenticating with doltremoteapi."
var useLongDesc = `Selects an existing dolt credential for authenticating with doltremoteapi.
var useDocs = cli.CommandDocumentationContent{
ShortDesc: "Select an existing dolt credential for authenticating with doltremoteapi.",
LongDesc: `Selects an existing dolt credential for authenticating with doltremoteapi.
Can be given a credential's public key or key id and will update global dolt
config to use the credential when interacting with doltremoteapi.
You can see your available credentials with 'dolt creds ls'.`
You can see your available credentials with 'dolt creds ls'.`,
var useSynopsis = []string{"<public_key_as_appears_in_ls | public_key_id_as_appears_in_ls"}
Synopsis: []string{"{{.LessThan}}public_key_as_appears_in_ls | public_key_id_as_appears_in_ls{{.GreaterThan}}"},
}
type UseCmd struct{}
@@ -47,13 +49,13 @@ func (cmd UseCmd) Name() string {
// Description returns a description of the command
func (cmd UseCmd) Description() string {
return useShortDesc
return useDocs.ShortDesc
}
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd UseCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, useShortDesc, useLongDesc, useSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, useDocs, ap))
}
// RequiresRepo should return false if this interface is implemented, and the command does not have the requirement
@@ -75,7 +77,7 @@ func (cmd UseCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd UseCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, useShortDesc, useLongDesc, useSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, useDocs, ap))
apr := cli.ParseArgs(ap, args, help)
args = apr.Args()
if len(args) != 1 {
+20 -18
View File
@@ -77,26 +77,28 @@ type DiffSink interface {
Close() error
}
var diffShortDesc = "Show changes between commits, commit and working tree, etc"
var diffLongDesc = `Show changes between the working and staged tables, changes between the working tables and the tables within a commit, or changes between tables at two commits.
var diffDocs = cli.CommandDocumentationContent{
ShortDesc: "Show changes between commits, commit and working tree, etc",
LongDesc: `
Show changes between the working and staged tables, changes between the working tables and the tables within a commit, or changes between tables at two commits.
dolt diff [--options] [<tables>...]
{{.EmphasisLeft}}dolt diff [--options] [<tables>...]{{.EmphasisRight}}
This form is to view the changes you made relative to the staging area for the next commit. In other words, the differences are what you could tell Dolt to further add but you still haven't. You can stage these changes by using dolt add.
dolt diff [--options] <commit> [<tables>...]
This form is to view the changes you have in your working tables relative to the named <commit>. You can use HEAD to compare it with the latest commit, or a branch name to compare with the tip of a different branch.
{{.EmphasisLeft}}dolt diff [--options] <commit> [<tables>...]{{.EmphasisRight}}
This form is to view the changes you have in your working tables relative to the named {{.LessThan}}commit{{.GreaterThan}}. You can use HEAD to compare it with the latest commit, or a branch name to compare with the tip of a different branch.
dolt diff [--options] <commit> <commit> [<tables>...]
This is to view the changes between two arbitrary <commit>.
{{.EmphasisLeft}}dolt diff [--options] <commit> <commit> [<tables>...]{{.EmphasisRight}}
This is to view the changes between two arbitrary {{.EmphasisLeft}}commit{{.EmphasisRight}}.
The diffs displayed can be limited to show the first N by providing the parameter <b>--limit N</b> where N is the number of diffs to display.
The diffs displayed can be limited to show the first N by providing the parameter {{.EmphasisLeft}}--limit N{{.EmphasisRight}} where {{.EmphasisLeft}}N{{.EmphasisRight}} is the number of diffs to display.
In order to filter which diffs are displayed <b>--where key=value</b> can be used. The key in this case would be either to_COLUMN_NAME or from_COLUMN_NAME. where from_COLUMN_NAME=value would filter based on the original value and to_COLUMN_NAME would select based on its updated value.
`
var diffSynopsis = []string{
"[options] [<commit>] [<tables>...]",
"[options] <commit> <commit> [<tables>...]",
In order to filter which diffs are displayed {{.EmphasisLeft}}--where key=value{{.EmphasisRight}} can be used. The key in this case would be either {{.EmphasisLeft}}to_COLUMN_NAME{{.EmphasisRight}} or {{.EmphasisLeft}}from_COLUMN_NAME{{.EmphasisRight}}. where {{.EmphasisLeft}}from_COLUMN_NAME=value{{.EmphasisRight}} would filter based on the original value and {{.EmphasisLeft}}to_COLUMN_NAME{{.EmphasisRight}} would select based on its updated value.
`,
Synopsis: []string{
`[options] [{{.LessThan}}commit{{.GreaterThan}}] [{{.LessThan}}tables{{.GreaterThan}}...]`,
`[options] {{.LessThan}}commit{{.GreaterThan}} {{.LessThan}}commit{{.GreaterThan}} [{{.LessThan}}tables{{.GreaterThan}}...]`,
},
}
type diffArgs struct {
@@ -126,7 +128,7 @@ func (cmd DiffCmd) EventType() eventsapi.ClientEventType {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd DiffCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, diffShortDesc, diffLongDesc, diffSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, diffDocs, ap))
}
func (cmd DiffCmd) createArgParser() *argparser.ArgParser {
@@ -134,8 +136,8 @@ func (cmd DiffCmd) createArgParser() *argparser.ArgParser {
ap.SupportsFlag(DataFlag, "d", "Show only the data changes, do not show the schema changes (Both shown by default).")
ap.SupportsFlag(SchemaFlag, "s", "Show only the schema changes, do not show the data changes (Both shown by default).")
ap.SupportsFlag(SummaryFlag, "", "Show summary of data changes")
ap.SupportsFlag(SQLFlag, "q", "Output diff as a SQL patch file of INSERT / UPDATE / DELETE statements")
ap.SupportsString(whereParam, "", "column", "filters columns based on values in the diff. See dolt diff --help for details.")
ap.SupportsFlag(SQLFlag, "q", "Output diff as a SQL patch file of {{.EmphasisLeft}}INSERT{{.EmphasisRight}} / {{.EmphasisLeft}}UPDATE{{.EmphasisRight}} / {{.EmphasisLeft}}DELETE{{.EmphasisRight}} statements")
ap.SupportsString(whereParam, "", "column", "filters columns based on values in the diff. See {{.EmphasisLeft}}dolt diff --help{{.EmphasisRight}} for details.")
ap.SupportsInt(limitParam, "", "record_count", "limits to the first N diffs.")
return ap
}
@@ -143,7 +145,7 @@ func (cmd DiffCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd DiffCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, _ := cli.HelpAndUsagePrinters(commandStr, diffShortDesc, diffLongDesc, diffSynopsis, ap)
help, _ := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, diffDocs, ap))
apr := cli.ParseArgs(ap, args, help)
diffParts := SchemaAndDataDiff
+20 -34
View File
@@ -17,7 +17,6 @@ package commands
import (
"context"
"fmt"
"io"
"path/filepath"
"strings"
@@ -67,7 +66,7 @@ func (cmd *DumpDocsCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr stri
func (cmd *DumpDocsCmd) Exec(_ context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := argparser.NewArgParser()
ap.SupportsString(dirParamName, "", "dir", "The directory where the md files should be dumped")
help, usage := cli.HelpAndUsagePrinters(commandStr, initShortDesc, initLongDesc, initSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, cli.CommandDocumentationContent{}, ap))
apr := cli.ParseArgs(ap, args, help)
dirStr := apr.GetValueOrDefault(dirParamName, ".")
@@ -84,25 +83,7 @@ func (cmd *DumpDocsCmd) Exec(_ context.Context, commandStr string, args []string
return 1
}
indexPath := filepath.Join(dirStr, "command_line_index.md")
idxWr, err := dEnv.FS.OpenForWrite(indexPath)
if err != nil {
verr := errhand.BuildDError("error writing to command_line.md").AddCause(err).Build()
cli.PrintErrln(verr.Verbose())
return 1
}
defer idxWr.Close()
err = iohelp.WriteAll(idxWr, []byte("# Dolt Commands\n"))
if err != nil {
verr := errhand.BuildDError("error writing to command_line.md").AddCause(err).Build()
cli.PrintErrln(verr.Verbose())
return 1
}
err = cmd.dumpDocs(idxWr, dEnv, dirStr, cmd.DoltCommand.Name(), cmd.DoltCommand.Subcommands)
err := cmd.dumpDocs(dEnv, dirStr, cmd.DoltCommand.Name(), cmd.DoltCommand.Subcommands)
if err != nil {
verr := errhand.BuildDError("error: Failed to dump docs.").AddCause(err).Build()
@@ -114,7 +95,7 @@ func (cmd *DumpDocsCmd) Exec(_ context.Context, commandStr string, args []string
return 0
}
func (cmd *DumpDocsCmd) dumpDocs(idxWr io.Writer, dEnv *env.DoltEnv, dirStr, cmdStr string, subCommands []cli.Command) error {
func (cmd *DumpDocsCmd) dumpDocs(dEnv *env.DoltEnv, dirStr, cmdStr string, subCommands []cli.Command) error {
for _, curr := range subCommands {
var hidden bool
if hidCmd, ok := curr.(cli.HiddenCommand); ok {
@@ -123,25 +104,18 @@ func (cmd *DumpDocsCmd) dumpDocs(idxWr io.Writer, dEnv *env.DoltEnv, dirStr, cmd
if !hidden {
if subCmdHandler, ok := curr.(cli.SubCommandHandler); ok {
err := cmd.dumpDocs(idxWr, dEnv, dirStr, cmdStr+" "+subCmdHandler.Name(), subCmdHandler.Subcommands)
err := cmd.dumpDocs(dEnv, dirStr, cmdStr+" "+subCmdHandler.Name(), subCmdHandler.Subcommands)
if err != nil {
return err
}
} else {
currCmdStr := cmdStr + " " + curr.Name()
filename := strings.ReplaceAll(currCmdStr, " ", "_")
filename = strings.ReplaceAll(filename, "-", "_")
currCmdStr := fmt.Sprintf("%s %s", cmdStr, curr.Name())
filename := strings.ReplaceAll(currCmdStr, " ", "-")
absPath := filepath.Join(dirStr, filename+".md")
indexLine := fmt.Sprintf("* [%s](%s)\n", currCmdStr, filename)
err := iohelp.WriteAll(idxWr, []byte(indexLine))
if err != nil {
return err
}
err = curr.CreateMarkdown(dEnv.FS, absPath, currCmdStr)
err := curr.CreateMarkdown(dEnv.FS, absPath, currCmdStr)
if err != nil {
return err
@@ -152,3 +126,15 @@ func (cmd *DumpDocsCmd) dumpDocs(idxWr io.Writer, dEnv *env.DoltEnv, dirStr, cmd
return nil
}
func CreateMarkdown(fs filesys.Filesys, path string, cmdDoc cli.CommandDocumentation) error {
markdownDoc, err := cmdDoc.CmdDocToMd()
if err != nil {
return err
}
wr, err := fs.OpenForWrite(path)
if err != nil {
return err
}
return iohelp.WriteIfNoErr(wr, []byte(markdownDoc), err)
}
+14 -12
View File
@@ -30,16 +30,18 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)
var fetchShortDesc = "Download objects and refs from another repository"
var fetchLongDesc = "Fetch refs, along with the objects necessary to complete their histories and update " +
"remote-tracking branches." +
"\n" +
"\n By default dolt will attempt to fetch from a remote named 'origin'. The <remote> parameter allows you to " +
"specify the name of a different remote you wish to pull from by the remote's name." +
"\n" +
"\nWhen no refspec(s) are specified on the command line, the fetch_specs for the default remote are used."
var fetchSynopsis = []string{
"[<remote>] [<refspec> ...]",
var fetchDocs = cli.CommandDocumentationContent{
ShortDesc: "Download objects and refs from another repository",
LongDesc: `Fetch refs, along with the objects necessary to complete their histories and update remote-tracking branches.
By default dolt will attempt to fetch from a remote named {{.EmphasisLeft}}origin{{.EmphasisRight}}. The {{.LessThan}}remote{{.GreaterThan}} parameter allows you to specify the name of a different remote you wish to pull from by the remote's name.
When no refspec(s) are specified on the command line, the fetch_specs for the default remote are used.
`,
Synopsis: []string{
"[{{.LessThan}}remote{{.GreaterThan}}] [{{.LessThan}}refspec{{.GreaterThan}} ...]",
},
}
type FetchCmd struct{}
@@ -62,7 +64,7 @@ func (cmd FetchCmd) EventType() eventsapi.ClientEventType {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd FetchCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, fetchShortDesc, fetchLongDesc, fetchSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, fetchDocs, ap))
}
func (cmd FetchCmd) createArgParser() *argparser.ArgParser {
@@ -73,7 +75,7 @@ func (cmd FetchCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd FetchCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := argparser.NewArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, fetchShortDesc, fetchLongDesc, fetchSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, fetchDocs, ap))
apr := cli.ParseArgs(ap, args, help)
remotes, _ := dEnv.GetRemotes()
+14 -10
View File
@@ -16,6 +16,7 @@ package commands
import (
"context"
"fmt"
"time"
"github.com/fatih/color"
@@ -33,12 +34,16 @@ const (
usernameParamName = "name"
)
var initShortDesc = "Create an empty Dolt data repository"
var initLongDesc = `This command creates an empty Dolt data repository in the current directory.
var initDocs = cli.CommandDocumentationContent{
ShortDesc: "Create an empty Dolt data repository",
LongDesc: `This command creates an empty Dolt data repository in the current directory.
Running dolt init in an already initialized directory will fail.`
var initSynopsis = []string{
"[<options>] [<path>]",
Running dolt init in an already initialized directory will fail.
`,
Synopsis: []string{
//`[{{.LessThan}}options{{.GreaterThan}}] [{{.LessThan}}path{{.GreaterThan}}]`,
},
}
type InitCmd struct{}
@@ -62,14 +67,13 @@ func (cmd InitCmd) RequiresRepo() bool {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd InitCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, initShortDesc, initLongDesc, initSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, initDocs, ap))
}
func (cmd InitCmd) createArgParser() *argparser.ArgParser {
ap := argparser.NewArgParser()
ap.SupportsString(usernameParamName, "", "name", "The name used in commits to this repo. If not provided will be taken from \""+env.UserNameKey+"\" in the global config.")
ap.SupportsString(emailParamName, "", "email", "The email address used. If not provided will be taken from \""+env.UserEmailKey+"\" in the global config.")
ap.SupportsString(usernameParamName, "", "name", fmt.Sprintf("The name used in commits to this repo. If not provided will be taken from {{.EmphasisLeft}}%s{{.EmphasisRight}} in the global config.", env.UserNameKey))
ap.SupportsString(emailParamName, "", "email", fmt.Sprintf("The email address used. If not provided will be taken from {{.EmphasisLeft}}%s{{.EmphasisRight}} in the global config.", env.UserEmailKey))
ap.SupportsString(dateParam, "", "date", "Specify the date used in the initial commit. If not specified the current system time is used.")
return ap
@@ -78,7 +82,7 @@ func (cmd InitCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd InitCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, initShortDesc, initLongDesc, initSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, initDocs, ap))
apr := cli.ParseArgs(ap, args, help)
if dEnv.HasDoltDir() {
+9 -8
View File
@@ -35,13 +35,14 @@ const (
numLinesParam = "number"
)
var logShortDesc = `Show commit logs`
var logLongDesc = "Shows the commit logs.\n" +
"\n" +
"The command takes options to control what is shown and how."
var logDocs = cli.CommandDocumentationContent{
ShortDesc: `Show commit logs`,
LongDesc: `Shows the commit logs
var logSynopsis = []string{
"[-n <num_commits>] [<commit>]",
The command takes options to control what is shown and how.`,
Synopsis: []string{
`[-n {{.LessThan}}num_commits{{.GreaterThan}}] [{{.LessThan}}commit{{.GreaterThan}}]`,
},
}
type commitLoggerFunc func(*doltdb.CommitMeta, []hash.Hash, hash.Hash)
@@ -100,7 +101,7 @@ func (cmd LogCmd) EventType() eventsapi.ClientEventType {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd LogCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := createLogArgParser()
return cli.CreateMarkdown(fs, path, commandStr, logShortDesc, logLongDesc, logSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, logDocs, ap))
}
func createLogArgParser() *argparser.ArgParser {
@@ -116,7 +117,7 @@ func (cmd LogCmd) Exec(ctx context.Context, commandStr string, args []string, dE
func logWithLoggerFunc(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv, loggerFunc commitLoggerFunc) int {
ap := createLogArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, logShortDesc, logLongDesc, logSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, logDocs, ap))
apr := cli.ParseArgs(ap, args, help)
if apr.NArg() > 1 {
+7 -6
View File
@@ -36,10 +36,11 @@ const (
loginRetryInterval = 5
)
var loginShortDesc = ""
var loginLongDesc = ""
var loginSynopsis = []string{
"[<creds>]",
var loginDocs = cli.CommandDocumentationContent{
ShortDesc: "Login to DoltHub",
LongDesc: `Login into DoltHub using the email in your config so you can pull from private repos and push to those you have permission to.
`,
Synopsis: []string{"[{{.LessThan}}creds{{.GreaterThan}}]"},
}
type LoginCmd struct{}
@@ -63,7 +64,7 @@ func (cmd LoginCmd) RequiresRepo() bool {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd LoginCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, loginShortDesc, loginLongDesc, loginSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, loginDocs, ap))
}
func (cmd LoginCmd) createArgParser() *argparser.ArgParser {
@@ -80,7 +81,7 @@ func (cmd LoginCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd LoginCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, loginShortDesc, loginLongDesc, loginSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, loginDocs, ap))
apr := cli.ParseArgs(ap, args, help)
var verr errhand.VerboseError
+14 -14
View File
@@ -36,18 +36,18 @@ const (
systemFlag = "system"
)
var lsShortDesc = "List tables"
var lsLongDesc = "With no arguments lists the tables in the current working set but if a commit is specified it will list " +
"the tables in that commit. If the --verbose flag is provided a row count and a hash of the table will also be " +
"displayed.\n" +
"\n" +
"If the --system flag is supplied this will show the dolt system tables which are queryable with SQL. Some system " +
"tables can be queried even if they are not in the working set by specifying appropriate parameters in the SQL " +
"queries. To see these tables too you may pass the --verbose flag.\n" +
"\n" +
"If the --all flag is supplied both user and system tables will be printed."
var lsSynopsis = []string{
"[--options] [<commit>]",
var lsDocs = cli.CommandDocumentationContent{
ShortDesc: "List tables",
LongDesc: `With no arguments lists the tables in the current working set but if a commit is specified it will list the tables in that commit. If the {{.EmphasisLeft}}--verbose{{.EmphasisRight}} flag is provided a row count and a hash of the table will also be displayed.
If the {{.EmphasisLeft}}--system{{.EmphasisRight}} flag is supplied this will show the dolt system tables which are queryable with SQL. Some system tables can be queried even if they are not in the working set by specifying appropriate parameters in the SQL queries. To see these tables too you may pass the {{.EmphasisLeft}}--verbose{{.EmphasisRight}} flag.
If the {{.EmphasisLeft}}--all{{.EmphasisRight}} flag is supplied both user and system tables will be printed.
`,
Synopsis: []string{
"[--options] [{{.LessThan}}commit{{.GreaterThan}}]",
},
}
type LsCmd struct{}
@@ -65,7 +65,7 @@ func (cmd LsCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd LsCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, lsShortDesc, lsLongDesc, lsSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, lsDocs, ap))
}
func (cmd LsCmd) createArgParser() *argparser.ArgParser {
@@ -84,7 +84,7 @@ func (cmd LsCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd LsCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, lsShortDesc, lsLongDesc, lsSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, lsDocs, ap))
apr := cli.ParseArgs(ap, args, help)
if apr.NArg() > 1 {
+68 -34
View File
@@ -32,34 +32,32 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/doltcore/ref"
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
"github.com/liquidata-inc/dolt/go/store/hash"
)
const (
abortParam = "abort"
)
var mergeShortDesc = "Join two or more development histories together"
var mergeLongDesc = "Incorporates changes from the named commits (since the time their histories diverged from the " +
"current branch) into the current branch.\n" +
"\n" +
"The second syntax (\"<b>dolt merge --abort</b>\") can only be run after the merge has resulted in conflicts. " +
"git merge --abort will abort the merge process and try to reconstruct the pre-merge state. However, if there were " +
"uncommitted changes when the merge started (and especially if those changes were further modified after the merge " +
"was started), dolt merge --abort will in some cases be unable to reconstruct the original (pre-merge) changes. " +
"Therefore: \n" +
"\n" +
"<b>Warning</b>: Running dolt merge with non-trivial uncommitted changes is discouraged: while possible, it may " +
"leave you in a state that is hard to back out of in the case of a conflict."
var mergeSynopsis = []string{
"<branch>",
"--abort",
var mergeDocs = cli.CommandDocumentationContent{
ShortDesc: "Join two or more development histories together",
LongDesc: `Incorporates changes from the named commits (since the time their histories diverged from the current branch) into the current branch.
The second syntax ({{.LessThan}}dolt merge --abort{{.GreaterThan}}) can only be run after the merge has resulted in conflicts. git merge {{.EmphasisLeft}}--abort{{.EmphasisRight}} will abort the merge process and try to reconstruct the pre-merge state. However, if there were uncommitted changes when the merge started (and especially if those changes were further modified after the merge was started), dolt merge {{.EmphasisLeft}}--abort{{.EmphasisRight}} will in some cases be unable to reconstruct the original (pre-merge) changes. Therefore:
{{.LessThan}}Warning{{.GreaterThan}}: Running dolt merge with non-trivial uncommitted changes is discouraged: while possible, it may leave you in a state that is hard to back out of in the case of a conflict.
`,
Synopsis: []string{
"{{.LessThan}}branch{{.GreaterThan}}",
"--abort",
},
}
var abortDetails = "Abort the current conflict resolution process, and try to reconstruct the pre-merge state.\n" +
"\n" +
"If there were uncommitted working set changes present when the merge started, dolt merge --abort will be " +
"unable to reconstruct these changes. It is therefore recommended to always commit or stash your changes before " +
"running git merge."
var abortDetails = `Abort the current conflict resolution process, and try to reconstruct the pre-merge state.
If there were uncommitted working set changes present when the merge started, {{.EmphasisLeft}}dolt merge --abort{{.EmphasisRight}} will be unable to reconstruct these changes. It is therefore recommended to always commit or stash your changes before running git merge.
`
type MergeCmd struct{}
@@ -76,7 +74,7 @@ func (cmd MergeCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd MergeCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, mergeShortDesc, mergeLongDesc, mergeSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, mergeDocs, ap))
}
func (cmd MergeCmd) createArgParser() *argparser.ArgParser {
@@ -93,7 +91,7 @@ func (cmd MergeCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd MergeCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, mergeShortDesc, mergeLongDesc, mergeSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, mergeDocs, ap))
apr := cli.ParseArgs(ap, args, help)
var verr errhand.VerboseError
@@ -132,7 +130,7 @@ func (cmd MergeCmd) Exec(ctx context.Context, commandStr string, args []string,
return 1
} else if dEnv.IsMergeActive() {
cli.Println("error: Merging is not possible because you have not committed an active merge.")
cli.Println("hint: add affected tables using 'dolt add <table>' and commit using 'dolt commit -m <msg>'")
cli.Println("hint: add affected tables using 'dolt add <table>' and commit using {{.EmphasisLeft}}dolt commit -m <msg>{{.EmphasisRight}}")
cli.Println("fatal: Exiting because of active merge")
return 1
}
@@ -192,7 +190,7 @@ func mergeBranch(ctx context.Context, dEnv *env.DoltEnv, dref ref.DoltRef) errha
cli.Println("Updating", h1.String()+".."+h2.String())
tblNames, err := dEnv.MergeWouldStompChanges(ctx, cm2)
tblNames, workingDiffs, err := dEnv.MergeWouldStompChanges(ctx, cm2)
if err != nil {
return errhand.BuildDError("error: failed to determine mergability.").AddCause(err).Build()
@@ -208,16 +206,29 @@ func mergeBranch(ctx context.Context, dEnv *env.DoltEnv, dref ref.DoltRef) errha
}
if ok, err := cm1.CanFastForwardTo(ctx, cm2); ok {
return executeFFMerge(ctx, dEnv, cm2)
return executeFFMerge(ctx, dEnv, cm2, workingDiffs)
} else if err == doltdb.ErrUpToDate || err == doltdb.ErrIsAhead {
cli.Println("Already up to date.")
return nil
} else {
return executeMerge(ctx, dEnv, cm1, cm2, dref)
return executeMerge(ctx, dEnv, cm1, cm2, dref, workingDiffs)
}
}
func executeFFMerge(ctx context.Context, dEnv *env.DoltEnv, cm2 *doltdb.Commit) errhand.VerboseError {
func applyChanges(ctx context.Context, root *doltdb.RootValue, workingDiffs map[string]hash.Hash) (*doltdb.RootValue, errhand.VerboseError) {
var err error
for tblName, h := range workingDiffs {
root, err = root.SetTableHash(ctx, tblName, h)
if err != nil {
return nil, errhand.BuildDError("error: Failed to update table '%s'.", tblName).AddCause(err).Build()
}
}
return root, nil
}
func executeFFMerge(ctx context.Context, dEnv *env.DoltEnv, cm2 *doltdb.Commit, workingDiffs map[string]hash.Hash) errhand.VerboseError {
cli.Println("Fast-forward")
rv, err := cm2.GetRootValue()
@@ -226,22 +237,36 @@ func executeFFMerge(ctx context.Context, dEnv *env.DoltEnv, cm2 *doltdb.Commit)
return errhand.BuildDError("error: failed to get root value").AddCause(err).Build()
}
h, err := dEnv.DoltDB.WriteRootValue(ctx, rv)
stagedHash, err := dEnv.DoltDB.WriteRootValue(ctx, rv)
if err != nil {
return errhand.BuildDError("Failed to write database").AddCause(err).Build()
}
workingHash := stagedHash
if len(workingDiffs) > 0 {
rv, err = applyChanges(ctx, rv, workingDiffs)
if err != nil {
return errhand.BuildDError("Failed to re-apply working changes.").AddCause(err).Build()
}
workingHash, err = dEnv.DoltDB.WriteRootValue(ctx, rv)
if err != nil {
return errhand.BuildDError("Failed to write database").AddCause(err).Build()
}
}
err = dEnv.DoltDB.FastForward(ctx, dEnv.RepoState.CWBHeadRef(), cm2)
if err != nil {
return errhand.BuildDError("Failed to write database").AddCause(err).Build()
}
dEnv.RepoState.Working = h.String()
dEnv.RepoState.Staged = h.String()
err = dEnv.RepoState.Save(dEnv.FS)
dEnv.RepoState.Working = workingHash.String()
dEnv.RepoState.Staged = stagedHash.String()
err = dEnv.RepoState.Save(dEnv.FS)
if err != nil {
return errhand.BuildDError("unable to execute repo state update.").
AddDetails(`As a result your .dolt/repo_state.json file may have invalid values for "staged" and "working".
@@ -261,7 +286,7 @@ and take the hash for your current branch and use it for the value for "staged"
return nil
}
func executeMerge(ctx context.Context, dEnv *env.DoltEnv, cm1, cm2 *doltdb.Commit, dref ref.DoltRef) errhand.VerboseError {
func executeMerge(ctx context.Context, dEnv *env.DoltEnv, cm1, cm2 *doltdb.Commit, dref ref.DoltRef, workingDiffs map[string]hash.Hash) errhand.VerboseError {
mergedRoot, tblToStats, err := merge.MergeCommits(ctx, dEnv.DoltDB, cm1, cm2)
if err != nil {
@@ -275,6 +300,15 @@ func executeMerge(ctx context.Context, dEnv *env.DoltEnv, cm1, cm2 *doltdb.Commi
}
}
workingRoot := mergedRoot
if len(workingDiffs) > 0 {
workingRoot, err = applyChanges(ctx, mergedRoot, workingDiffs)
if err != nil {
return errhand.BuildDError("").AddCause(err).Build()
}
}
h2, err := cm2.HashOf()
if err != nil {
@@ -287,7 +321,7 @@ func executeMerge(ctx context.Context, dEnv *env.DoltEnv, cm1, cm2 *doltdb.Commi
return errhand.BuildDError("Unable to update the repo state").AddCause(err).Build()
}
verr := UpdateWorkingWithVErr(dEnv, mergedRoot)
verr := UpdateWorkingWithVErr(dEnv, workingRoot)
if verr == nil {
hasConflicts := printSuccessStats(tblToStats)
+13 -13
View File
@@ -17,24 +17,24 @@ package commands
import (
"context"
eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
"github.com/liquidata-inc/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/dolt/go/cmd/dolt/errhand"
eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/ref"
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)
var pullShortDesc = "Fetch from and integrate with another repository or a local branch"
var pullLongDesc = "Incorporates changes from a remote repository into the current branch. In its default mode, " +
"<b>dolt pull</b> is shorthand for <b>dolt fetch</b> followed by <b>dolt merge <remote>/<branch></b>." +
"\n" +
"\nMore precisely, dolt pull runs dolt fetch with the given parameters and calls dolt merge to merge the retrieved " +
"branch heads into the current branch."
var pullSynopsis = []string{
"<remote>",
var pullDocs = cli.CommandDocumentationContent{
ShortDesc: "Fetch from and integrate with another repository or a local branch",
LongDesc: `Incorporates changes from a remote repository into the current branch. In its default mode, {{.EmphasisLeft}}dolt pull{{.EmphasisRight}} is shorthand for {{.EmphasisLeft}}dolt fetch{{.EmphasisRight}} followed by {{.EmphasisLeft}}dolt merge <remote>/<branch>{{.EmphasisRight}}.
More precisely, dolt pull runs {{.EmphasisLeft}}dolt fetch{{.EmphasisRight}} with the given parameters and calls {{.EmphasisLeft}}dolt merge{{.EmphasisRight}} to merge the retrieved branch {{.EmphasisLeft}}HEAD{{.EmphasisRight}} into the current branch.
`,
Synopsis: []string{
"{{.LessThan}}remote{{.GreaterThan}}",
},
}
type PullCmd struct{}
@@ -52,7 +52,7 @@ func (cmd PullCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd PullCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, pullShortDesc, pullLongDesc, pullSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, pullDocs, ap))
}
func (cmd PullCmd) createArgParser() *argparser.ArgParser {
@@ -68,7 +68,7 @@ func (cmd PullCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd PullCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, pullShortDesc, pullLongDesc, pullSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, pullDocs, ap))
apr := cli.ParseArgs(ap, args, help)
branch := dEnv.RepoState.CWBHeadRef()
+15 -18
View File
@@ -41,23 +41,20 @@ const (
SetUpstreamFlag = "set-upstream"
)
var pushShortDesc = "Update remote refs along with associated objects"
var pushDocs = cli.CommandDocumentationContent{
ShortDesc: "Update remote refs along with associated objects",
LongDesc: `Updates remote refs using local refs, while sending objects necessary to complete the given refs.
var pushLongDesc = "Updates remote refs using local refs, while sending objects necessary to complete the given refs." +
"\n" +
"\nWhen the command line does not specify where to push with the <remote> argument, an attempt is made to infer the " +
"remote. If only one remote exists it will be used, if multiple remotes exists, a remote named 'origin' will be " +
"attempted. If there is more than one remote, and none of them are named 'origin' then the command will fail and " +
"you will need to specify the correct remote explicitly." +
"\n" +
"\nWhen the command line does not specify what to push with <refspec>... then the current branch will be used." +
"\n" +
"\nWhen neither the command-line does not specify what to push, the default behavior is used, which corresponds to the " +
"current branch being pushed to the corresponding upstream branch, but as a safety measure, the push is aborted if " +
"the upstream branch does not have the same name as the local one."
When the command line does not specify where to push with the {{.LessThan}}remote{{.GreaterThan}} argument, an attempt is made to infer the remote. If only one remote exists it will be used, if multiple remotes exists, a remote named 'origin' will be attempted. If there is more than one remote, and none of them are named 'origin' then the command will fail and you will need to specify the correct remote explicitly.
var pushSynopsis = []string{
"[-u | --set-upstream] [<remote>] [<refspec>]",
When the command line does not specify what to push with {{.LessThan}}refspec{{.GreaterThan}}... then the current branch will be used.
When neither the command-line does not specify what to push, the default behavior is used, which corresponds to the current branch being pushed to the corresponding upstream branch, but as a safety measure, the push is aborted if the upstream branch does not have the same name as the local one.
`,
Synopsis: []string{
"[-u | --set-upstream] [{{.LessThan}}remote{{.GreaterThan}}] [{{.LessThan}}refspec{{.GreaterThan}}]",
},
}
type PushCmd struct{}
@@ -75,12 +72,12 @@ func (cmd PushCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd PushCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, pushShortDesc, pushLongDesc, pushSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, pushDocs, ap))
}
func (cmd PushCmd) createArgParser() *argparser.ArgParser {
ap := argparser.NewArgParser()
ap.SupportsFlag(SetUpstreamFlag, "u", "For every branch that is up to date or successfully pushed, add upstream (tracking) reference, used by argument-less dolt pull and other commands.")
ap.SupportsFlag(SetUpstreamFlag, "u", "For every branch that is up to date or successfully pushed, add upstream (tracking) reference, used by argument-less {{.EmphasisLeft}}dolt pull{{.EmphasisRight}} and other commands.")
return ap
}
@@ -92,7 +89,7 @@ func (cmd PushCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd PushCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, pushShortDesc, pushLongDesc, pushSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, pushDocs, ap))
apr := cli.ParseArgs(ap, args, help)
remotes, err := dEnv.GetRemotes()
+29 -37
View File
@@ -37,42 +37,34 @@ import (
var ErrInvalidPort = errors.New("invalid port")
var remoteShortDesc = "Manage set of tracked repositories"
var remoteLongDesc = "With no arguments, shows a list of existing remotes. Several subcommands are available to perform " +
"operations on the remotes." +
"\n" +
"\n<b>add</b>\n" +
"Adds a remote named <name> for the repository at <url>. The command dolt fetch <name> can " +
"then be used to create and update remote-tracking branches <name>/<branch>." +
"\n" +
"\nThe <url> parameter supports url schemes of http, https, aws, gs, and file. If a url scheme does not prefix the " +
"url then https is assumed. If the <url> paramenter is in the format <organization>/<repository> then dolt will use " +
"the remotes.default_host from your configuration file (Which will be dolthub.com unless changed).\n" +
"\n" +
"AWS cloud remote urls should be of the form aws://[dynamo-table:s3-bucket]/database. You may configure your aws " +
"cloud remote using the optional parameters aws-region, aws-creds-type, aws-creds-file.\n" +
"\n" +
"aws-creds-type specifies the means by which credentials should be retrieved in order to access the specified " +
"cloud resources (specifically the dynamo table, and the s3 bucket). Valid values are 'role', 'env', or 'file'.\n" +
"\n" +
"\trole: Use the credentials installed for the current user\n" +
"\tenv: Looks for environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY\n" +
"\tfile: Uses the credentials file specified by the parameter aws-creds-file\n" +
"\n" +
"GCP remote urls should be of the form gs://gcs-bucket/database and will use the credentials setup using the gcloud " +
"command line available from Google" +
"\n" +
"The local filesystem can be used as a remote by providing a repository url in the format file://absolute path. See" +
"https://en.wikipedia.org/wiki/File_URI_scheme for details." +
"\n" +
"\n<b>remove, rm</b>\n" +
"Remove the remote named <name>. All remote-tracking branches and configuration settings" +
"for the remote are removed."
var remoteDocs = cli.CommandDocumentationContent{
ShortDesc: "Manage set of tracked repositories",
LongDesc: `With no arguments, shows a list of existing remotes. Several subcommands are available to perform operations on the remotes.
var remoteSynopsis = []string{
"[-v | --verbose]",
"add [--aws-region <region>] [--aws-creds-type <creds-type>] [--aws-creds-file <file>] [--aws-creds-profile <profile>] <name> <url>",
"remove <name>",
{{.EmphasisLeft}}add{{.EmphasisRight}}
Adds a remote named {{.LessThan}}name{{.GreaterThan}} for the repository at {{.LessThan}}url{{.GreaterThan}}. The command dolt fetch {{.LessThan}}name{{.GreaterThan}} can then be used to create and update remote-tracking branches {{.EmphasisLeft}}<name>/<branch>{{.EmphasisRight}}.
The {{.LessThan}}url{{.GreaterThan}} parameter supports url schemes of http, https, aws, gs, and file. If a url scheme does not prefix the url then https is assumed. If the {{.LessThan}}url{{.GreaterThan}} paramenter is in the format {{.EmphasisLeft}}<organization>/<repository>{{.EmphasisRight}} then dolt will use the {{.EmphasisLeft}}remotes.default_host{{.EmphasisRight}} from your configuration file (Which will be dolthub.com unless changed).
AWS cloud remote urls should be of the form {{.EmphasisLeft}}aws://[dynamo-table:s3-bucket]/database{{.EmphasisRight}}. You may configure your aws cloud remote using the optional parameters {{.EmphasisLeft}}aws-region{{.EmphasisRight}}, {{.EmphasisLeft}}aws-creds-type{{.EmphasisRight}}, {{.EmphasisLeft}}aws-creds-file{{.EmphasisRight}}.
aws-creds-type specifies the means by which credentials should be retrieved in order to access the specified cloud resources (specifically the dynamo table, and the s3 bucket). Valid values are 'role', 'env', or 'file'.
\trole: Use the credentials installed for the current user
\tenv: Looks for environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
\tfile: Uses the credentials file specified by the parameter aws-creds-file
GCP remote urls should be of the form gs://gcs-bucket/database and will use the credentials setup using the gcloud command line available from Google +
The local filesystem can be used as a remote by providing a repository url in the format file://absolute path. See https://en.wikipedia.org/wiki/File_URI_schemethi
{{.EmphasisLeft}}remove{{.EmphasisRight}}, {{.EmphasisLeft}}rm{{.EmphasisRight}},
Remove the remote named {{.LessThan}}name{{.GreaterThan}}. All remote-tracking branches and configuration settings for the remote are removed.`,
Synopsis: []string{
"[-v | --verbose]",
"add [--aws-region {{.LessThan}}region{{.GreaterThan}}] [--aws-creds-type {{.LessThan}}creds-type{{.GreaterThan}}] [--aws-creds-file {{.LessThan}}file{{.GreaterThan}}] [--aws-creds-profile {{.LessThan}}profile{{.GreaterThan}}] {{.LessThan}}name{{.GreaterThan}} {{.LessThan}}url{{.GreaterThan}}",
"remove {{.LessThan}}name{{.GreaterThan}}",
},
}
const (
@@ -98,7 +90,7 @@ func (cmd RemoteCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd RemoteCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, remoteShortDesc, remoteLongDesc, remoteSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, remoteDocs, ap))
}
func (cmd RemoteCmd) createArgParser() *argparser.ArgParser {
@@ -122,7 +114,7 @@ func (cmd RemoteCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd RemoteCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, remoteShortDesc, remoteLongDesc, remoteSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, remoteDocs, ap))
apr := cli.ParseArgs(ap, args, help)
var verr errhand.VerboseError
+15 -13
View File
@@ -35,24 +35,26 @@ const (
HardResetParam = "hard"
)
var resetShortDesc = "Resets staged tables to their HEAD state"
var resetLongDesc = `Sets the state of a table in the staging area to be that table's value at HEAD
var resetDocContent = cli.CommandDocumentationContent{
ShortDesc: "Resets staged tables to their HEAD state",
LongDesc: `Sets the state of a table in the staging area to be that table's value at HEAD
dolt reset <tables>...
This form resets the values for all staged <tables> to their values at HEAD. (It does not affect the working tree or
{{.EmphasisLeft}}dolt reset <tables>...{{.EmphasisRight}}"
This form resets the values for all staged {{.LessThan}}tables{{.GreaterThan}} to their values at {{.EmphasisLeft}}HEAD{{.EmphasisRight}}. (It does not affect the working tree or
the current branch.)
This means that </b>dolt reset <tables></b> is the opposite of <b>dolt add <tables></b>.
This means that {{.EmphasisLeft}}dolt reset <tables>{{.EmphasisRight}} is the opposite of {{.EmphasisLeft}}dolt add <tables>{{.EmphasisRight}}.
After running <b>dolt reset <tables></b> to update the staged tables, you can use <b>dolt checkout</b> to check the
After running {{.EmphasisLeft}}dolt reset <tables>{{.EmphasisRight}} to update the staged tables, you can use {{.EmphasisLeft}}dolt checkout{{.EmphasisRight}} to check the
contents out of the staged tables to the working tables.
dolt reset .
This form resets <b>all</b> staged tables to their values at HEAD. It is the opposite of <b>dolt add .</b>`
This form resets {{.EmphasisLeft}}all{{.EmphasisRight}} staged tables to their values at HEAD. It is the opposite of {{.EmphasisLeft}}dolt add .{{.EmphasisRight}}`,
var resetSynopsis = []string{
"<tables>...",
"[--hard | --soft]",
Synopsis: []string{
"{{.LessThan}}tables{{.GreaterThan}}...",
"[--hard | --soft]",
},
}
type ResetCmd struct{}
@@ -70,12 +72,12 @@ func (cmd ResetCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd ResetCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, resetShortDesc, resetLongDesc, resetSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, resetDocContent, ap))
}
func (cmd ResetCmd) createArgParser() *argparser.ArgParser {
ap := argparser.NewArgParser()
ap.SupportsFlag(HardResetParam, "", "Resets the working tables and staged tables. Any changes to tracked tables in the working tree since <commit> are discarded.")
ap.SupportsFlag(HardResetParam, "", "Resets the working tables and staged tables. Any changes to tracked tables in the working tree since {{.LessThan}}commit{{.GreaterThan}} are discarded.")
ap.SupportsFlag(SoftResetParam, "", "Does not touch the working tables, but removes all tables staged to be committed.")
return ap
}
@@ -83,7 +85,7 @@ func (cmd ResetCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd ResetCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, resetShortDesc, resetLongDesc, resetSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, resetDocContent, ap))
apr := cli.ParseArgs(ap, args, help)
if apr.ContainsArg(doltdb.DocTableName) {
+8 -6
View File
@@ -29,10 +29,12 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
)
var schExportShortDesc = "Exports a table's schema."
var schExportLongDesc = ""
var schExportSynopsis = []string{
"<table> <file>",
var schExportDocs = cli.CommandDocumentationContent{
ShortDesc: "Exports a table's schema.",
LongDesc: "",
Synopsis: []string{
"{{.LessThan}}table{{.GreaterThan}} {{.LessThan}}file{{.GreaterThan}}",
},
}
const (
@@ -56,7 +58,7 @@ func (cmd ExportCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd ExportCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, schExportShortDesc, schExportLongDesc, schExportSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, schExportDocs, ap))
}
func (cmd ExportCmd) createArgParser() *argparser.ArgParser {
@@ -77,7 +79,7 @@ func (cmd ExportCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd ExportCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, schExportShortDesc, schExportLongDesc, schExportSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, schExportDocs, ap))
apr := cli.ParseArgs(ap, args, help)
root, verr := commands.GetWorkingWithVErr(dEnv)
+28 -40
View File
@@ -54,40 +54,28 @@ const (
delimParam = "delim"
)
var schImportShortDesc = "Creates a new table with an inferred schema."
var schImportLongDesc = "If <b>--create | -c</b> is given the operation will create <table> with a schema that it infers" +
"from the supplied file. One or more primary key columns must be specified using the <b>--pks</b> parameter.\n" +
"\n" +
"If <b>--update | -u</b> is given the operation will update <table> any additional columns, or change the types of columns" +
"based on the file supplied. If the <b>--keep-types</b> parameter is supplied then the types for existing columns will" +
"not be modified, even if they differ from what is in the supplied file." +
"\n" +
"If <b>--replace | -r</b> is given the operation will replace <table> with a new, empty table which has a schema inferred from" +
"the supplied file but columns tags will be maintained across schemas. <b>--keep-types</b> can also be supplied here" +
"to guarantee that types are the same in the file and in the pre-existing table.\n" +
"\n" +
"A mapping file can be used to map fields between the file being imported and the table's schema being inferred. This can" +
"be used when creating a new table, or updating or replacing an existing table.\n" +
"\n" +
tblcmds.MappingFileHelp +
"\n" +
"In create, update, and replace scenarios the file's extension is used to infer the type of the file. If a file does not" +
"have the expected extension then the <b>--file-type</b> parameter should be used to explicitly define the format of" +
"the file in one of the supported formats (Currently only csv is supported). For files separated by a delimiter other than a" +
"',', the --delim parameter can be used to specify a delimeter.\n" +
"\n" +
"If the parameter <b>--dry-run</b> is supplied a sql statement will be generated showing what would be executed if this" +
"were run without the --dry-run flag\n" +
"\n" +
"<b>--float-threshold</b> is the threshold at which a string representing a floating point number should be interpreted as" +
"a float versus an int. If FloatThreshold is 0.0 then any number with a decimal point will be interpreted as a" +
"float (such as 0.0, 1.0, etc). If FloatThreshold is 1.0 then any number with a decimal point will be converted" +
"to an int (0.5 will be the int 0, 1.99 will be the int 1, etc. If the FloatThreshold is 0.001 then numbers with" +
"a fractional component greater than or equal to 0.001 will be treated as a float (1.0 would be an int, 1.0009 would" +
"be an int, 1.001 would be a float, 1.1 would be a float, etc)"
var schImportDocs = cli.CommandDocumentationContent{
ShortDesc: "Creates a new table with an inferred schema.",
LongDesc: `If {{.EmphasisLeft}}--create | -c{{.EmphasisRight}} is given the operation will create {{.LessThan}}table{{.GreaterThan}} with a schema that it infers from the supplied file. One or more primary key columns must be specified using the {{.EmphasisLeft}}--pks{{.EmphasisRight}} parameter.
var schImportSynopsis = []string{
"[--create|--replace] [--force] [--dry-run] [--lower|--upper] [--keep-types] [--file-type <type>] [--float-threshold] [--map <mapping-file>] [--delim <delimiter>]--pks <field>,... <table> <file>",
If {{.EmphasisLeft}}--update | -u{{.EmphasisRight}} is given the operation will update {{.LessThan}}table{{.GreaterThan}} any additional columns, or change the types of columns based on the file supplied. If the {{.EmphasisLeft}}--keep-types{{.EmphasisRight}} parameter is supplied then the types for existing columns will not be modified, even if they differ from what is in the supplied file.
If {{.EmphasisLeft}}--replace | -r{{.EmphasisRight}} is given the operation will replace {{.LessThan}}table{{.GreaterThan}} with a new, empty table which has a schema inferred from the supplied file but columns tags will be maintained across schemas. {{.EmphasisLeft}}--keep-types{{.EmphasisRight}} can also be supplied here to guarantee that types are the same in the file and in the pre-existing table.
A mapping file can be used to map fields between the file being imported and the table's schema being inferred. This can be used when creating a new table, or updating or replacing an existing table.
tblcmds.MappingFileHelp
In create, update, and replace scenarios the file's extension is used to infer the type of the file. If a file does not have the expected extension then the {{.EmphasisLeft}}--file-type{{.EmphasisRight}} parameter should be used to explicitly define the format of the file in one of the supported formats (Currently only csv is supported). For files separated by a delimiter other than a ',', the --delim parameter can be used to specify a delimeter.
If the parameter {{.EmphasisLeft}}--dry-run{{.EmphasisRight}} is supplied a sql statement will be generated showing what would be executed if this were run without the --dry-run flag
{{.EmphasisLeft}}--float-threshold{{.EmphasisRight}} is the threshold at which a string representing a floating point number should be interpreted as a float versus an int. If FloatThreshold is 0.0 then any number with a decimal point will be interpreted as a float (such as 0.0, 1.0, etc). If FloatThreshold is 1.0 then any number with a decimal point will be converted to an int (0.5 will be the int 0, 1.99 will be the int 1, etc. If the FloatThreshold is 0.001 then numbers with a fractional component greater than or equal to 0.001 will be treated as a float (1.0 would be an int, 1.0009 would be an int, 1.001 would be a float, 1.1 would be a float, etc)
`,
Synopsis: []string{
`[--create|--replace] [--force] [--dry-run] [--lower|--upper] [--keep-types] [--file-type {{.LessThan}}type{{.GreaterThan}}] [--float-threshold] [--map {{.LessThan}}mapping-file{{.GreaterThan}}] [--delim {{.LessThan}}delimiter{{.GreaterThan}}]--pks {{.LessThan}}field{{.GreaterThan}},... {{.LessThan}}table{{.GreaterThan}} {{.LessThan}}file{{.GreaterThan}}`,
},
}
type importOp int
@@ -126,21 +114,21 @@ func (cmd ImportCmd) EventType() eventsapi.ClientEventType {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd ImportCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, schImportShortDesc, schImportLongDesc, schImportSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, schImportDocs, ap))
}
func (cmd ImportCmd) createArgParser() *argparser.ArgParser {
ap := argparser.NewArgParser()
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"table", "Name of the table to be created."})
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"file", "The file being used to infer the schema."})
ap.SupportsFlag(createFlag, "c", "Create a table with the schema inferred from the <file> provided.")
ap.SupportsFlag(updateFlag, "u", "Update a table to match the inferred schema of the <file> provided")
ap.SupportsFlag(replaceFlag, "r", "Replace a table with a new schema that has the inferred schema from the <file> provided. All previous data will be lost.")
ap.SupportsFlag(createFlag, "c", "Create a table with the schema inferred from the {{.LessThan}}file{{.GreaterThan}} provided.")
ap.SupportsFlag(updateFlag, "u", "Update a table to match the inferred schema of the {{.LessThan}}file{{.GreaterThan}} provided")
ap.SupportsFlag(replaceFlag, "r", "Replace a table with a new schema that has the inferred schema from the {{.LessThan}}file{{.GreaterThan}} provided. All previous data will be lost.")
ap.SupportsFlag(dryRunFlag, "", "Print the sql statement that would be run if executed without the flag.")
ap.SupportsFlag(keepTypesParam, "", "When a column already exists in the table, and it's also in the <file> provided, use the type from the table.")
ap.SupportsFlag(keepTypesParam, "", "When a column already exists in the table, and it's also in the {{.LessThan}}file{{.GreaterThan}} provided, use the type from the table.")
ap.SupportsString(fileTypeParam, "", "type", "Explicitly define the type of the file if it can't be inferred from the file extension.")
ap.SupportsString(pksParam, "", "comma-separated-col-names", "List of columns used as the primary key cols. Order of the columns will determine sort order.")
ap.SupportsString(mappingParam, "", "mapping-file", "A file that can map a column name in <file> to a new value.")
ap.SupportsString(mappingParam, "", "mapping-file", "A file that can map a column name in {{.LessThan}}file{{.GreaterThan}} to a new value.")
ap.SupportsString(floatThresholdParam, "", "float", "Minimum value at which the fractional component of a value must exceed in order to be considered a float.")
ap.SupportsString(delimParam, "", "delimiter", "Specify a delimiter for a csv style file with a non-comma delimiter.")
return ap
@@ -150,7 +138,7 @@ func (cmd ImportCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd ImportCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, schImportShortDesc, schImportLongDesc, schImportSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, schImportDocs, ap))
apr := cli.ParseArgs(ap, args, help)
if apr.NArg() != 2 {
+9 -8
View File
@@ -30,13 +30,14 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)
var tblSchemaShortDesc = "Shows the schema of one or more tables."
var tblSchemaLongDesc = "dolt table schema displays the schema of tables at a given commit. If no commit is provided the working set will be used." +
"\n" +
"A list of tables can optionally be provided. If it is omitted all table schemas will be shown."
var tblSchemaDocs = cli.CommandDocumentationContent{
ShortDesc: "Shows the schema of one or more tables.",
LongDesc: `{{.EmphasisLeft}}dolt table schema{{.EmphasisRight}} displays the schema of tables at a given commit. If no commit is provided the working set will be used. +
var tblSchemaSynopsis = []string{
"[<commit>] [<table>...]",
A list of tables can optionally be provided. If it is omitted all table schemas will be shown.`,
Synopsis: []string{
"[{{.LessThan}}commit{{.GreaterThan}}] [{{.LessThan}}table{{.GreaterThan}}...]",
},
}
var bold = color.New(color.Bold)
@@ -56,7 +57,7 @@ func (cmd ShowCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd ShowCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, tblSchemaShortDesc, tblSchemaLongDesc, tblSchemaSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, tblSchemaDocs, ap))
}
func (cmd ShowCmd) createArgParser() *argparser.ArgParser {
@@ -74,7 +75,7 @@ func (cmd ShowCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd ShowCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, tblSchemaShortDesc, tblSchemaLongDesc, tblSchemaSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, tblSchemaDocs, ap))
apr := cli.ParseArgs(ap, args, help)
verr := printSchemas(ctx, apr, dEnv)
+4 -4
View File
@@ -33,9 +33,9 @@ import (
// SendMetricsCommand is the command used for sending metrics
const (
SendMetricsCommand = "send-metrics"
outputFlag = "output"
sendMetricsShortDec = "Send metrics to the events server or print them to stdout"
SendMetricsCommand = "send-metrics"
outputFlag = "output"
sendMetricsShortDesc = "Send metrics to the events server or print them to stdout"
)
type SendMetricsCmd struct{}
@@ -72,7 +72,7 @@ func (cmd SendMetricsCmd) Exec(ctx context.Context, commandStr string, args []st
ap := argparser.NewArgParser()
ap.SupportsFlag(outputFlag, "o", "Flush events to stdout.")
help, _ := cli.HelpAndUsagePrinters(commandStr, sendMetricsShortDec, "", []string{}, ap)
help, _ := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, cli.CommandDocumentationContent{ShortDesc: sendMetricsShortDesc}, ap))
apr := cli.ParseArgs(ap, args, help)
metricsDisabled := dEnv.Config.GetStringOrDefault(env.MetricsDisabled, "false")
+414 -137
View File
@@ -31,6 +31,7 @@ import (
sqle "github.com/src-d/go-mysql-server"
"github.com/src-d/go-mysql-server/sql"
"vitess.io/vitess/go/vt/sqlparser"
"vitess.io/vitess/go/vt/vterrors"
"github.com/liquidata-inc/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/dolt/go/cmd/dolt/errhand"
@@ -55,35 +56,49 @@ import (
"github.com/liquidata-inc/dolt/go/store/types"
)
var sqlShortDesc = "Runs a SQL query"
var sqlLongDesc = `Runs a SQL query you specify. By default, begins an interactive shell to run queries and view the
results. With the -q option, runs the given query and prints any results, then exits.
var sqlDocs = cli.CommandDocumentationContent{
ShortDesc: "Runs a SQL query",
LongDesc: "Runs a SQL query you specify. With no arguments, begins an interactive shell to run queries and view " +
"the results. With the {{.EmphasisLeft}}-q{{.EmphasisRight}} option, runs the given query and prints any " +
"results, then exits.\n" +
"\n" +
"By default, {{.EmphasisLeft}}-q{{.EmphasisRight}} executes a single statement. To execute multiple SQL " +
"statements separated by semicolons, use {{.EmphasisLeft}}-b{{.EmphasisRight}} to enable batch mode. Queries can" +
"be saved with {{.EmphasisLeft}}-s{{.EmphasisRight}}.\n" +
"\n" +
"Alternatively {{.EmpahasisLeft}}-x{{.EmphasisRight}} can be used to execute a saved query by name.\n" +
"\n" +
"Pipe SQL statements to dolt sql (no {{.EmphasisLeft}}-q{{.EmphasisRight}}) to execute a SQL import or update " +
"script.\n" +
"\n" +
"Known limitations:\n" +
"* No support for creating indexes\n" +
"* No support for foreign keys\n" +
"* No support for column constraints besides NOT NULL\n" +
"* No support for default values\n" +
"* Joins can only use indexes for two table joins. Three or more tables in a join query will use a non-indexed " +
"join, which is very slow.",
Pipe SQL statements to dolt sql (no -q) to execute a SQL import or update script.
Known limitations:
* No support for creating indexes
* No support for foreign keys
* No support for column constraints besides NOT NULL
* No support for default values
* Column types aren't always preserved accurately from SQL create table statements. VARCHAR columns are unlimited
length; FLOAT, INTEGER columns are 64 bit
* Joins can only use indexes for two table joins. Three or more tables in a join query will use a non-indexed
join, which is very slow.
`
var sqlSynopsis = []string{
"",
"-q <query>",
"-q <query> -r <result format>",
"-q <query> -s <name> -m <message>",
Synopsis: []string{
"",
"-q {{.LessThan}}query{{.GreaterThan}}",
"-q {{.LessThan}}query;query{{.GreaterThan}} -b",
"-q {{.LessThan}}query{{.GreaterThan}} -r {{.LessThan}}result format{{.GreaterThan}}",
"-q {{.LessThan}}query{{.GreaterThan}} -s {{.LessThan}}name{{.GreaterThan}} -m {{.LessThan}}message{{.GreaterThan}}",
"-x {{.LessThan}}name{{.GreaterThan}}",
"--list-saved",
},
}
// Flag names for the `dolt sql` command, plus the interactive shell banner.
const (
	queryFlag     = "query"
	formatFlag    = "result-format"
	saveFlag      = "save"
	executeFlag   = "execute"
	listSavedFlag = "list-saved"
	messageFlag   = "message"
	batchFlag     = "batch"
	welcomeMsg    = `# Welcome to the DoltSQL shell.
# Statements must be terminated with ';'.
# "exit" or "quit" (or Ctrl-D) to exit.`
)
@@ -103,7 +118,7 @@ func (cmd SqlCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd SqlCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
	ap := cmd.createArgParser()
	return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, sqlDocs, ap))
}
func (cmd SqlCmd) createArgParser() *argparser.ArgParser {
@@ -111,7 +126,10 @@ func (cmd SqlCmd) createArgParser() *argparser.ArgParser {
ap.SupportsString(queryFlag, "q", "SQL query to run", "Runs a single query and exits")
ap.SupportsString(formatFlag, "r", "result output format", "How to format result output. Valid values are tabular, csv. Defaults to tabular. ")
ap.SupportsString(saveFlag, "s", "saved query name", "Used with --query, save the query to the query catalog with the name provided. Saved queries can be examined in the dolt_query_catalog system table.")
ap.SupportsString(executeFlag, "x", "saved query name", "Executes a saved query with the given name")
ap.SupportsFlag(listSavedFlag, "l", "Lists all saved queries")
ap.SupportsString(messageFlag, "m", "saved query description", "Used with --query and --save, saves the query with the descriptive message given. See also --name")
ap.SupportsFlag(batchFlag, "b", "batch mode, to run more than one query with --query, separated by ';'. Piping input to sql with no arguments also uses batch mode")
return ap
}
@@ -123,9 +141,14 @@ func (cmd SqlCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd SqlCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, sqlShortDesc, sqlLongDesc, sqlSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, sqlDocs, ap))
apr := cli.ParseArgs(ap, args, help)
err := validateSqlArgs(apr)
if err != nil {
return HandleVErrAndExitCode(errhand.VerboseErrorFromError(err), usage)
}
args = apr.Args()
root, verr := GetWorkingWithVErr(dEnv)
@@ -142,65 +165,186 @@ func (cmd SqlCmd) Exec(ctx context.Context, commandStr string, args []string, dE
}
origRoot := root
if query, queryOK := apr.GetValue(queryFlag); queryOK {
batchMode := apr.Contains(batchFlag)
saveMessage := apr.GetValueOrDefault(messageFlag, "")
saveName := apr.GetValueOrDefault(saveFlag, "")
if batchMode {
batchInput := strings.NewReader(query)
root, verr = execBatch(ctx, dEnv, root, batchInput, format)
} else {
root, verr = execQuery(ctx, dEnv, root, query, format)
err := validateSqlArgs(apr)
if err != nil {
return HandleVErrAndExitCode(errhand.VerboseErrorFromError(err), usage)
}
if verr != nil {
return HandleVErrAndExitCode(verr, usage)
}
saveName := apr.GetValueOrDefault(saveFlag, "")
if saveName != "" {
saveMessage := apr.GetValueOrDefault(messageFlag, "")
root, verr = saveQuery(ctx, root, dEnv, query, saveName, saveMessage)
}
}
} else if savedQueryName, exOk := apr.GetValue(executeFlag); exOk {
sq, err := dsqle.RetrieveFromQueryCatalog(ctx, root, savedQueryName)
// run a single command and exit
if query, ok := apr.GetValue(queryFlag); ok {
se, err := newSqlEngine(ctx, dEnv, dsqle.NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState), format)
if err != nil {
return HandleVErrAndExitCode(errhand.VerboseErrorFromError(err), usage)
}
if err := processQuery(ctx, query, se); err != nil {
return HandleVErrAndExitCode(errhand.VerboseErrorFromError(err), usage)
} else if se.sdb.Root() != origRoot {
return HandleVErrAndExitCode(UpdateWorkingWithVErr(dEnv, se.sdb.Root()), usage)
} else {
if saveName != "" {
return HandleVErrAndExitCode(cmd.saveQuery(context.Background(), se.sdb.Root(), dEnv, query, saveName, saveMessage), usage)
}
cli.PrintErrf("Executing saved query '%s':\n%s\n", savedQueryName, sq.Query)
root, verr = execQuery(ctx, dEnv, root, sq.Query, format)
} else if apr.Contains(listSavedFlag) {
hasQC, err := root.HasTable(ctx, doltdb.DoltQueryCatalogTableName)
if err != nil {
verr := errhand.BuildDError("error: Failed to read from repository.").AddCause(err).Build()
return HandleVErrAndExitCode(verr, usage)
}
if !hasQC {
return 0
}
query := "SELECT * FROM " + doltdb.DoltQueryCatalogTableName
_, verr = execQuery(ctx, dEnv, root, query, format)
} else {
// Run in either batch mode for piped input, or shell mode for interactive
runInBatchMode := true
fi, err := os.Stdin.Stat()
if err != nil {
if !osutil.IsWindows {
return HandleVErrAndExitCode(errhand.BuildDError("Couldn't stat STDIN. This is a bug.").Build(), usage)
}
} else {
runInBatchMode = fi.Mode()&os.ModeCharDevice == 0
}
if runInBatchMode {
root, verr = execBatch(ctx, dEnv, root, os.Stdin, format)
} else {
root, verr = execShell(ctx, dEnv, root, format)
}
}
// Run in either batch mode for piped input, or shell mode for interactive
fi, err := os.Stdin.Stat()
var se *sqlEngine
// Windows has a bug where STDIN can't be statted in some cases, see https://github.com/golang/go/issues/33570
if (err != nil && osutil.IsWindows) || (fi.Mode()&os.ModeCharDevice) == 0 {
se, err = newSqlEngine(ctx, dEnv, dsqle.NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState), format)
if err != nil {
return HandleVErrAndExitCode(errhand.VerboseErrorFromError(err), usage)
}
err = runBatchMode(ctx, se)
if err != nil {
return 1
}
} else if err != nil {
HandleVErrAndExitCode(errhand.BuildDError("Couldn't stat STDIN. This is a bug.").Build(), usage)
} else {
se, err = newSqlEngine(ctx, dEnv, dsqle.NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState), format)
if err != nil {
return HandleVErrAndExitCode(errhand.VerboseErrorFromError(err), usage)
}
err = runShell(ctx, se, dEnv)
if err != nil {
return HandleVErrAndExitCode(errhand.BuildDError("unable to start shell").AddCause(err).Build(), usage)
}
if verr != nil {
return HandleVErrAndExitCode(verr, usage)
}
// If the SQL session wrote a new root value, update the working set with it
if se.sdb.Root() != origRoot {
return HandleVErrAndExitCode(UpdateWorkingWithVErr(dEnv, se.sdb.Root()), usage)
if origRoot != root {
verr = UpdateWorkingWithVErr(dEnv, root)
}
return 0
return HandleVErrAndExitCode(verr, usage)
}
// execShell runs an interactive SQL shell against the given root value and
// returns the (possibly updated) root when the shell exits.
func execShell(ctx context.Context, dEnv *env.DoltEnv, root *doltdb.RootValue, format resultFormat) (*doltdb.RootValue, errhand.VerboseError) {
	db := dsqle.NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState)
	engine, err := newSqlEngine(ctx, dEnv, db, format)
	if err != nil {
		return nil, errhand.VerboseErrorFromError(err)
	}
	if err := runShell(ctx, engine, dEnv); err != nil {
		return nil, errhand.BuildDError("unable to start shell").AddCause(err).Build()
	}
	return engine.sdb.Root(), nil
}
// execBatch executes the SQL statements read from batchInput in batch mode and
// returns the resulting root value.
func execBatch(ctx context.Context, dEnv *env.DoltEnv, root *doltdb.RootValue, batchInput io.Reader, format resultFormat) (*doltdb.RootValue, errhand.VerboseError) {
	db := dsqle.NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState)
	engine, err := newSqlEngine(ctx, dEnv, db, format)
	if err != nil {
		return nil, errhand.VerboseErrorFromError(err)
	}
	if err := runBatchMode(ctx, engine, batchInput); err != nil {
		return nil, errhand.BuildDError("Error processing batch").Build()
	}
	return engine.sdb.Root(), nil
}
// execQuery runs a single SQL query against root, pretty-printing any result
// rows, and returns the resulting root value.
func execQuery(ctx context.Context, dEnv *env.DoltEnv, root *doltdb.RootValue, query string, format resultFormat) (*doltdb.RootValue, errhand.VerboseError) {
	db := dsqle.NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState)
	engine, err := newSqlEngine(ctx, dEnv, db, format)
	if err != nil {
		return nil, errhand.VerboseErrorFromError(err)
	}

	sqlSch, rowIter, err := processQuery(ctx, query, engine)
	if err != nil {
		return nil, formatQueryError(query, err)
	}

	if rowIter != nil {
		defer rowIter.Close()
		if err := engine.prettyPrintResults(ctx, engine.ddb.ValueReadWriter().Format(), sqlSch, rowIter); err != nil {
			return nil, errhand.VerboseErrorFromError(err)
		}
	}

	return engine.sdb.Root(), nil
}
// formatQueryError converts an error from query processing into a VerboseError
// suitable for display. Syntax errors are rendered with the offending line of
// the statement and a caret ('^') marking the error position; all other errors
// are wrapped unchanged.
func formatQueryError(query string, err error) errhand.VerboseError {
	const (
		maxStatementLen     = 128 // longest statement line we will display
		maxPosWhenTruncated = 64  // keep the caret at most this far into a truncated line
	)

	if se, ok := vterrors.AsSyntaxError(err); ok {
		verrBuilder := errhand.BuildDError("Error parsing SQL")
		verrBuilder.AddDetails(se.Message)

		// Walk the statement line by line until we reach the line containing
		// the error position, rebasing `position` to an offset within that
		// line. Earlier lines accumulate in prevLines so they are still shown
		// above the caret line.
		statement := se.Statement
		position := se.Position

		prevLines := ""
		for {
			idxNewline := strings.IndexRune(statement, '\n')

			if idxNewline == -1 {
				// Last (or only) line; the caret lands here.
				break
			} else if idxNewline < position {
				// Error is on a later line: consume this line into prevLines.
				position -= idxNewline + 1
				prevLines += statement[:idxNewline+1]
				statement = statement[idxNewline+1:]
			} else {
				// Error is on this line: drop everything after it.
				statement = statement[:idxNewline]
				break
			}
		}

		// If the error line is too long to display, trim it; shift the window
		// left first so the caret stays visible when it would fall past the cutoff.
		if len(statement) > maxStatementLen {
			if position > maxPosWhenTruncated {
				statement = statement[position-maxPosWhenTruncated:]
				position = maxPosWhenTruncated
			}
			if len(statement) > maxStatementLen {
				statement = statement[:maxStatementLen]
			}
		}

		verrBuilder.AddDetails(prevLines + statement)

		// Build a marker line of spaces with a caret under the error position.
		marker := make([]rune, position+1)
		for i := 0; i < position; i++ {
			marker[i] = ' '
		}
		marker[position] = '^'
		verrBuilder.AddDetails(string(marker))

		return verrBuilder.Build()
	} else {
		return errhand.VerboseErrorFromError(err)
	}
}
func getFormat(format string) (resultFormat, errhand.VerboseError) {
@@ -218,11 +362,47 @@ func validateSqlArgs(apr *argparser.ArgParseResults) error {
_, query := apr.GetValue(queryFlag)
_, save := apr.GetValue(saveFlag)
_, msg := apr.GetValue(messageFlag)
_, batch := apr.GetValue(batchFlag)
_, list := apr.GetValue(listSavedFlag)
_, execute := apr.GetValue(executeFlag)
if len(apr.Args()) > 0 && !query {
return errhand.BuildDError("Invalid Argument: use --query or -q to pass inline SQL queries").Build()
}
if execute {
if list {
return errhand.BuildDError("Invalid Argument: --execute|-x is not compatible with --list-saved").Build()
} else if query {
return errhand.BuildDError("Invalid Argument: --execute|-x is not compatible with --query|-q").Build()
} else if msg {
return errhand.BuildDError("Invalid Argument: --execute|-x is not compatible with --message|-m").Build()
} else if save {
return errhand.BuildDError("Invalid Argument: --execute|-x is not compatible with --save|-s").Build()
}
}
if list {
if execute {
return errhand.BuildDError("Invalid Argument: --list-saved is not compatible with --executed|x").Build()
} else if query {
return errhand.BuildDError("Invalid Argument: --list-saved is not compatible with --query|-q").Build()
} else if msg {
return errhand.BuildDError("Invalid Argument: --list-saved is not compatible with --message|-m").Build()
} else if save {
return errhand.BuildDError("Invalid Argument: --list-saved is not compatible with --save|-s").Build()
}
}
if batch {
if !query {
return errhand.BuildDError("Invalid Argument: --batch|-b must be used with --query|-q").Build()
}
if save || msg {
return errhand.BuildDError("Invalid Argument: --batch|-b is not compatible with --save|-s or --message|-m").Build()
}
}
if query {
if !save && msg {
return errhand.BuildDError("Invalid Argument: --message|-m is only used with --query|-q and --save|-s").Build()
@@ -240,13 +420,13 @@ func validateSqlArgs(apr *argparser.ArgParseResults) error {
}
// Saves the query given to the catalog with the name and message given.
func (cmd SqlCmd) saveQuery(ctx context.Context, root *doltdb.RootValue, dEnv *env.DoltEnv, query string, name string, message string) errhand.VerboseError {
newRoot, err := dsqle.NewQueryCatalogEntry(ctx, root, name, query, message)
func saveQuery(ctx context.Context, root *doltdb.RootValue, dEnv *env.DoltEnv, query string, name string, message string) (*doltdb.RootValue, errhand.VerboseError) {
_, newRoot, err := dsqle.NewQueryCatalogEntryWithNameAsID(ctx, root, name, query, message)
if err != nil {
return errhand.BuildDError("Couldn't save query").AddCause(err).Build()
return nil, errhand.BuildDError("Couldn't save query").AddCause(err).Build()
}
return UpdateWorkingWithVErr(dEnv, newRoot)
return newRoot, nil
}
// ScanStatements is a split function for a Scanner that returns each SQL statement in the input as a token. It doesn't
@@ -268,8 +448,8 @@ func scanStatements(data []byte, atEOF bool) (advance int, token []byte, err err
}
// runBatchMode processes queries until EOF. The Root of the sqlEngine may be updated.
func runBatchMode(ctx context.Context, se *sqlEngine) error {
scanner := bufio.NewScanner(os.Stdin)
func runBatchMode(ctx context.Context, se *sqlEngine, input io.Reader) error {
scanner := bufio.NewScanner(input)
const maxCapacity = 512 * 1024
buf := make([]byte, maxCapacity)
scanner.Buffer(buf, maxCapacity)
@@ -287,7 +467,8 @@ func runBatchMode(ctx context.Context, se *sqlEngine) error {
continue
}
if err := processBatchQuery(ctx, query, se); err != nil {
_, _ = fmt.Fprintf(cli.CliErr, "Error processing query '%s': %s\n", query, err.Error())
verr := formatQueryError(query, err)
cli.PrintErrln(verr.Verbose())
return err
}
query = ""
@@ -388,8 +569,15 @@ func runShell(ctx context.Context, se *sqlEngine, dEnv *env.DoltEnv) error {
return
}
if err := processQuery(ctx, query, se); err != nil {
shell.Println(color.RedString(err.Error()))
if sqlSch, rowIter, err := processQuery(ctx, query, se); err != nil {
verr := formatQueryError(query, err)
shell.Println(verr.Verbose())
} else if rowIter != nil {
defer rowIter.Close()
err = se.prettyPrintResults(ctx, se.ddb.ValueReadWriter().Format(), sqlSch, rowIter)
if err != nil {
shell.Println(color.RedString(err.Error()))
}
}
// TODO: there's a bug in the readline library when editing multi-line history entries.
@@ -520,56 +708,75 @@ func prepend(s string, ss []string) []string {
}
// Processes a single query. The Root of the sqlEngine will be updated if necessary.
func processQuery(ctx context.Context, query string, se *sqlEngine) error {
// Returns the schema and the row iterator for the results, which may be nil, and an error if one occurs.
func processQuery(ctx context.Context, query string, se *sqlEngine) (sql.Schema, sql.RowIter, error) {
sqlStatement, err := sqlparser.Parse(query)
if err == sqlparser.ErrEmpty {
// silently skip empty statements
return nil
return nil, nil, nil
} else if err != nil {
return fmt.Errorf("Error parsing SQL: %v.", err.Error())
return nil, nil, err
}
switch s := sqlStatement.(type) {
case *sqlparser.Select, *sqlparser.Insert, *sqlparser.Update, *sqlparser.OtherRead, *sqlparser.Show, *sqlparser.Explain, *sqlparser.Union:
sqlSch, rowIter, err := se.query(ctx, query)
if err == nil {
defer rowIter.Close()
err = se.prettyPrintResults(ctx, se.ddb.ValueReadWriter().Format(), sqlSch, rowIter)
}
return err
return se.query(ctx, query)
case *sqlparser.Delete:
ok := se.checkThenDeleteAllRows(ctx, s)
if ok {
return nil
return nil, nil, err
}
sqlSch, rowIter, err := se.query(ctx, query)
if err == nil {
defer rowIter.Close()
err = se.prettyPrintResults(ctx, se.ddb.Format(), sqlSch, rowIter)
}
return err
return se.query(ctx, query)
case *sqlparser.DDL:
_, err := sqlparser.ParseStrictDDL(query)
if err != nil {
return fmt.Errorf("Error parsing DDL: %v.", err.Error())
if se, ok := vterrors.AsSyntaxError(err); ok {
return nil, nil, vterrors.SyntaxError{Message: "While Parsing DDL: " + se.Message, Position: se.Position, Statement: se.Statement}
} else {
return nil, nil, fmt.Errorf("Error parsing DDL: %v.", err.Error())
}
}
return se.ddl(ctx, s, query)
default:
return fmt.Errorf("Unsupported SQL statement: '%v'.", query)
return nil, nil, fmt.Errorf("Unsupported SQL statement: '%v'.", query)
}
}
// stats accumulates the effects of INSERT/UPDATE/DELETE statements executed in
// batch mode, so progress can be reported and edits flushed periodically.
type stats struct {
	rowsInserted   int
	rowsUpdated    int
	rowsDeleted    int
	unflushedEdits int // row edits accumulated since the last flush of the batched database
	unprintedEdits int // row edits accumulated since the batch progress line was last printed
}

// batchEditStats is the single accumulator used by batch-mode processing.
var batchEditStats = &stats{}

// displayStrLen is the length of the last progress string printed, so it can be
// erased and rewritten in place.
var displayStrLen int

// maxBatchSize is the number of unflushed edits that triggers a flush.
const maxBatchSize = 200000

// updateInterval is the number of unprinted edits that triggers a progress update.
const updateInterval = 1000

// numUpdates returns the total number of rows affected so far.
func (s *stats) numUpdates() int {
	return s.rowsUpdated + s.rowsDeleted + s.rowsInserted
}

// shouldUpdateBatchModeOutput reports whether enough edits have accumulated to
// reprint the batch progress line.
func (s *stats) shouldUpdateBatchModeOutput() bool {
	return s.unprintedEdits >= updateInterval
}

// shouldFlush reports whether enough edits have accumulated to flush the
// batched database.
func (s *stats) shouldFlush() bool {
	return s.unflushedEdits >= maxBatchSize
}
// flushBatchedEdits writes any accumulated batch edits through to the database
// and resets the unflushed-edit counter.
func flushBatchedEdits(ctx context.Context, se *sqlEngine) error {
	if err := se.sdb.Flush(ctx); err != nil {
		return err
	}
	batchEditStats.unflushedEdits = 0
	return nil
}
// Processes a single query in batch mode. The Root of the sqlEngine may or may not be changed.
func processBatchQuery(ctx context.Context, query string, se *sqlEngine) error {
@@ -581,55 +788,115 @@ func processBatchQuery(ctx context.Context, query string, se *sqlEngine) error {
return fmt.Errorf("Error parsing SQL: %v.", err.Error())
}
switch sqlStatement.(type) {
case *sqlparser.Insert:
_, rowIter, err := se.query(ctx, query)
if canProcessAsBatchInsert(sqlStatement) {
err = processBatchInsert(se, ctx, query, sqlStatement)
if err != nil {
return fmt.Errorf("Error inserting rows: %v", err.Error())
return err
}
} else {
err := processNonInsertBatchQuery(ctx, se, query, sqlStatement)
if err != nil {
return err
}
}
if batchEditStats.shouldUpdateBatchModeOutput() {
updateBatchInsertOutput()
}
return nil
}
func processNonInsertBatchQuery(ctx context.Context, se *sqlEngine, query string, sqlStatement sqlparser.Statement) error {
// We need to commit whatever batch edits we've accumulated so far before executing the query
err := flushBatchedEdits(ctx, se)
if err != nil {
return err
}
sqlSch, rowIter, err := processQuery(ctx, query, se)
if err != nil {
return err
}
if rowIter != nil {
defer rowIter.Close()
err = mergeInsertResultIntoStats(rowIter, &batchEditStats)
err = mergeResultIntoStats(sqlStatement, rowIter, batchEditStats)
if err != nil {
return fmt.Errorf("Error inserting rows: %v", err.Error())
return fmt.Errorf("error executing statement: %v", err.Error())
}
if batchEditStats.numRowsInserted%maxBatchSize == 0 {
err := se.sdb.Flush(ctx)
// Some statement types should print results, even in batch mode.
switch sqlStatement.(type) {
case *sqlparser.Select, *sqlparser.OtherRead, *sqlparser.Show, *sqlparser.Explain, *sqlparser.Union:
if displayStrLen > 0 {
// If we've been printing in batch mode, print a newline to put the regular output on its own line
cli.Print("\n")
displayStrLen = 0
}
err = se.prettyPrintResults(ctx, se.ddb.ValueReadWriter().Format(), sqlSch, rowIter)
if err != nil {
return err
}
}
}
if batchEditStats.numRowsInserted%updateInterval == 0 {
updateBatchInsertOutput()
// And flush again afterwards, to make sure any following insert statements have the latest data
return flushBatchedEdits(ctx, se)
}
// processBatchInsert executes a simple INSERT statement in batch mode, folding
// its row counts into the global batch stats and flushing when the batch is full.
func processBatchInsert(se *sqlEngine, ctx context.Context, query string, sqlStatement sqlparser.Statement) error {
	_, rowIter, err := se.query(ctx, query)
	if err != nil {
		return fmt.Errorf("Error inserting rows: %v", err.Error())
	}

	if rowIter != nil {
		defer rowIter.Close()
		err = mergeResultIntoStats(sqlStatement, rowIter, batchEditStats)
		if err != nil {
			return fmt.Errorf("Error inserting rows: %v", err.Error())
		}
	}

	if batchEditStats.shouldFlush() {
		return flushBatchedEdits(ctx, se)
	}

	return nil
}
// canProcessAsBatchInsert returns whether the given statement can be processed as a batch insert. Only simple inserts
// (inserting a list of values) can be processed in this way. Other kinds of insert (notably INSERT INTO SELECT AS) need
// a flushed root and can't benefit from batch optimizations.
func canProcessAsBatchInsert(sqlStatement sqlparser.Statement) bool {
	switch s := sqlStatement.(type) {
	case *sqlparser.Insert:
		// Only VALUES-style inserts are batchable.
		if _, ok := s.Rows.(sqlparser.Values); ok {
			return true
		}
		return false
	default:
		return false
	}
}
// updateBatchInsertOutput rewrites the in-place batch progress line with the
// current insert/update/delete totals and resets the unprinted-edit counter.
func updateBatchInsertOutput() {
	displayStr := fmt.Sprintf("Rows inserted: %d Rows updated: %d Rows deleted: %d",
		batchEditStats.rowsInserted, batchEditStats.rowsUpdated, batchEditStats.rowsDeleted)
	displayStrLen = cli.DeleteAndPrint(displayStrLen, displayStr)
	batchEditStats.unprintedEdits = 0
}
// Updates the batch insert stats with the results of an insert operation.
func mergeInsertResultIntoStats(rowIter sql.RowIter, s *stats) error {
// Updates the batch insert stats with the results of an INSERT, UPDATE, or DELETE statement.
func mergeResultIntoStats(statement sqlparser.Statement, rowIter sql.RowIter, s *stats) error {
switch statement.(type) {
case *sqlparser.Insert, *sqlparser.Delete, *sqlparser.Update:
break
default:
return nil
}
for {
row, err := rowIter.Next()
if err == io.EOF {
@@ -637,8 +904,17 @@ func mergeInsertResultIntoStats(rowIter sql.RowIter, s *stats) error {
} else if err != nil {
return err
} else {
updated := row[0].(int64)
s.numRowsInserted += int(updated)
numRowsUpdated := row[0].(int64)
s.unflushedEdits += int(numRowsUpdated)
s.unprintedEdits += int(numRowsUpdated)
switch statement.(type) {
case *sqlparser.Insert:
s.rowsInserted += int(numRowsUpdated)
case *sqlparser.Delete:
s.rowsDeleted += int(numRowsUpdated)
case *sqlparser.Update:
s.rowsUpdated += int(numRowsUpdated)
}
}
}
}
@@ -661,6 +937,7 @@ type sqlEngine struct {
func newSqlEngine(ctx context.Context, dEnv *env.DoltEnv, db *dsqle.Database, format resultFormat) (*sqlEngine, error) {
engine := sqle.NewDefault()
engine.AddDatabase(db)
engine.AddDatabase(sql.NewInformationSchemaDatabase(engine.Catalog))
engine.Catalog.RegisterIndexDriver(dsqle.NewDoltIndexDriver(db))
err := engine.Init()
@@ -827,15 +1104,15 @@ func (se *sqlEngine) checkThenDeleteAllRows(ctx context.Context, s *sqlparser.De
// Executes a SQL DDL statement (create, update, etc.). Updates the new root value in
// the sqlEngine if necessary.
func (se *sqlEngine) ddl(ctx context.Context, ddl *sqlparser.DDL, query string) error {
func (se *sqlEngine) ddl(ctx context.Context, ddl *sqlparser.DDL, query string) (sql.Schema, sql.RowIter, error) {
switch ddl.Action {
case sqlparser.CreateStr, sqlparser.DropStr, sqlparser.AlterStr, sqlparser.RenameStr:
_, ri, err := se.query(ctx, query)
if err == nil {
ri.Close()
}
return err
return nil, nil, err
default:
return fmt.Errorf("Unhandled DDL action %v in query %v", ddl.Action, query)
return nil, nil, fmt.Errorf("Unhandled DDL action %v in query %v", ddl.Action, query)
}
}
+26
View File
@@ -53,6 +53,32 @@ func TestSqlConsole(t *testing.T) {
}
// TestSqlBatchMode verifies that `dolt sql -b -q` runs multiple semicolon-separated
// statements and exits with a zero status.
func TestSqlBatchMode(t *testing.T) {
	cases := []struct {
		query       string
		expectedRes int
	}{
		{
			query: "create table test (a int primary key);" +
				"insert into test values (1),(2),(3);" +
				"select * from test;",
			expectedRes: 0,
		},
	}

	for _, tc := range cases {
		t.Run(tc.query, func(t *testing.T) {
			dEnv := createEnvWithSeedData(t)
			args := []string{"-b", "-q", tc.query}
			result := SqlCmd{}.Exec(context.TODO(), "dolt sql", args, dEnv)
			assert.Equal(t, tc.expectedRes, result)
		})
	}
}
// Smoke tests, values are printed to console
func TestSqlSelect(t *testing.T) {
tests := []struct {
+4 -2
View File
@@ -29,11 +29,12 @@ import (
"github.com/liquidata-inc/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
dsqle "github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle"
)
// Serve starts a MySQL-compatible server. Returns any errors that were encountered.
func Serve(ctx context.Context, serverConfig *ServerConfig, rootValue *doltdb.RootValue, serverController *ServerController) (startError error, closeError error) {
func Serve(ctx context.Context, serverConfig *ServerConfig, rootValue *doltdb.RootValue, serverController *ServerController, dEnv *env.DoltEnv) (startError error, closeError error) {
if serverConfig == nil {
cli.Println("No configuration given, using defaults")
serverConfig = DefaultServerConfig()
@@ -76,8 +77,9 @@ func Serve(ctx context.Context, serverConfig *ServerConfig, rootValue *doltdb.Ro
userAuth := auth.NewAudit(auth.NewNativeSingle(serverConfig.User, serverConfig.Password, permissions), auth.NewAuditLog(logrus.StandardLogger()))
sqlEngine := sqle.NewDefault()
db := dsqle.NewDatabase("dolt", rootValue, nil, nil)
db := dsqle.NewDatabase("dolt", rootValue, dEnv.DoltDB, dEnv.RepoState)
sqlEngine.AddDatabase(db)
sqlEngine.AddDatabase(sql.NewInformationSchemaDatabase(sqlEngine.Catalog))
startError = dsqle.RegisterSchemaFragments(sql.NewContext(ctx), sqlEngine.Catalog, db)
if startError != nil {
@@ -121,7 +121,7 @@ func TestServerGoodParams(t *testing.T) {
t.Run(test.String(), func(t *testing.T) {
sc := CreateServerController()
go func(config *ServerConfig, sc *ServerController) {
_, _ = Serve(context.Background(), config, root, sc)
_, _ = Serve(context.Background(), config, root, sc, env)
}(test, sc)
err := sc.WaitForStart()
require.NoError(t, err)
@@ -145,7 +145,7 @@ func TestServerSelect(t *testing.T) {
sc := CreateServerController()
defer sc.StopServer()
go func() {
_, _ = Serve(context.Background(), serverConfig, root, sc)
_, _ = Serve(context.Background(), serverConfig, root, sc, env)
}()
err := sc.WaitForStart()
require.NoError(t, err)
+14 -10
View File
@@ -37,14 +37,15 @@ const (
logLevelFlag = "loglevel"
)
var sqlServerShortDesc = "Start a MySQL-compatible server."
var sqlServerLongDesc = `Start a MySQL-compatible server which can be connected to by MySQL clients.
var sqlServerDocs = cli.CommandDocumentationContent{
ShortDesc: "Start a MySQL-compatible server.",
LongDesc: `Start a MySQL-compatible server which can be connected to by MySQL clients.
Currently, only SELECT statements are operational, as support for other statements is
still being developed.
`
var sqlServerSynopsis = []string{
"[-H <host>] [-P <port>] [-u <user>] [-p <password>] [-t <timeout>] [-l <loglevel>] [-r]",
Currently, only {{.EmphasisLeft}}SELECT{{.EmphasisRight}} statements are operational, as support for other statements is still being developed.
`,
Synopsis: []string{
"[-H {{.LessThan}}host{{.GreaterThan}}] [-P {{.LessThan}}port{{.GreaterThan}}] [-u {{.LessThan}}user{{.GreaterThan}}] [-p {{.LessThan}}password{{.GreaterThan}}] [-t {{.LessThan}}timeout{{.GreaterThan}}] [-l {{.LessThan}}loglevel{{.GreaterThan}}] [-r]",
},
}
type SqlServerCmd struct{}
@@ -62,7 +63,7 @@ func (cmd SqlServerCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd SqlServerCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := createArgParser(DefaultServerConfig())
return cli.CreateMarkdown(fs, path, commandStr, sqlServerShortDesc, sqlServerLongDesc, sqlServerSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, sqlServerDocs, ap))
}
func createArgParser(serverConfig *ServerConfig) *argparser.ArgParser {
@@ -92,7 +93,7 @@ func SqlServerImpl(ctx context.Context, commandStr string, args []string, dEnv *
serverConfig := DefaultServerConfig()
ap := createArgParser(serverConfig)
help, usage := cli.HelpAndUsagePrinters(commandStr, sqlServerShortDesc, sqlServerLongDesc, sqlServerSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, sqlServerDocs, ap))
apr := cli.ParseArgs(ap, args, help)
args = apr.Args()
@@ -123,7 +124,10 @@ func SqlServerImpl(ctx context.Context, commandStr string, args []string, dEnv *
if logLevel, ok := apr.GetValue(logLevelFlag); ok {
serverConfig.LogLevel = LogLevel(logLevel)
}
if startError, closeError := Serve(ctx, serverConfig, root, serverController); startError != nil || closeError != nil {
cli.PrintErrf("Starting server on port %d.", serverConfig.Port)
if startError, closeError := Serve(ctx, serverConfig, root, serverController, dEnv); startError != nil || closeError != nil {
if startError != nil {
cli.PrintErrln(startError)
}
+7 -9
View File
@@ -35,13 +35,11 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/set"
)
// statusDocs describes the `dolt status` command for help output and generated
// markdown.
var statusDocs = cli.CommandDocumentationContent{
	ShortDesc: "Show the working status",
	LongDesc:  `Displays working tables that differ from the current HEAD commit, tables that differ from the staged tables, and tables that are in the working tree that are not tracked by dolt. The first are what you would commit by running {{.EmphasisLeft}}dolt commit{{.EmphasisRight}}; the second and third are what you could commit by running {{.EmphasisLeft}}dolt add .{{.EmphasisRight}} before running {{.EmphasisLeft}}dolt commit{{.EmphasisRight}}.`,
	Synopsis:  []string{""},
}
type StatusCmd struct{}
@@ -58,7 +56,7 @@ func (cmd StatusCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd StatusCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, statusShortDesc, statusLongDesc, statusSynopsis, ap)
return CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, statusDocs, ap))
}
func (cmd StatusCmd) createArgParser() *argparser.ArgParser {
@@ -69,7 +67,7 @@ func (cmd StatusCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd StatusCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, _ := cli.HelpAndUsagePrinters(commandStr, statusShortDesc, statusLongDesc, statusSynopsis, ap)
help, _ := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, statusDocs, ap))
cli.ParseArgs(ap, args, help)
stagedTblDiffs, notStagedTblDiffs, err := diff.GetTableDiffs(ctx, dEnv)
+11 -12
View File
@@ -28,18 +28,17 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
)
var tblCpShortDesc = "Makes a copy of a table"
var tblCpLongDesc = `The dolt table cp command makes a copy of a table at a given commit. If a commit is not specified the copy is made of
the table from the current working set.
var tblCpDocs = cli.CommandDocumentationContent{
ShortDesc: "Makes a copy of a table",
LongDesc: `The dolt table cp command makes a copy of a table at a given commit. If a commit is not specified the copy is made of the table from the current working set.
If a table exists at the target location this command will fail unless the <b>--force|-f</b> flag is provided. In this
case the table at the target location will be overwritten with the copied table.
If a table exists at the target location this command will fail unless the {{.EmphasisLeft}}--force|-f{{.EmphasisRight}} flag is provided. In this case the table at the target location will be overwritten with the copied table.
All changes will be applied to the working tables and will need to be staged using <b>dolt add</b> and committed
using <b>dolt commit</b>.`
var tblCpSynopsis = []string{
"[-f] [<commit>] <oldtable> <newtable>",
All changes will be applied to the working tables and will need to be staged using {{.EmphasisLeft}}dolt add{{.EmphasisRight}} and committed using {{.EmphasisLeft}}dolt commit{{.EmphasisRight}}.
`,
Synopsis: []string{
"[-f] [{{.LessThan}}commit{{.GreaterThan}}] {{.LessThan}}oldtable{{.GreaterThan}} {{.LessThan}}newtable{{.GreaterThan}}",
},
}
type CpCmd struct{}
@@ -57,7 +56,7 @@ func (cmd CpCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd CpCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, tblCpShortDesc, tblCpLongDesc, tblCpSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, tblCpDocs, ap))
}
func (cmd CpCmd) createArgParser() *argparser.ArgParser {
@@ -77,7 +76,7 @@ func (cmd CpCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd CpCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, tblCpShortDesc, tblCpLongDesc, tblCpSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, tblCpDocs, ap))
apr := cli.ParseArgs(ap, args, help)
if apr.NArg() < 2 || apr.NArg() > 3 {
+11 -7
View File
@@ -21,6 +21,7 @@ import (
"github.com/fatih/color"
"github.com/liquidata-inc/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/dolt/go/cmd/dolt/commands"
eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
@@ -30,12 +31,15 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/iohelp"
)
var exportShortDesc = `Export the contents of a table to a file.`
var exportLongDesc = `dolt table export will export the contents of <table> to <file>
var exportDocs = cli.CommandDocumentationContent{
ShortDesc: `Export the contents of a table to a file.`,
LongDesc: `{{.EmphasisLeft}}dolt table export{{.EmphasisRight}} will export the contents of {{.LessThan}}table{{.GreaterThan}} to {{.LessThan}}|file{{.GreaterThan}}
See the help for <b>dolt table import</b> as the options are the same.`
var exportSynopsis = []string{
"[-f] [-pk <field>] [-schema <file>] [-map <file>] [-continue] [-file-type <type>] <table> <file>",
See the help for {{.EmphasisLeft}}dolt table import{{.EmphasisRight}} as the options are the same.
`,
Synopsis: []string{
"[-f] [-pk {{.LessThan}}field{{.GreaterThan}}] [-schema {{.LessThan}}file{{.GreaterThan}}] [-map {{.LessThan}}file{{.GreaterThan}}] [-continue] [-file-type {{.LessThan}}type{{.GreaterThan}}] {{.LessThan}}table{{.GreaterThan}} {{.LessThan}}file{{.GreaterThan}}",
},
}
// validateExportArgs validates the input from the arg parser, and returns the tuple:
@@ -87,7 +91,7 @@ func validateExportArgs(apr *argparser.ArgParseResults, usage cli.UsagePrinter)
}
func parseExportArgs(ap *argparser.ArgParser, commandStr string, args []string) (bool, *mvdata.MoveOptions) {
help, usage := cli.HelpAndUsagePrinters(commandStr, exportShortDesc, exportLongDesc, exportSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, exportDocs, ap))
apr := cli.ParseArgs(ap, args, help)
tableName, tableLoc, fileLoc := validateExportArgs(apr, usage)
@@ -126,7 +130,7 @@ func (cmd ExportCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd ExportCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, exportShortDesc, exportLongDesc, exportSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, exportDocs, ap))
}
func (cmd ExportCmd) createArgParser() *argparser.ArgParser {
+21 -37
View File
@@ -63,11 +63,7 @@ var SchemaFileHelp = "Schema definition files are json files in the format:" + `
]
}
where "fields" is the array of columns in each row of the table
"constraints" is a list of table constraints. (Only primary_key constraint types are supported currently)
FIELD_NAME is the name of a column in a row and can be any valid string
KIND must be a supported noms kind (bool, string, uuid, uint, int, float)
INTEGER_FIELD_INDEX must be the 0 based index of the primary key in the "fields" array
where "fields" is the array of columns in each row of the table "constraints" is a list of table constraints. Only primary_key constraint types are supported currently. FIELD_NAME is the name of a column in a row and can be any valid string KIND must be a supported noms kind (bool, string, uuid, uint, int, float) INTEGER_FIELD_INDEX must be the 0 based index of the primary key in the "fields" array
`
var MappingFileHelp = "A mapping file is json in the format:" + `
@@ -80,46 +76,34 @@ var MappingFileHelp = "A mapping file is json in the format:" + `
where source_field_name is the name of a field in the file being imported and dest_field_name is the name of a field in the table being imported to.
`
var importShortDesc = `Imports data into a dolt table`
var importLongDesc = `If <b>--create-table | -c</b> is given the operation will create <table> and import the contents of file into it. If a
table already exists at this location then the operation will fail, unless the <b>--force | -f</b> flag is provided. The
force flag forces the existing table to be overwritten.
var importDocs = cli.CommandDocumentationContent{
ShortDesc: `Imports data into a dolt table`,
LongDesc: `If {{.EmphasisLeft}}--create-table | -c{{.EmphasisRight}} is given the operation will create {{.LessThan}}table{{.GreaterThan}} and import the contents of file into it. If a table already exists at this location then the operation will fail, unless the {{.EmphasisLeft}}--force | -f{{.EmphasisRight}} flag is provided. The force flag forces the existing table to be overwritten.
The schema for the new table can be specified explicitly by providing a schema definition file, or will be inferred
from the imported file. All schemas, inferred or explicitly defined must define a primary key. If the file format
being imported does not support defining a primary key, then the <b>--pk</b> parameter must supply the name of the
field that should be used as the primary key.
The schema for the new table can be specified explicitly by providing a schema definition file, or will be inferred from the imported file. All schemas, inferred or explicitly defined must define a primary key. If the file format being imported does not support defining a primary key, then the {{.EmphasisLeft}}--pk{{.EmphasisRight}} parameter must supply the name of the field that should be used as the primary key.
` + SchemaFileHelp +
`
If <b>--update-table | -u</b> is given the operation will update <table> with the contents of file. The table's existing
schema will be used, and field names will be used to match file fields with table fields unless a mapping file is specified.
`
If {{.EmphasisLeft}}--update-table | -u{{.EmphasisRight}} is given the operation will update {{.LessThan}}table{{.GreaterThan}} with the contents of file. The table's existing schema will be used, and field names will be used to match file fields with table fields unless a mapping file is specified.
During import, if there is an error importing any row, the import will be aborted by default. Use the <b>--continue</b>
flag to continue importing when an error is encountered.
During import, if there is an error importing any row, the import will be aborted by default. Use the {{.EmphasisLeft}}--continue{{.EmphasisRight}} flag to continue importing when an error is encountered.
If <b>--replace-table | -r</b> is given the operation will replace <table> with the contents of the file. The table's
existing schema will be used, and field names will be used to match file fields with table fields unless a mapping file is
specified.
If {{.EmphasisLeft}}--replace-table | -r{{.EmphasisRight}} is given the operation will replace {{.LessThan}}table{{.GreaterThan}} with the contents of the file. The table's existing schema will be used, and field names will be used to match file fields with table fields unless a mapping file is specified.
If the schema for the existing table does not match the schema for the new file, the import will be aborted by default. To
overwrite both the table and the schema, use <b>-c -f</b>.
If the schema for the existing table does not match the schema for the new file, the import will be aborted by default. To overwrite both the table and the schema, use {{.EmphasisLeft}}-c -f{{.EmphasisRight}}.
A mapping file can be used to map fields between the file being imported and the table being written to. This can
be used when creating a new table, or updating or replacing an existing table.
A mapping file can be used to map fields between the file being imported and the table being written to. This can be used when creating a new table, or updating or replacing an existing table.
` + MappingFileHelp +
`
In create, update, and replace scenarios the file's extension is used to infer the type of the file. If a file does not
have the expected extension then the <b>--file-type</b> parameter should be used to explicitly define the format of
the file in one of the supported formats (csv, psv, json, xlsx). For files separated by a delimiter other than a
',' (type csv) or a '|' (type psv), the --delim parameter can be used to specify a delimeter`
`
In create, update, and replace scenarios the file's extension is used to infer the type of the file. If a file does not have the expected extension then the {{.EmphasisLeft}}--file-type{{.EmphasisRight}} parameter should be used to explicitly define the format of the file in one of the supported formats (csv, psv, json, xlsx). For files separated by a delimiter other than a ',' (type csv) or a '|' (type psv), the --delim parameter can be used to specify a delimeter`,
var importSynopsis = []string{
"-c [-f] [--pk <field>] [--schema <file>] [--map <file>] [--continue] [--file-type <type>] <table> <file>",
"-u [--map <file>] [--continue] [--file-type <type>] <table> <file>",
"-r [--map <file>] [--file-type <type>] <table> <file>",
Synopsis: []string{
"-c [-f] [--pk {{.LessThan}}field{{.GreaterThan}}] [--schema {{.LessThan}}file{{.GreaterThan}}] [--map {{.LessThan}}file{{.GreaterThan}}] [--continue] [--file-type {{.LessThan}}type{{.GreaterThan}}] {{.LessThan}}table{{.GreaterThan}} {{.LessThan}}file{{.GreaterThan}}",
"-u [--map {{.LessThan}}file{{.GreaterThan}}] [--continue] [--file-type {{.LessThan}}type{{.GreaterThan}}] {{.LessThan}}table{{.GreaterThan}} {{.LessThan}}file{{.GreaterThan}}",
"-r [--map {{.LessThan}}file{{.GreaterThan}}] [--file-type {{.LessThan}}type{{.GreaterThan}}] {{.LessThan}}table{{.GreaterThan}} {{.LessThan}}file{{.GreaterThan}}",
},
}
func getMoveParameters(apr *argparser.ArgParseResults) (mvdata.MoveOperation, mvdata.TableDataLocation, mvdata.DataLocation, interface{}) {
@@ -246,7 +230,7 @@ func (cmd ImportCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd ImportCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, importShortDesc, importLongDesc, importSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, importDocs, ap))
}
func (cmd ImportCmd) createArgParser() *argparser.ArgParser {
@@ -263,7 +247,7 @@ func (cmd ImportCmd) EventType() eventsapi.ClientEventType {
func (cmd ImportCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, importShortDesc, importLongDesc, importSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, importDocs, ap))
apr := cli.ParseArgs(ap, args, help)
err := validateImportArgs(apr)
@@ -302,7 +286,7 @@ func createArgParser() *argparser.ArgParser {
ap := argparser.NewArgParser()
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{tableParam, "The new or existing table being imported to."})
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{fileParam, "The file being imported. Supported file types are csv, psv, and nbf."})
ap.SupportsFlag(createParam, "c", "Create a new table, or overwrite an existing table (with the -f flag) from the imported data.")
ap.SupportsFlag(createParam, "c", "Create a new table, or overwrite an existing table (with the {{.EmphasisLeft}}-f{{.EmphasisRight}} flag) from the imported data.")
ap.SupportsFlag(updateParam, "u", "Update an existing table with the imported data.")
ap.SupportsFlag(forceParam, "f", "If a create operation is being executed, data already exists in the destination, the Force flag will allow the target to be overwritten.")
ap.SupportsFlag(replaceParam, "r", "Replace existing table with imported data while preserving the original schema.")
+12 -10
View File
@@ -27,18 +27,20 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)
var tblMvShortDesc = "Renames a table"
var tblMvLongDesc = `
var tblMvDocs = cli.CommandDocumentationContent{
ShortDesc: "Renames a table",
LongDesc: `
The dolt table mv command will rename a table. If a table exists with the target name this command will
fail unless the <b>--force|-f</b> flag is provided. In that case the table at the target location will be overwritten
fail unless the {{.EmphasisLeft}}--force|-f{{.EmphasisRight}} flag is provided. In that case the table at the target location will be overwritten
by the table being renamed.
The result is equivalent of running <b>dolt table cp <old> <new></b> followed by <b>dolt table rm <old></b>, resulting
in a new table and a deleted table in the working set. These changes can be staged using <b>dolt add</b> and committed
using <b>dolt commit</b>.`
The result is equivalent of running {{.EmphasisLeft}}dolt table cp <old> <new>{{.EmphasisRight}} followed by {{.EmphasisLeft}}dolt table rm <old>{{.EmphasisRight}}, resulting
in a new table and a deleted table in the working set. These changes can be staged using {{.EmphasisLeft}}dolt add{{.EmphasisRight}} and committed
using {{.EmphasisLeft}}dolt commit{{.EmphasisRight}}.`,
var tblMvSynopsis = []string{
"[-f] <oldtable> <newtable>",
Synopsis: []string{
"[-f] {{.LessThan}}oldtable{{.EmphasisRight}} {{.LessThan}}newtable{{.EmphasisRight}}",
},
}
type MvCmd struct{}
@@ -56,7 +58,7 @@ func (cmd MvCmd) Description() string {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd MvCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, tblMvShortDesc, tblMvLongDesc, tblMvSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, tblMvDocs, ap))
}
func (cmd MvCmd) createArgParser() *argparser.ArgParser {
@@ -75,7 +77,7 @@ func (cmd MvCmd) EventType() eventsapi.ClientEventType {
// Exec executes the command
func (cmd MvCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, tblMvShortDesc, tblMvLongDesc, tblMvSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, tblMvDocs, ap))
apr := cli.ParseArgs(ap, args, help)
if apr.NArg() != 2 {
+8 -7
View File
@@ -28,11 +28,12 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
)
var tblRmShortDesc = "Removes table(s) from the working set of tables."
var tblRmLongDesc = "dolt table rm removes table(s) from the working set. These changes can be staged using " +
"<b>dolt add</b> and committed using <b>dolt commit</b>"
var tblRmSynopsis = []string{
"<table>...",
var tblRmDocs = cli.CommandDocumentationContent{
ShortDesc: "Removes table(s) from the working set of tables.",
LongDesc: "{{.EmphasisLeft}}dolt table rm{{.EmphasisRight}} removes table(s) from the working set. These changes can be staged using {{.EmphasisLeft}}dolt add{{.EmphasisRight}} and committed using {{.EmphasisLeft}}dolt commit{{.EmphasisRight}}",
Synopsis: []string{
"{{.LessThan}}table{{.GreaterThan}}...",
},
}
type RmCmd struct{}
@@ -55,7 +56,7 @@ func (cmd RmCmd) EventType() eventsapi.ClientEventType {
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd RmCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return cli.CreateMarkdown(fs, path, commandStr, tblRmShortDesc, tblRmLongDesc, tblRmSynopsis, ap)
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, tblRmDocs, ap))
}
func (cmd RmCmd) createArgParser() *argparser.ArgParser {
@@ -67,7 +68,7 @@ func (cmd RmCmd) createArgParser() *argparser.ArgParser {
// Exec executes the command
func (cmd RmCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(commandStr, tblRmShortDesc, tblRmLongDesc, tblRmSynopsis, ap)
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, tblRmDocs, ap))
apr := cli.ParseArgs(ap, args, help)
if apr.NArg() == 0 {
+1 -1
View File
@@ -40,7 +40,7 @@ import (
)
const (
Version = "0.15.0"
Version = "0.15.1"
)
var dumpDocsCommand = &commands.DumpDocsCmd{}
+19 -24
View File
@@ -1,8 +1,7 @@
module github.com/liquidata-inc/dolt/go
require (
cloud.google.com/go/pubsub v1.2.0 // indirect
cloud.google.com/go/storage v1.5.0
cloud.google.com/go/storage v1.6.0
github.com/BurntSushi/toml v0.3.1
github.com/DataDog/datadog-go v3.4.0+incompatible // indirect
github.com/OneOfOne/xxhash v1.2.7 // indirect
@@ -13,10 +12,11 @@ require (
github.com/armon/go-metrics v0.3.2 // indirect
github.com/asaskevich/govalidator v0.0.0-20200108200545-475eaeb16496 // indirect
github.com/attic-labs/kingpin v2.2.7-0.20180312050558-442efcfac769+incompatible
github.com/aws/aws-sdk-go v1.29.1
github.com/aws/aws-sdk-go v1.29.28
github.com/bcicen/jstream v0.0.0-20190220045926-16c1f8af81c2
github.com/cenkalti/backoff v2.2.1+incompatible
github.com/codahale/blake2 v0.0.0-20150924215134-8d10d0420cbf
github.com/cpuguy83/go-md2man v1.0.10 // indirect
github.com/denisbrodbeck/machineid v1.0.1
github.com/dustin/go-humanize v1.0.0
github.com/fatih/color v1.9.0
@@ -26,7 +26,7 @@ require (
github.com/go-sql-driver/mysql v1.5.0
github.com/gocraft/dbr/v2 v2.7.0
github.com/gogo/protobuf v1.3.1 // indirect
github.com/golang/protobuf v1.3.3
github.com/golang/protobuf v1.3.5
github.com/golang/snappy v0.0.1
github.com/google/go-cmp v0.4.0
github.com/google/uuid v1.1.1
@@ -43,12 +43,12 @@ require (
github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d
github.com/kch42/buzhash v0.0.0-20160816060738-9bdec3dec7c6
github.com/kr/pretty v0.2.0 // indirect
github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi v0.0.0-20200207221941-8e60e239e1fb
github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi v0.0.0-20200320155049-a8e482faeffd
github.com/liquidata-inc/ishell v0.0.0-20190514193646-693241f1f2a0
github.com/liquidata-inc/mmap-go v1.0.3
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200225183643-358992a611e1
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15
github.com/mattn/go-isatty v0.0.12
github.com/mattn/go-runewidth v0.0.8
github.com/mattn/go-runewidth v0.0.9
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b
github.com/miekg/dns v1.1.27 // indirect
github.com/pelletier/go-toml v1.6.0 // indirect
@@ -57,32 +57,27 @@ require (
github.com/prometheus/client_golang v1.4.1 // indirect
github.com/rivo/uniseg v0.1.0
github.com/sergi/go-diff v1.1.0 // indirect
github.com/shirou/gopsutil v2.20.1+incompatible
github.com/shopspring/decimal v0.0.0-20200105231215-408a2507e114
github.com/shirou/gopsutil v2.20.2+incompatible
github.com/shopspring/decimal v0.0.0-20200227202807-02e2044944cc
github.com/sirupsen/logrus v1.4.2
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966
github.com/spaolacci/murmur3 v1.1.0 // indirect
github.com/spf13/cast v1.3.1 // indirect
github.com/spf13/cobra v0.0.5
github.com/spf13/cobra v0.0.6
github.com/spf13/pflag v1.0.5 // indirect
github.com/src-d/go-mysql-server v0.6.0
github.com/stretchr/testify v1.4.0
github.com/stretchr/testify v1.5.1
github.com/tealeg/xlsx v1.0.5
github.com/tidwall/pretty v1.0.1 // indirect
github.com/uber/jaeger-client-go v2.22.1+incompatible // indirect
github.com/uber/jaeger-lib v2.2.0+incompatible // indirect
go.mongodb.org/mongo-driver v1.3.0 // indirect
go.uber.org/atomic v1.5.1 // indirect
go.uber.org/multierr v1.4.0 // indirect
go.uber.org/zap v1.13.0
golang.org/x/crypto v0.0.0-20200210222208-86ce3cb69678
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd // indirect
golang.org/x/net v0.0.0-20200202094626-16171245cfb2
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56 // indirect
google.golang.org/api v0.17.0
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce // indirect
google.golang.org/grpc v1.27.1
go.uber.org/zap v1.14.1
golang.org/x/crypto v0.0.0-20200320145329-97fc981609be
golang.org/x/net v0.0.0-20200319234117-63522dbf7eec
golang.org/x/sys v0.0.0-20200317113312-5766fd39f98d
google.golang.org/api v0.20.0
google.golang.org/grpc v1.28.0
gopkg.in/square/go-jose.v2 v2.4.1
gopkg.in/src-d/go-errors.v1 v1.0.0
gopkg.in/yaml.v2 v2.2.8 // indirect
@@ -93,8 +88,8 @@ require (
replace github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi => ./gen/proto/dolt/services/eventsapi
replace github.com/src-d/go-mysql-server => github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200226213608-d094902116be
replace github.com/src-d/go-mysql-server => github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200318155626-711be775a19d
replace vitess.io/vitess => github.com/liquidata-inc/vitess v0.0.0-20200102230944-f3410911d61f
replace vitess.io/vitess => github.com/liquidata-inc/vitess v0.0.0-20200318153456-e0b079da3f54
go 1.13
+88 -10
View File
@@ -9,6 +9,8 @@ cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
cloud.google.com/go v0.52.0 h1:GGslhk/BU052LPlnI1vpp3fcbUs+hQ3E+Doti/3/vF8=
cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
cloud.google.com/go v0.53.0 h1:MZQCQQaRwOrAcuKjiHWHrgKykt4fZyuwF2dtiG3fGW8=
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
cloud.google.com/go/bigquery v1.0.1 h1:hL+ycaJpVE9M7nLoiXb/Pn10ENE2u+oddxbD8uu0ZVU=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0 h1:sAbMqjY1PEQKZBWfbu6Y6bsupJ9c4QdHnzg/VvYTLcE=
@@ -17,6 +19,7 @@ cloud.google.com/go/bigquery v1.4.0 h1:xE3CPsOgttP4ACBePh79zTKALtXwn/Edhcr16R5hM
cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
cloud.google.com/go/datastore v1.0.0 h1:Kt+gOPPp2LEPWp8CSfxhsM8ik9CcyE/gYu+0r+RnZvM=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
cloud.google.com/go/pubsub v1.0.1 h1:W9tAK3E57P75u0XLLR82LZyw8VpAnhmyTOxW9qzmyj8=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0 h1:9/vpR43S4aJaROxqQHQ3nH9lfyKKV0dC3vOmnw8ebQQ=
@@ -27,6 +30,8 @@ cloud.google.com/go/storage v1.0.0 h1:VV2nUM3wwLLGh9lSABFgZMjInyUbJeaRSE64WuAIQ+
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
cloud.google.com/go/storage v1.5.0 h1:RPUcBvDeYgQFMfQu1eBMq6piD1SXmLH+vK3qjewZPus=
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
cloud.google.com/go/storage v1.6.0 h1:UDpwYIwla4jHGzZJaEJYx1tOejbgSoNqsAfHAUYe2r8=
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
@@ -81,6 +86,8 @@ github.com/attic-labs/kingpin v2.2.7-0.20180312050558-442efcfac769+incompatible/
github.com/aws/aws-sdk-go v0.0.0-20180223184012-ebef4262e06a/go.mod h1:ZRmQr0FajVIyZ4ZzBYKG5P3ZqPz9IHG41ZoMu1ADI3k=
github.com/aws/aws-sdk-go v1.29.1 h1:U2vZ5WprhGAMjzb4bKVzl2QecUtZFW2BXVqa5bnd+OY=
github.com/aws/aws-sdk-go v1.29.1/go.mod h1:1KvfttTE3SPKMpo8g2c6jL3ZKfXtFvKscTgahTma5Xg=
github.com/aws/aws-sdk-go v1.29.28 h1:4eKUDBN+v1yxpGDxxPY+FG2Abc6yJB6vvkEDRJ9jIW0=
github.com/aws/aws-sdk-go v1.29.28/go.mod h1:1KvfttTE3SPKMpo8g2c6jL3ZKfXtFvKscTgahTma5Xg=
github.com/bcicen/jstream v0.0.0-20190220045926-16c1f8af81c2 h1:M+TYzBcNIRyzPRg66ndEqUMd7oWDmhvdQmaPC6EZNwM=
github.com/bcicen/jstream v0.0.0-20190220045926-16c1f8af81c2/go.mod h1:RDu/qcrnpEdJC/p8tx34+YBFqqX71lB7dOX9QE+ZC4M=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
@@ -105,19 +112,23 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cockroachdb/cmux v0.0.0-20170110192607-30d10be49292/go.mod h1:qRiX68mZX1lGBkTWyp3CLcenw9I94W2dLeRvMzcn9N4=
github.com/codahale/blake2 v0.0.0-20150924215134-8d10d0420cbf h1:5ZeQB3mThuz5C2MSER6T5GdtXTF9CMMk42F9BOyRsEQ=
github.com/codahale/blake2 v0.0.0-20150924215134-8d10d0420cbf/go.mod h1:BO2rLUAZMrpgh6GBVKi0Gjdqw2MgCtJrtmUdDeZRKjY=
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd h1:qMd81Ts1T2OTKmB4acZcyKaMtRnY5Y44NuXGX2GFJ1w=
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
github.com/coreos/etcd v0.0.0-20170626015032-703663d1f6ed/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
@@ -130,6 +141,7 @@ github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/evanphx/json-patch v4.5.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=
@@ -140,8 +152,10 @@ github.com/flynn-archive/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BMXYYRWT
github.com/flynn-archive/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:rZfgFAXFS/z/lEd6LJmf9HVZ1LkgYiHx5pHhV5DR16M=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/ghodss/yaml v0.0.0-20161207003320-04f313413ffd/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-ini/ini v1.12.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0 h1:wDJmvq38kDhkVxi50ni9ykkdUr1PKgqKOoi01fa0Mdk=
@@ -197,6 +211,7 @@ github.com/gogo/protobuf v1.3.1 h1:DqDEcV5aeaTmdFBePNpYsp3FlcVH/2ISVVM9Qf8PSls=
github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7 h1:5ZkaAPbicIKTF2I64qf5Fh8Aa83Q/dnOafMYV0OMwjA=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
@@ -205,12 +220,15 @@ github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4er
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.3 h1:gyjaxf+svBWX08ZjK86iN9geUJF0H6gp2IRKX6Nf6/I=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.3.5 h1:F768QJ1E9tib+q5Sc8MkdJi1RxLTbRcTf8LJV56aRls=
github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
github.com/golang/snappy v0.0.0-20170215233205-553a64147049/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
@@ -229,6 +247,7 @@ github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXi
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -245,9 +264,12 @@ github.com/gorilla/mux v1.7.0/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2z
github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc=
github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/gorilla/websocket v0.0.0-20160912153041-2d1e4548da23/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-middleware v1.1.0/go.mod h1:f5nM7jw/oeRSadq3xCzHAvxcr8HZnzsqU6ILg/0NiiE=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v0.0.0-20161128002007-199c40a060d1/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/hashicorp/consul v1.4.0/go.mod h1:mFrjN1mfidgJfYP1xrJCF+AfRhr6Eaqhb2+sfyn/OOI=
github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
@@ -339,18 +361,18 @@ github.com/krishicks/yaml-patch v0.0.10/go.mod h1:Sm5TchwZS6sm7RJoyg87tzxm2ZcKzd
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.3.0 h1:/qkRGz8zljWiDcFvgpwUpwIAPu3r07TDvs3Rws+o/pU=
github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200226213608-d094902116be h1:U92XLX9Su5cySJy53dV8SZGP6yBSVMCb8GD4lzoPnZ8=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200226213608-d094902116be/go.mod h1:Lh0pg7jnO08HxFm6oj6gtcSTUeeOTu4Spt50Aeo2mes=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200318155626-711be775a19d h1:lC5yfAkZlVoTjSq8OCydWbH8NvjIYtReh3ros1mvNC0=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200318155626-711be775a19d/go.mod h1:TCTrDbzIA05e8zV3SW+nsjc1LCR58GRSOIcF32lJ+Qc=
github.com/liquidata-inc/ishell v0.0.0-20190514193646-693241f1f2a0 h1:phMgajKClMUiIr+hF2LGt8KRuUa2Vd2GI1sNgHgSXoU=
github.com/liquidata-inc/ishell v0.0.0-20190514193646-693241f1f2a0/go.mod h1:YC1rI9k5gx8D02ljlbxDfZe80s/iq8bGvaaQsvR+qxs=
github.com/liquidata-inc/mmap-go v1.0.3 h1:2LndAeAtup9rpvUmu4wZSYCsjCQ0Zpc+NqE+6+PnT7g=
github.com/liquidata-inc/mmap-go v1.0.3/go.mod h1:w0doE7jfkuDEZyxb/zD3VWnRaQBYx1uDTS816kH8HoY=
github.com/liquidata-inc/sqllogictest/go v0.0.0-20191127195537-419365e253e7 h1:ous7ZBxpcwRsEFutY7lBcAD5B7JYvOXS99AfzRIBzBw=
github.com/liquidata-inc/sqllogictest/go v0.0.0-20191127195537-419365e253e7/go.mod h1:kKRVtyuomkqz15YFRpS0OT8kpsU8y/F3jyiZtvALdKU=
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200225183643-358992a611e1 h1:BDpmbvQ9I8npWe7TOzQcGkrn7EYHrW1hJtTd9h8MNZA=
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200225183643-358992a611e1/go.mod h1:kKRVtyuomkqz15YFRpS0OT8kpsU8y/F3jyiZtvALdKU=
github.com/liquidata-inc/vitess v0.0.0-20200102230944-f3410911d61f h1:fqsJy7h3D3esm9tYSzU7LV6h2tfifdYTanPuDL5LJ1A=
github.com/liquidata-inc/vitess v0.0.0-20200102230944-f3410911d61f/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY=
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200313010318-99cb4d559676 h1:+QjBkLsd1Yj46izNWAWuj/ZamoeR2DpjIJtQcCfTU+I=
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200313010318-99cb4d559676/go.mod h1:kKRVtyuomkqz15YFRpS0OT8kpsU8y/F3jyiZtvALdKU=
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15 h1:H3RwcYfzkdW4kFh7znTUopcX3XZqnFXm6pcmxSy0mNo=
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15/go.mod h1:kKRVtyuomkqz15YFRpS0OT8kpsU8y/F3jyiZtvALdKU=
github.com/liquidata-inc/vitess v0.0.0-20200318153456-e0b079da3f54 h1:LR/OEhgIYVQuo5a/lxr8Ps76AZ1FNWUgNANfKCA0XSQ=
github.com/liquidata-inc/vitess v0.0.0-20200318153456-e0b079da3f54/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0=
@@ -371,6 +393,8 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky
github.com/mattn/go-runewidth v0.0.1/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-runewidth v0.0.8 h1:3tS41NlGYSmhhe/8fhGRzc+z3AYCw1Fe1WAyLuujKs0=
github.com/mattn/go-runewidth v0.0.8/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v2.0.3+incompatible h1:gXHsfypPkaMZrKbD5209QV9jbUTJKjyR5WD3HYQSd+U=
github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
@@ -469,10 +493,12 @@ github.com/remyoudompheng/bigfft v0.0.0-20190728182440-6a916e37a237 h1:HQagqIiBm
github.com/remyoudompheng/bigfft v0.0.0-20190728182440-6a916e37a237/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.1.0 h1:+2KBaVoUmb9XzDsrx/Ct0W/EYOSFf/nWTauy++DprtY=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/sanity-io/litter v1.2.0 h1:DGJO0bxH/+C2EukzOSBmAlxmkhVMGqzvcx/rvySYw9M=
github.com/sanity-io/litter v1.2.0/go.mod h1:JF6pZUFgu2Q0sBZ+HSV35P8TVPI1TTzEwyu9FXAw2W4=
@@ -487,12 +513,17 @@ github.com/shirou/gopsutil v2.18.12+incompatible h1:1eaJvGomDnH74/5cF4CTmTbLHAri
github.com/shirou/gopsutil v2.18.12+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
github.com/shirou/gopsutil v2.20.1+incompatible h1:oIq9Cq4i84Hk8uQAUOG3eNdI/29hBawGrD5YRl6JRDY=
github.com/shirou/gopsutil v2.20.1+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
github.com/shirou/gopsutil v2.20.2+incompatible h1:ucK79BhBpgqQxPASyS2cu9HX8cfDVljBN1WWFvbNvgY=
github.com/shirou/gopsutil v2.20.2+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4 h1:udFKJ0aHUL60LboW/A+DfgoHVedieIzIXE8uylPue0U=
github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc=
github.com/shopspring/decimal v0.0.0-20191130220710-360f2bc03045 h1:8CnFGhoe92Izugjok8nZEGYCNovJwdRFYwrEiLtG6ZQ=
github.com/shopspring/decimal v0.0.0-20191130220710-360f2bc03045/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shopspring/decimal v0.0.0-20200105231215-408a2507e114 h1:Pm6R878vxWWWR+Sa3ppsLce/Zq+JNTs6aVvRu13jv9A=
github.com/shopspring/decimal v0.0.0-20200105231215-408a2507e114/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shopspring/decimal v0.0.0-20200227202807-02e2044944cc h1:jUIKcSPO9MoMJBbEoyE/RJoE8vz7Mb8AjvifMMwSyvY=
github.com/shopspring/decimal v0.0.0-20200227202807-02e2044944cc/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
@@ -502,6 +533,7 @@ github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EE
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72 h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
@@ -515,6 +547,8 @@ github.com/spf13/cobra v0.0.3 h1:ZlrZ4XsMRm04Fr5pSFxBgfND2EBVa1nLpiy1stUsX/8=
github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/cobra v0.0.5 h1:f0B+LkLX6DtmRH1isoNA9VTtNUK9K8xYd28JNNfOv/s=
github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
github.com/spf13/cobra v0.0.6 h1:breEStsVwemnKh2/s6gMvSdMEkwW0sK8vGStnlVBMCs=
github.com/spf13/cobra v0.0.6/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
@@ -522,6 +556,7 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.3.1/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
github.com/src-d/go-oniguruma v1.1.0 h1:EG+Nm5n2JqWUaCjtM0NtutPxU7ZN5Tp50GWrrV8bTww=
github.com/src-d/go-oniguruma v1.1.0/go.mod h1:chVbff8kcVtmrhxtZ3yBVLLquXbzCS6DrxQaAK/CeqM=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
@@ -533,6 +568,8 @@ github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXf
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/tchap/go-patricia v0.0.0-20160729071656-dd168db6051b/go.mod h1:bmLyhP68RS6kStMGxByiQ23RP/odRBOTVjwp2cDyi6I=
github.com/tealeg/xlsx v1.0.5 h1:+f8oFmvY8Gw1iUXzPk+kz+4GpbDZPK1FhPiQRd+ypgE=
github.com/tealeg/xlsx v1.0.5/go.mod h1:btRS8dz54TDnvKNosuAqxrM1QgN1udgk9O34bDCnORM=
@@ -540,6 +577,7 @@ github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/tidwall/pretty v1.0.1 h1:WE4RBSZ1x6McVVC8S/Md+Qse8YUv6HRObAx6ke00NY8=
github.com/tidwall/pretty v1.0.1/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
github.com/uber-go/atomic v1.4.0/go.mod h1:/Ct5t2lcmbJ4OSe/waGBoaVvVqtO0bmtfVNex1PFV8g=
github.com/uber/jaeger-client-go v2.16.0+incompatible h1:Q2Pp6v3QYiocMxomCaJuwQGFt7E53bPYqEgug/AoBtY=
@@ -550,6 +588,7 @@ github.com/uber/jaeger-lib v2.0.0+incompatible h1:iMSCV0rmXEogjNWPh2D0xk9YVKvrtG
github.com/uber/jaeger-lib v2.0.0+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U=
github.com/uber/jaeger-lib v2.2.0+incompatible h1:MxZXOiR2JuoANZ3J6DE/U0kSFv/eJ/GfSYVCjK7dyaw=
github.com/uber/jaeger-lib v2.2.0+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U=
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw=
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY=
@@ -558,6 +597,7 @@ github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHM
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/z-division/go-zookeeper v0.0.0-20190128072838-6d7457066b9b/go.mod h1:JNALoWa+nCXR8SmgLluHcBNVJgyejzpKPZk9pX2yXXE=
go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.3 h1:MUGmc65QhB3pIlaQ5bB4LwqSj6GIonVJXpZiaKNyaKk=
go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.mongodb.org/mongo-driver v1.0.3 h1:GKoji1ld3tw2aC+GX1wbr/J2fX13yNacEYoJ8Nhr0yU=
@@ -576,16 +616,22 @@ go.uber.org/atomic v1.5.0 h1:OI5t8sDa1Or+q8AeE+yKeB/SDYioSHAgcVljj9JIETY=
go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
go.uber.org/atomic v1.5.1 h1:rsqfU5vBkVknbhUGbAUwQKR2H4ItV8tjJ+6kJX4cxHM=
go.uber.org/atomic v1.5.1/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
go.uber.org/atomic v1.6.0 h1:Ezj3JGmsOnG1MoRWQkPBsKLe9DwWD9QeXzTRzzldNVk=
go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
go.uber.org/multierr v1.3.0 h1:sFPn2GLc3poCkfrpIXGhBD2X0CMIo4Q/zSULXrj/+uc=
go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=
go.uber.org/multierr v1.4.0 h1:f3WCSC2KzAcBXGATIxAB1E2XuCpNU255wNKZ505qi3E=
go.uber.org/multierr v1.4.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=
go.uber.org/multierr v1.5.0 h1:KCa4XfM8CWFCpxXRGok+Q0SS/0XBhMDbHHGABQLvD2A=
go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU=
go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee h1:0mgffUl7nfd+FpvXMVz4IDEaUSmT1ysygQC7qYo7sG4=
go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA=
go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
go.uber.org/zap v1.13.0 h1:nR6NoDBgAf67s68NhaXbsojM+2gxp3S1hWkHDl27pVU=
go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM=
go.uber.org/zap v1.14.1 h1:nYDKopTbvAPq/NrUVZwT15y2lpROBiLLyoRTbXOYWOo=
go.uber.org/zap v1.14.1/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
@@ -601,6 +647,8 @@ golang.org/x/crypto v0.0.0-20190829043050-9756ffdc2472/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200210222208-86ce3cb69678 h1:wCWoJcFExDgyYx2m2hpHgwz8W3+FPdfldvIgzqDIhyg=
golang.org/x/crypto v0.0.0-20200210222208-86ce3cb69678/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200320145329-97fc981609be h1:ZiBigX8kMYHj8eP8qTBHC6kr6mlWLIvjJrOKBMOXTS8=
golang.org/x/crypto v0.0.0-20200320145329-97fc981609be/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -612,6 +660,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd h1:zkO/Lhoka23X63N9OSzpSeROEUQ5ODw47tM3YWjygbs=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6 h1:QE6XYQK6naiK1EPAe1g/ILLxN5RBoH5xkJk3CqlMI/Y=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -632,6 +682,7 @@ golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKG
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee h1:WG0RUwxtNT4qqaXX3DPA8zHFNm/D9xaBpxzHt1WcA/E=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -640,6 +691,7 @@ golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -648,6 +700,7 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn
golang.org/x/net v0.0.0-20190424112056-4829fb13d2c6/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@@ -662,6 +715,10 @@ golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa h1:F+8P+gmewFQYRk6JoLQLwjBCT
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200222125558-5a598a2470a0 h1:MsuvTghUPjX762sGLnGsxC3HM0B5r83wEtYcYR8/vRs=
golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200319234117-63522dbf7eec h1:w0SItUiQ4sBiXBAwWNkyu8Fu2Qpn/dtDIcoPkPDqjRw=
golang.org/x/net v0.0.0-20200319234117-63522dbf7eec/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0=
@@ -706,6 +763,7 @@ golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190926180325-855e68c8590b h1:/8GN4qrAmRZQXgjWZHj9z/UJI5vNqQhPtgcw02z2f+8=
golang.org/x/sys v0.0.0-20190926180325-855e68c8590b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -716,6 +774,11 @@ golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4 h1:sfkvUWPNGwSV+8/fNqctR5lS2AqCSqYwXdrjCxp/dXo=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae h1:/WDfKMnPU+m5M4xB+6x4kaepxRw6jWvR5iDRdvjHgy8=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200317113312-5766fd39f98d h1:62ap6LNOjDU6uGmKXHJbSfciMoV+FeI1sRXx/pLDL44=
golang.org/x/sys v0.0.0-20200317113312-5766fd39f98d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
@@ -752,6 +815,7 @@ golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtn
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
@@ -763,6 +827,8 @@ golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapK
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56 h1:DFtSed2q3HtNuVazwVDZ4nSRS/JrZEig0gz2BY4VNrg=
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200224181240-023911ca70b2 h1:L/G4KZvrQn7FWLN/LlulBtBzrLUhqjiGfTWWDmrh+IQ=
golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
@@ -776,8 +842,10 @@ google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsb
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.15.0 h1:yzlyyDW/J0w8yNFJIhiAJy4kq74S+1DOLdawELNxFMA=
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.17.0 h1:0q95w+VuFtv4PAx4PZVQdBMmYbaCHbnfKaEiDIcVyag=
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.20.0 h1:jz2KixHX7EcCPiQrySzPdnYT7DbINAypCqKZ1Z7GM40=
google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
@@ -805,18 +873,22 @@ google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvx
google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce h1:1mbrb1tUU+Zmt5C94IGKADBTJZjZXAd+BubWi7r9EiI=
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63 h1:YzfoEYWbODU5Fbt37+h7X16BWQbad7Q4S6gclTKFXM8=
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.24.0/go.mod h1:XDChyiUovWa60DnaeDeZmSW86xtLtjtZbwvSiRnRtcA=
google.golang.org/grpc v1.25.1 h1:wdKvqQk7IttEw92GoRyKG2IDrUIpgpj6H6m81yfeMW0=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.26.0 h1:2dTRdpdFEEhJYQD8EMLB61nnrzSCTbG38PhqdhvOltg=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1 h1:zvIju4sqAGvwKspUQOhwnpcqSbzi7/H6QomNNjTL4sk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.28.0 h1:bO/TA4OxCOummhSf10siHuG7vJOiwh7SpRpFZDkOgl4=
google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -827,11 +899,13 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/ini.v1 v1.41.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ldap.v2 v2.5.0/go.mod h1:oI0cpe/D7HRtBQl8aTg+ZmzFUAvu4lsv3eLXMLGFxWk=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/square/go-jose.v2 v2.4.1 h1:H0TmLt7/KmzlrDOpa1F+zr0Tk90PbJYBfsVUmRLrf9Y=
gopkg.in/square/go-jose.v2 v2.4.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
gopkg.in/src-d/go-errors.v1 v1.0.0 h1:cooGdZnCjYbeS1zb1s6pVAAimTdKceRrpn7aKOnNIfc=
gopkg.in/src-d/go-errors.v1 v1.0.0/go.mod h1:q1cBlomlw2FnDBDNGlnh6X0jPihy+QxZfMMNxPCbdYg=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
@@ -845,6 +919,8 @@ honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3 h1:sXmLre5bzIR6ypkjXCDI3jHPssRhc8KD/Ome589sc3U=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
modernc.org/mathutil v1.0.0 h1:93vKjrJopTPrtTNpZ8XIovER7iCIH1QU7wNbOQXC60I=
modernc.org/mathutil v1.0.0/go.mod h1:wU0vUrJsVWBZ4P6e7xtFJEhFSNsfRLJ8H458uRjg03k=
modernc.org/mathutil v1.1.0 h1:z3/dTcIoU+Ql+ovBW6FaGAAAFJL48ZqABhGYCkbRpKE=
@@ -854,3 +930,5 @@ modernc.org/strutil v1.0.0/go.mod h1:lstksw84oURvj9y3tn8lGvRxyRC1S2+g5uuIzNfIOBs
modernc.org/strutil v1.1.0 h1:+1/yCzZxY2pZwwrsbH+4T7BQMoLQ9QiBshRC9eicYsc=
modernc.org/strutil v1.1.0/go.mod h1:lstksw84oURvj9y3tn8lGvRxyRC1S2+g5uuIzNfIOBs=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
+25 -5
View File
@@ -406,6 +406,22 @@ func (root *RootValue) GetTableHash(ctx context.Context, tName string) (hash.Has
return tValRef.TargetHash(), true, nil
}
func (root *RootValue) SetTableHash(ctx context.Context, tName string, h hash.Hash) (*RootValue, error) {
val, err := root.vrw.ReadValue(ctx, h)
if err != nil {
return nil, err
}
ref, err := types.NewRef(val, root.vrw.Format())
if err != nil {
return nil, err
}
return putTable(ctx, root, tName, ref)
}
// GetTable will retrieve a table by name
func (root *RootValue) GetTable(ctx context.Context, tName string) (*Table, bool, error) {
if st, ok, err := root.getTableSt(ctx, tName); err != nil {
@@ -586,17 +602,20 @@ func (root *RootValue) PutTable(ctx context.Context, tName string, table *Table)
// PutTable inserts a table by name into the map of tables. If a table already exists with that name it will be replaced
func PutTable(ctx context.Context, root *RootValue, vrw types.ValueReadWriter, tName string, table *Table) (*RootValue, error) {
if !IsValidTableName(tName) {
panic("Don't attempt to put a table with a name that fails the IsValidTableName check")
}
rootValSt := root.valueSt
tableRef, err := writeValAndGetRef(ctx, vrw, table.tableStruct)
if err != nil {
return nil, err
}
return putTable(ctx, root, tName, tableRef)
}
func putTable(ctx context.Context, root *RootValue, tName string, tableRef types.Ref) (*RootValue, error) {
if !IsValidTableName(tName) {
panic("Don't attempt to put a table with a name that fails the IsValidTableName check")
}
tableMap, err := root.getTableMap()
if err != nil {
@@ -612,6 +631,7 @@ func PutTable(ctx context.Context, root *RootValue, vrw types.ValueReadWriter, t
return nil, err
}
rootValSt := root.valueSt
rootValSt, err = rootValSt.Set(tablesKey, m)
if err != nil {
-1
View File
@@ -101,7 +101,6 @@ const (
QueryCatalogDescriptionTag
)
const (
// Currently: `view`.
SchemasTablesTypeCol = "type"
@@ -89,6 +89,10 @@ var FloatComparer = cmp.Comparer(func(x, y types.Float) bool {
return math.Abs(float64(x)-float64(y)) < .001
})
var TimestampComparer = cmp.Comparer(func(x, y types.Timestamp) bool {
return x.Equals(y)
})
// CreateTestTable creates a new test table with the name, schema, and rows given.
func CreateTestTable(t *testing.T, dEnv *env.DoltEnv, tableName string, sch schema.Schema, rs ...row.Row) {
imt := table.NewInMemTable(sch)
@@ -179,7 +179,7 @@ func (m Merge) Exec(t *testing.T, dEnv *env.DoltEnv) error {
assert.NoError(t, err)
assert.NotEqual(t, h1, h2)
tblNames, err := dEnv.MergeWouldStompChanges(context.Background(), cm2)
tblNames, _, err := dEnv.MergeWouldStompChanges(context.Background(), cm2)
if err != nil {
return err
}
+72 -18
View File
@@ -16,6 +16,7 @@ package commitwalk
import (
"context"
"io"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/dolt/go/store/hash"
@@ -23,6 +24,7 @@ import (
type c struct {
commit *doltdb.Commit
hash hash.Hash
height uint64
invisible bool
queued bool
@@ -127,7 +129,8 @@ func (q *q) Get(ctx context.Context, id hash.Hash) (*c, error) {
if err != nil {
return nil, err
}
c := &c{commit: l, height: h}
c := &c{commit: l, height: h, hash: id}
q.loaded[id] = c
return c, nil
}
@@ -190,31 +193,82 @@ func GetTopologicalOrderCommits(ctx context.Context, ddb *doltdb.DoltDB, startCo
return GetTopNTopoOrderedCommits(ctx, ddb, startCommitHash, -1)
}
// GetTopologicalOrderCommitIterator returns an iterator for commits generated with the same semantics as
// GetTopologicalOrderCommits
func GetTopologicalOrderIterator(ctx context.Context, ddb *doltdb.DoltDB, startCommitHash hash.Hash) (doltdb.CommitItr, error) {
return newCommiterator(ctx, ddb, startCommitHash)
}
type commiterator struct {
ddb *doltdb.DoltDB
startCommitHash hash.Hash
q *q
}
var _ doltdb.CommitItr = (*commiterator)(nil)
func newCommiterator(ctx context.Context, ddb *doltdb.DoltDB, startCommitHash hash.Hash) (*commiterator, error) {
itr := &commiterator{
ddb: ddb,
startCommitHash: startCommitHash,
}
err := itr.Reset(ctx)
if err != nil {
return nil, err
}
return itr, nil
}
// Next implements doltdb.CommitItr
func (i *commiterator) Next(ctx context.Context) (hash.Hash, *doltdb.Commit, error) {
if i.q.NumVisiblePending() > 0 {
nextC := i.q.PopPending()
parents, err := nextC.commit.ParentHashes(ctx)
if err != nil {
return hash.Hash{}, nil, err
}
for _, parentID := range parents {
if err := i.q.AddPendingIfUnseen(ctx, parentID); err != nil {
return hash.Hash{}, nil, err
}
}
return nextC.hash, nextC.commit, nil
}
return hash.Hash{}, nil, io.EOF
}
// Reset implements doltdb.CommitItr
func (i *commiterator) Reset(ctx context.Context) error {
i.q = newQueue(i.ddb)
if err := i.q.AddPendingIfUnseen(ctx, i.startCommitHash); err != nil {
return err
}
return nil
}
// GetTopNTopoOrderedCommits returns the first N commits (If N <= 0 then all commits) reachable from the commit at hash
// `startCommitHash` in reverse topological order, with tiebreaking done by the height of the commit graph -- higher
// commits appear first. Remaining ties are broken by timestamp; newer commits appear first.
func GetTopNTopoOrderedCommits(ctx context.Context, ddb *doltdb.DoltDB, startCommitHash hash.Hash, n int) ([]*doltdb.Commit, error) {
var commitList []*doltdb.Commit
q := newQueue(ddb)
if err := q.AddPendingIfUnseen(ctx, startCommitHash); err != nil {
itr, err := GetTopologicalOrderIterator(ctx, ddb, startCommitHash)
if err != nil {
return nil, err
}
for q.NumVisiblePending() > 0 {
nextC := q.PopPending()
parents, err := nextC.commit.ParentHashes(ctx)
if err != nil {
var commitList []*doltdb.Commit
for n < 0 || len(commitList) < n {
_, commit, err := itr.Next(ctx)
if err == io.EOF {
break
} else if err != nil {
return nil, err
}
for _, parentID := range parents {
if err := q.AddPendingIfUnseen(ctx, parentID); err != nil {
return nil, err
}
}
commitList = append(commitList, nextC.commit)
if n > 0 && len(commitList) >= n {
break
}
commitList = append(commitList, commit)
}
return commitList, nil
+8 -8
View File
@@ -439,41 +439,41 @@ func (dEnv *DoltEnv) GetTablesWithConflicts(ctx context.Context) ([]string, erro
return root.TablesInConflict(ctx)
}
func (dEnv *DoltEnv) MergeWouldStompChanges(ctx context.Context, mergeCommit *doltdb.Commit) ([]string, error) {
func (dEnv *DoltEnv) MergeWouldStompChanges(ctx context.Context, mergeCommit *doltdb.Commit) ([]string, map[string]hash.Hash, error) {
headRoot, err := dEnv.HeadRoot(ctx)
if err != nil {
return nil, err
return nil, nil, err
}
workingRoot, err := dEnv.WorkingRoot(ctx)
if err != nil {
return nil, err
return nil, nil, err
}
mergeRoot, err := mergeCommit.GetRootValue()
if err != nil {
return nil, err
return nil, nil, err
}
headTableHashes, err := mapTableHashes(ctx, headRoot)
if err != nil {
return nil, err
return nil, nil, err
}
workingTableHashes, err := mapTableHashes(ctx, workingRoot)
if err != nil {
return nil, err
return nil, nil, err
}
mergeTableHashes, err := mapTableHashes(ctx, mergeRoot)
if err != nil {
return nil, err
return nil, nil, err
}
headWorkingDiffs := diffTableHashes(headTableHashes, workingTableHashes)
@@ -487,7 +487,7 @@ func (dEnv *DoltEnv) MergeWouldStompChanges(ctx context.Context, mergeCommit *do
}
}
return stompedTables, nil
return stompedTables, headWorkingDiffs, nil
}
func mapTableHashes(ctx context.Context, root *doltdb.RootValue) (map[string]hash.Hash, error) {
@@ -17,7 +17,6 @@ package envtestutils
import (
"context"
"fmt"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/rebase"
"testing"
"github.com/stretchr/testify/assert"
@@ -27,6 +26,7 @@ import (
dtu "github.com/liquidata-inc/dolt/go/libraries/doltcore/dtestutils"
tc "github.com/liquidata-inc/dolt/go/libraries/doltcore/dtestutils/testcommands"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/rebase"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/encoding"
@@ -17,7 +17,6 @@ package envtestutils
import (
"context"
"fmt"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/rebase"
"io"
"strconv"
"testing"
@@ -30,6 +29,7 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
dtu "github.com/liquidata-inc/dolt/go/libraries/doltcore/dtestutils"
tc "github.com/liquidata-inc/dolt/go/libraries/doltcore/dtestutils/testcommands"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/rebase"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/typeinfo"
@@ -179,7 +179,6 @@ var UniqueTagsTests = []UniqueTagsTest{
tc.CommitAll{Message: "added columns to both tables on myBranch"},
tc.Checkout{BranchName: "master"},
tc.Merge{BranchName: "myBranch"},
},
TableName: "two",
ExpectedSchema: schema.SchemaFromCols(columnCollection(
+3 -2
View File
@@ -18,11 +18,12 @@ import (
"context"
"errors"
"fmt"
"strings"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/rebase"
"github.com/liquidata-inc/dolt/go/libraries/utils/set"
"github.com/liquidata-inc/dolt/go/store/atomicerr"
"github.com/liquidata-inc/dolt/go/store/hash"
"strings"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/diff"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
@@ -732,7 +733,7 @@ func resolveTagConflicts(ctx context.Context, ddb *doltdb.DoltDB, commit, mergeC
return nil, err
}
sch, err := tbl.GetSchema(ctx)
sch, err := tbl.GetSchema(ctx)
if err != nil {
return nil, err
+1 -1
View File
@@ -594,7 +594,7 @@ func handleSystemTableMappings(ctx context.Context, tblName string, root *doltdb
}
case doltdb.SchemasTableName:
newTagsByColName = map[string]uint64{
doltdb.SchemasTablesTypeCol: doltdb.DoltSchemasTypeTag,
doltdb.SchemasTablesTypeCol: doltdb.DoltSchemasTypeTag,
doltdb.SchemasTablesNameCol: doltdb.DoltSchemasNameTag,
doltdb.SchemasTablesFragmentCol: doltdb.DoltSchemasFragmentTag,
}
@@ -110,6 +110,16 @@ func (tt TaggedValues) Get(tag uint64) (types.Value, bool) {
return val, ok
}
func (tt TaggedValues) GetWithDefault(tag uint64, def types.Value) types.Value {
val, ok := tt[tag]
if !ok {
return def
}
return val
}
func (tt TaggedValues) Set(tag uint64, val types.Value) TaggedValues {
updated := tt.copy()
// Setting a nil value removes the mapping for that tag entirely, rather than setting a nil value. The methods to
+3 -2
View File
@@ -365,8 +365,9 @@ func SuperSchemaSubtract(left, right *SuperSchema) *SuperSchema {
})
return &SuperSchema{cc, tn}
}
// SuperSchemaIntersection returns the logical set intersection of the columns of ss1 and ss2, along with
// the union of each column's name history.
// SuperSchemaIntersection returns the logical set intersection of the columns of ss1 and ss2, along with
// the union of each column's name history.
func SuperSchemaIntersection(ss1, ss2 *SuperSchema) *SuperSchema {
cc, _ := NewColCollection()
tn := make(map[uint64][]string)
+2 -1
View File
@@ -17,8 +17,9 @@ package schema
import (
"crypto/sha512"
"encoding/binary"
"github.com/liquidata-inc/dolt/go/store/types"
"math/rand"
"github.com/liquidata-inc/dolt/go/store/types"
)
const (
+1
View File
@@ -16,6 +16,7 @@ package sql
import (
"fmt"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
)
+41 -20
View File
@@ -16,8 +16,10 @@ package sqltestutil
import (
"context"
"fmt"
"reflect"
"testing"
"time"
"github.com/google/uuid"
@@ -27,6 +29,7 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/doltcore/envtestutils"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/typeinfo"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/table/untyped"
"github.com/liquidata-inc/dolt/go/store/types"
)
@@ -100,7 +103,7 @@ func createEpisodesTestSchema() schema.Schema {
colColl, _ := schema.NewColCollection(
schema.NewColumn("id", EpisodeIdTag, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("name", EpNameTag, types.StringKind, false, schema.NotNullConstraint{}),
schema.NewColumn("air_date", EpAirDateTag, types.IntKind, false),
newColumnWithTypeInfo("air_date", EpAirDateTag, typeinfo.DatetimeType, false),
schema.NewColumn("rating", EpRatingTag, types.FloatKind, false),
)
return schema.SchemaFromCols(colColl)
@@ -115,6 +118,14 @@ func createAppearancesTestSchema() schema.Schema {
return schema.SchemaFromCols(colColl)
}
func newColumnWithTypeInfo(name string, tag uint64, info typeinfo.TypeInfo, partOfPk bool, constraints ...schema.ColConstraint) schema.Column {
col, err := schema.NewColumnWithTypeInfo(name, tag, info, partOfPk, constraints...)
if err != nil {
panic(fmt.Sprintf("unexpected error creating column: %s", err.Error()))
}
return col
}
func NewPeopleRow(id int, first, last string, isMarried bool, age int, rating float64) row.Row {
vals := row.TaggedValues{
IdTag: types.Int(id),
@@ -134,11 +145,11 @@ func NewPeopleRow(id int, first, last string, isMarried bool, age int, rating fl
return r
}
func newEpsRow(id int, name string, airdate int, rating float64) row.Row {
func newEpsRow(id int, name string, airdate string, rating float64) row.Row {
vals := row.TaggedValues{
EpisodeIdTag: types.Int(id),
EpNameTag: types.String(name),
EpAirDateTag: types.Int(airdate),
EpAirDateTag: types.Timestamp(datetimeStrToTimestamp(airdate)),
EpRatingTag: types.Float(rating),
}
@@ -151,6 +162,14 @@ func newEpsRow(id int, name string, airdate int, rating float64) row.Row {
return r
}
func datetimeStrToTimestamp(datetime string) time.Time {
time, err := time.Parse("2006-01-02 15:04:05", datetime)
if err != nil {
panic(fmt.Sprintf("unable to parse datetime %s", datetime))
}
return time
}
func newAppsRow(charId, epId int, comment string) row.Row {
vals := row.TaggedValues{
AppCharacterTag: types.Int(charId),
@@ -199,10 +218,10 @@ var Barney = NewPeopleRowWithOptionalFields(barneyId, "Barney", "Gumble", false,
var AllPeopleRows = Rs(Homer, Marge, Bart, Lisa, Moe, Barney)
// Actually the first 4 episodes of the show
var Ep1 = newEpsRow(1, "Simpsons Roasting On an Open Fire", 629953200, 8.0)
var Ep2 = newEpsRow(2, "Bart the Genius", 632372400, 9.0)
var Ep3 = newEpsRow(3, "Homer's Odyssey", 632977200, 7.0)
var Ep4 = newEpsRow(4, "There's No Disgrace Like Home", 633582000, 8.5)
var Ep1 = newEpsRow(1, "Simpsons Roasting On an Open Fire", "1989-12-18 03:00:00", 8.0)
var Ep2 = newEpsRow(2, "Bart the Genius", "1990-01-15 03:00:00", 9.0)
var Ep3 = newEpsRow(3, "Homer's Odyssey", "1990-01-22 03:00:00", 7.0)
var Ep4 = newEpsRow(4, "There's No Disgrace Like Home", "1990-01-29 03:00:00", 8.5)
var AllEpsRows = Rs(Ep1, Ep2, Ep3, Ep4)
// These are made up, not the actual show data
@@ -276,6 +295,8 @@ func MutateRow(r row.Row, tagsAndVals ...interface{}) row.Row {
nomsVal = types.UUID(v)
case bool:
nomsVal = types.Bool(v)
case time.Time:
nomsVal = types.Timestamp(v)
default:
panic("Unhandled type " + reflect.TypeOf(val).String())
}
@@ -372,10 +393,10 @@ var diffSchema = dtestutils.MustSchema(
const tblName = "test_table"
var initialSch = dtestutils.MustSchema(idColTag0TypeUUID, firstColTag1TypeStr, lastColTag2TypeStr)
var addAddrAt3Sch = dtestutils.MustSchema(idColTag0TypeUUID, firstColTag1TypeStr, lastColTag2TypeStr, addrColTag3TypeStr)
var addAgeAt4Sch = dtestutils.MustSchema(idColTag0TypeUUID, firstColTag1TypeStr, lastColTag2TypeStr, ageColTag4TypeInt)
var readdAgeAt5Sch = dtestutils.MustSchema(idColTag0TypeUUID, firstColTag1TypeStr, lastColTag2TypeStr, addrColTag3TypeStr, ageColTag5TypeUint)
var InitialHistSch = dtestutils.MustSchema(idColTag0TypeUUID, firstColTag1TypeStr, lastColTag2TypeStr)
var AddAddrAt3HistSch = dtestutils.MustSchema(idColTag0TypeUUID, firstColTag1TypeStr, lastColTag2TypeStr, addrColTag3TypeStr)
var AddAgeAt4HistSch = dtestutils.MustSchema(idColTag0TypeUUID, firstColTag1TypeStr, lastColTag2TypeStr, ageColTag4TypeInt)
var ReaddAgeAt5HistSch = dtestutils.MustSchema(idColTag0TypeUUID, firstColTag1TypeStr, lastColTag2TypeStr, addrColTag3TypeStr, ageColTag5TypeUint)
func CreateHistory(ctx context.Context, dEnv *env.DoltEnv, t *testing.T) []envtestutils.HistoryNode {
vrw := dEnv.DoltDB.ValueReadWriter()
@@ -386,8 +407,8 @@ func CreateHistory(ctx context.Context, dEnv *env.DoltEnv, t *testing.T) []envte
CommitMsg: "Seeding with initial user data",
Updates: map[string]envtestutils.TableUpdate{
tblName: {
NewSch: initialSch,
NewRowData: dtestutils.MustRowData(t, ctx, vrw, initialSch, []row.TaggedValues{
NewSch: InitialHistSch,
NewRowData: dtestutils.MustRowData(t, ctx, vrw, InitialHistSch, []row.TaggedValues{
{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son")},
{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks")},
{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn")},
@@ -400,8 +421,8 @@ func CreateHistory(ctx context.Context, dEnv *env.DoltEnv, t *testing.T) []envte
CommitMsg: "Adding int age to users with tag 3",
Updates: map[string]envtestutils.TableUpdate{
tblName: {
NewSch: addAgeAt4Sch,
NewRowData: dtestutils.MustRowData(t, ctx, vrw, addAgeAt4Sch, []row.TaggedValues{
NewSch: AddAgeAt4HistSch,
NewRowData: dtestutils.MustRowData(t, ctx, vrw, AddAgeAt4HistSch, []row.TaggedValues{
{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 4: types.Int(35)},
{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 4: types.Int(38)},
{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 4: types.Int(37)},
@@ -416,8 +437,8 @@ func CreateHistory(ctx context.Context, dEnv *env.DoltEnv, t *testing.T) []envte
CommitMsg: "Adding string address to users with tag 3",
Updates: map[string]envtestutils.TableUpdate{
tblName: {
NewSch: addAddrAt3Sch,
NewRowData: dtestutils.MustRowData(t, ctx, vrw, addAddrAt3Sch, []row.TaggedValues{
NewSch: AddAddrAt3HistSch,
NewRowData: dtestutils.MustRowData(t, ctx, vrw, AddAddrAt3HistSch, []row.TaggedValues{
{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St")},
{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln")},
{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 3: types.String("789 Not Real Ct")},
@@ -432,8 +453,8 @@ func CreateHistory(ctx context.Context, dEnv *env.DoltEnv, t *testing.T) []envte
CommitMsg: "Re-add age as a uint with tag 4",
Updates: map[string]envtestutils.TableUpdate{
tblName: {
NewSch: readdAgeAt5Sch,
NewRowData: dtestutils.MustRowData(t, ctx, vrw, readdAgeAt5Sch, []row.TaggedValues{
NewSch: ReaddAgeAt5HistSch,
NewRowData: dtestutils.MustRowData(t, ctx, vrw, ReaddAgeAt5HistSch, []row.TaggedValues{
{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St"), 5: types.Uint(35)},
{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln"), 5: types.Uint(38)},
{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 3: types.String("789 Not Real Ct"), 5: types.Uint(37)},
@@ -456,7 +477,7 @@ func CreateWorkingRootUpdate() map[string]envtestutils.TableUpdate {
return map[string]envtestutils.TableUpdate{
tblName: {
RowUpdates: []row.Row{
mustRow(row.New(types.Format_Default, readdAgeAt5Sch, row.TaggedValues{
mustRow(row.New(types.Format_Default, ReaddAgeAt5HistSch, row.TaggedValues{
0: types.Int(6), 1: types.String("Katie"), 2: types.String("McCulloch"),
})),
},
@@ -181,7 +181,6 @@ var BasicSelectTests = []SelectTest{
ExpectedRows: CompressRows(PeopleTestSchema, Barney, Moe, Lisa, Bart, Marge, Homer),
ExpectedSchema: CompressSchema(PeopleTestSchema),
},
// TODO: float logic seems broken in sql engine
{
Name: "select *, order by float",
Query: "select * from people order by rating",
@@ -718,7 +717,7 @@ var BasicSelectTests = []SelectTest{
Name: "select * from log system table",
Query: "select * from dolt_log",
ExpectedRows: []row.Row{mustRow(row.New(types.Format_7_18, LogSchema, row.TaggedValues{
0: types.String("26e3q4fup0uotaqk24quluf2pmbgmn85"),
0: types.String("0e2b6g3oemme1je6g3l2bm3hr5mhgpa2"),
1: types.String("billy bob"),
2: types.String("bigbillieb@fake.horse"),
3: types.Timestamp(time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC)),
@@ -766,6 +765,135 @@ var SelectDiffTests = []SelectTest{
},
}
var AsOfTests = []SelectTest{
{
Name: "select * from seed branch",
Query: "select * from test_table as of 'seed'",
ExpectedRows: []row.Row{
mustRow(row.New(types.Format_7_18, InitialHistSch, row.TaggedValues{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son")})),
mustRow(row.New(types.Format_7_18, InitialHistSch, row.TaggedValues{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks")})),
mustRow(row.New(types.Format_7_18, InitialHistSch, row.TaggedValues{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn")})),
},
ExpectedSchema: InitialHistSch,
},
{
Name: "select * from add-age branch",
Query: "select * from test_table as of 'add-age'",
ExpectedRows: []row.Row{
mustRow(row.New(types.Format_7_18, AddAgeAt4HistSch, row.TaggedValues{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 4: types.Int(35)})),
mustRow(row.New(types.Format_7_18, AddAgeAt4HistSch, row.TaggedValues{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 4: types.Int(38)})),
mustRow(row.New(types.Format_7_18, AddAgeAt4HistSch, row.TaggedValues{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 4: types.Int(37)})),
mustRow(row.New(types.Format_7_18, AddAgeAt4HistSch, row.TaggedValues{0: types.Int(3), 1: types.String("Zach"), 2: types.String("Musgrave"), 4: types.Int(37)})),
},
ExpectedSchema: AddAgeAt4HistSch,
},
{
Name: "select * from master branch",
Query: "select * from test_table as of 'master'",
ExpectedRows: []row.Row{
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St"), 5: types.Uint(35)})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln"), 5: types.Uint(38)})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 3: types.String("789 Not Real Ct"), 5: types.Uint(37)})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(3), 1: types.String("Zach"), 2: types.String("Musgrave"), 3: types.String("-1 Imaginary Wy"), 5: types.Uint(37)})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(4), 1: types.String("Matt"), 2: types.String("Jesuele")})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(5), 1: types.String("Daylon"), 2: types.String("Wilkins")})),
},
ExpectedSchema: ReaddAgeAt5HistSch,
},
{
Name: "select * from HEAD~",
Query: "select * from test_table as of 'HEAD~'",
ExpectedRows: []row.Row{
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 3: types.String("789 Not Real Ct")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(3), 1: types.String("Zach"), 2: types.String("Musgrave")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(4), 1: types.String("Matt"), 2: types.String("Jesuele")})),
},
ExpectedSchema: ReaddAgeAt5HistSch,
},
{
Name: "select * from HEAD^",
Query: "select * from test_table as of 'HEAD^'",
ExpectedRows: []row.Row{
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 3: types.String("789 Not Real Ct")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(3), 1: types.String("Zach"), 2: types.String("Musgrave")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(4), 1: types.String("Matt"), 2: types.String("Jesuele")})),
},
ExpectedSchema: ReaddAgeAt5HistSch,
},
{
Name: "select * from master^",
Query: "select * from test_table as of 'master^'",
ExpectedRows: []row.Row{
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 3: types.String("789 Not Real Ct")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(3), 1: types.String("Zach"), 2: types.String("Musgrave")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(4), 1: types.String("Matt"), 2: types.String("Jesuele")})),
},
ExpectedSchema: ReaddAgeAt5HistSch,
},
// Because of an implementation detail in the way we process history for test setup, each commit is 2 hours apart.
{
Name: "select * from timestamp after HEAD",
Query: "select * from test_table as of CONVERT('1970-01-01 10:00:00', DATETIME)",
ExpectedRows: []row.Row{
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St"), 5: types.Uint(35)})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln"), 5: types.Uint(38)})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 3: types.String("789 Not Real Ct"), 5: types.Uint(37)})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(3), 1: types.String("Zach"), 2: types.String("Musgrave"), 3: types.String("-1 Imaginary Wy"), 5: types.Uint(37)})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(4), 1: types.String("Matt"), 2: types.String("Jesuele")})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(5), 1: types.String("Daylon"), 2: types.String("Wilkins")})),
},
ExpectedSchema: ReaddAgeAt5HistSch,
},
{
Name: "select * from timestamp, HEAD exact",
Query: "select * from test_table as of CONVERT('1970-01-01 08:00:00', DATETIME)",
ExpectedRows: []row.Row{
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St"), 5: types.Uint(35)})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln"), 5: types.Uint(38)})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 3: types.String("789 Not Real Ct"), 5: types.Uint(37)})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(3), 1: types.String("Zach"), 2: types.String("Musgrave"), 3: types.String("-1 Imaginary Wy"), 5: types.Uint(37)})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(4), 1: types.String("Matt"), 2: types.String("Jesuele")})),
mustRow(row.New(types.Format_7_18, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(5), 1: types.String("Daylon"), 2: types.String("Wilkins")})),
},
ExpectedSchema: ReaddAgeAt5HistSch,
},
{
Name: "select * from timestamp, HEAD~ + 1",
Query: "select * from test_table as of CONVERT('1970-01-01 07:00:00', DATETIME)",
ExpectedRows: []row.Row{
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 3: types.String("789 Not Real Ct")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(3), 1: types.String("Zach"), 2: types.String("Musgrave")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(4), 1: types.String("Matt"), 2: types.String("Jesuele")})),
},
ExpectedSchema: ReaddAgeAt5HistSch,
},
{
Name: "select * from timestamp, HEAD~",
Query: "select * from test_table as of CONVERT('1970-01-01 06:00:00', DATETIME)",
ExpectedRows: []row.Row{
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 3: types.String("789 Not Real Ct")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(3), 1: types.String("Zach"), 2: types.String("Musgrave")})),
mustRow(row.New(types.Format_7_18, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(4), 1: types.String("Matt"), 2: types.String("Jesuele")})),
},
ExpectedSchema: ReaddAgeAt5HistSch,
},
{
Name: "select * from timestamp, before table creation",
Query: "select * from test_table as of CONVERT('1970-01-01 02:00:00', DATETIME)",
ExpectedErr: "not found",
},
}
// SQL is supposed to be case insensitive. These are tests of that promise.
var CaseSensitivityTests = []SelectTest{
{
@@ -171,6 +171,24 @@ var BasicUpdateTests = []UpdateTest{
),
ExpectedSchema: CompressSchema(PeopleTestSchema),
},
{
Name: "update datetime field",
UpdateQuery: `update episodes set air_date = "1993-03-24 20:00:00" where id = 1`,
SelectQuery: `select * from episodes where id = 1`,
ExpectedRows: CompressRows(EpisodesTestSchema,
MutateRow(Ep1, EpAirDateTag, datetimeStrToTimestamp("1993-03-24 20:00:00")),
),
ExpectedSchema: CompressSchema(EpisodesTestSchema),
},
{
Name: "update datetime field",
UpdateQuery: `update episodes set name = "fake_name" where id = 1;`,
SelectQuery: `select * from episodes where id = 1;`,
ExpectedRows: CompressRows(EpisodesTestSchema,
MutateRow(Ep1, EpNameTag, "fake_name"),
),
ExpectedSchema: CompressSchema(EpisodesTestSchema),
},
{
Name: "update multiple rows, =",
UpdateQuery: `update people set first_name = "Homer" where last_name = "Simpson"`,
+157 -31
View File
@@ -19,6 +19,7 @@ import (
"fmt"
"io"
"strings"
"time"
"github.com/src-d/go-mysql-server/sql"
"github.com/src-d/go-mysql-server/sql/parse"
@@ -27,6 +28,7 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env/actions/commitwalk"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/alterschema"
dsql "github.com/liquidata-inc/dolt/go/libraries/doltcore/sql"
@@ -50,10 +52,11 @@ type Database struct {
ddb *doltdb.DoltDB
rsr env.RepoStateReader
batchMode batchMode
tables map[string]sql.Table
tables map[*doltdb.RootValue]map[string]sql.Table
}
var _ sql.Database = (*Database)(nil)
var _ sql.VersionedDatabase = (*Database)(nil)
var _ sql.TableDropper = (*Database)(nil)
var _ sql.TableCreator = (*Database)(nil)
var _ sql.TableRenamer = (*Database)(nil)
@@ -66,10 +69,17 @@ func NewDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, rsr en
ddb: ddb,
rsr: rsr,
batchMode: single,
tables: make(map[string]sql.Table),
tables: initTableMap(root),
}
}
func initTableMap(root *doltdb.RootValue) map[*doltdb.RootValue]map[string]sql.Table {
tablesForRoot := make(map[string]sql.Table)
tables := make(map[*doltdb.RootValue]map[string]sql.Table)
tables[root] = tablesForRoot
return tables
}
// NewBatchedDatabase returns a new dolt database executing in batch insert mode. Integrators must call Flush() to
// commit any outstanding edits.
func NewBatchedDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader) *Database {
@@ -79,7 +89,7 @@ func NewBatchedDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB,
ddb: ddb,
rsr: rsr,
batchMode: batched,
tables: make(map[string]sql.Table),
tables: initTableMap(root),
}
}
@@ -90,7 +100,7 @@ func (db *Database) Name() string {
// GetTableInsensitive is used when resolving tables in queries. It returns a best-effort case-insensitive match for
// the table name given.
func (db *Database) GetTableInsensitive(ctx context.Context, tblName string) (sql.Table, bool, error) {
func (db *Database) GetTableInsensitive(ctx *sql.Context, tblName string) (sql.Table, bool, error) {
lwrName := strings.ToLower(tblName)
if strings.HasPrefix(lwrName, DoltDiffTablePrefix) {
tblName = tblName[len(DoltDiffTablePrefix):]
@@ -118,56 +128,168 @@ func (db *Database) GetTableInsensitive(ctx context.Context, tblName string) (sq
return NewLogTable(db.ddb, db.rsr), true, nil
}
tableNames, err := db.GetAllTableNames(ctx)
return db.getTable(ctx, db.root, tblName)
}
// GetTableInsensitiveAsOf implements sql.VersionedDatabase
func (db *Database) GetTableInsensitiveAsOf(ctx *sql.Context, tableName string, asOf interface{}) (sql.Table, bool, error) {
root, err := db.rootAsOf(ctx, asOf)
if err != nil {
return nil, false, err
} else if root == nil {
return nil, false, nil
}
exactName, ok := sql.GetTableNameInsensitive(tblName, tableNames)
return db.getTable(ctx, root, tableName)
}
// rootAsOf returns the root of the DB as of the expression given, which may be nil in the case that it refers to an
// expression before the first commit.
func (db *Database) rootAsOf(ctx *sql.Context, asOf interface{}) (*doltdb.RootValue, error) {
switch x := asOf.(type) {
case string:
return db.getRootForCommitRef(ctx, x)
case time.Time:
return db.getRootForTime(ctx, x)
default:
panic(fmt.Sprintf("unsupported AS OF type %T", asOf))
}
}
func (db *Database) getRootForTime(ctx *sql.Context, asOf time.Time) (*doltdb.RootValue, error) {
cs, err := doltdb.NewCommitSpec("HEAD", db.rsr.CWBHeadRef().String())
if err != nil {
return nil, err
}
cm, err := db.ddb.Resolve(ctx, cs)
if err != nil {
return nil, err
}
hash, err := cm.HashOf()
if err != nil {
return nil, err
}
cmItr, err := commitwalk.GetTopologicalOrderIterator(ctx, db.ddb, hash)
if err != nil {
return nil, err
}
for {
_, curr, err := cmItr.Next(ctx)
if err == io.EOF {
break
} else if err != nil {
return nil, err
}
meta, err := curr.GetCommitMeta()
if err != nil {
return nil, err
}
if meta.Time().Equal(asOf) || meta.Time().Before(asOf) {
return curr.GetRootValue()
}
}
return nil, nil
}
func (db *Database) getRootForCommitRef(ctx *sql.Context, commitRef string) (*doltdb.RootValue, error) {
cs, err := doltdb.NewCommitSpec(commitRef, db.rsr.CWBHeadRef().String())
if err != nil {
return nil, err
}
cm, err := db.ddb.Resolve(ctx, cs)
if err != nil {
return nil, err
}
root, err := cm.GetRootValue()
if err != nil {
return nil, err
}
return root, nil
}
// GetTableNamesAsOf implements sql.VersionedDatabase
func (db *Database) GetTableNamesAsOf(ctx *sql.Context, time interface{}) ([]string, error) {
root, err := db.rootAsOf(ctx, time)
if err != nil {
return nil, err
} else if root == nil {
return nil, nil
}
tblNames, err := root.GetTableNames(ctx)
if err != nil {
return nil, err
}
return filterDoltInternalTables(tblNames), nil
}
// getTable gets the table with the exact name given at the root value given. The database caches tables for all root
// values to avoid doing schema lookups on every table lookup, which are expensive.
func (db *Database) getTable(ctx context.Context, root *doltdb.RootValue, tableName string) (sql.Table, bool, error) {
if tablesForRoot, ok := db.tables[root]; ok {
if table, ok := tablesForRoot[tableName]; ok {
return table, true, nil
}
}
tableNames, err := getAllTableNames(ctx, root)
if err != nil {
return nil, true, err
}
tableName, ok := sql.GetTableNameInsensitive(tableName, tableNames)
if !ok {
return nil, false, nil
}
if table, ok := db.tables[exactName]; ok {
return table, true, nil
}
tbl, ok, err := db.root.GetTable(ctx, exactName)
tbl, ok, err := root.GetTable(ctx, tableName)
if err != nil {
return nil, false, err
} else if !ok {
panic("Name '" + exactName + "' had already been verified... This is a bug")
// Should be impossible
return nil, false, doltdb.ErrTableNotFound
}
sch, err := tbl.GetSchema(ctx)
if err != nil {
return nil, false, err
}
var toReturn sql.Table
var table sql.Table
readonlyTable := DoltTable{name: exactName, table: tbl, sch: sch, db: db}
if doltdb.IsSystemTable(exactName) {
toReturn = &readonlyTable
} else if doltdb.HasDoltPrefix(exactName) {
toReturn = &WritableDoltTable{DoltTable: readonlyTable}
readonlyTable := DoltTable{name: tableName, table: tbl, sch: sch, db: db}
if doltdb.IsSystemTable(tableName) {
table = &readonlyTable
} else if doltdb.HasDoltPrefix(tableName) {
table = &WritableDoltTable{DoltTable: readonlyTable}
} else {
toReturn = &AlterableDoltTable{WritableDoltTable{DoltTable: readonlyTable}}
table = &AlterableDoltTable{WritableDoltTable{DoltTable: readonlyTable}}
}
db.tables[exactName] = toReturn
return toReturn, true, nil
if db.tables[root] == nil {
db.tables[root] = make(map[string]sql.Table)
}
db.tables[root][tableName] = table
return table, true, nil
}
// GetTableNames returns the names of all user tables. System tables in user space (e.g. dolt_docs, dolt_query_catalog)
// are filtered out. This method is used for queries that examine the schema of the database, e.g. show tables. Table
// name resolution in queries is handled by GetTableInsensitive. Use GetAllTableNames for an unfiltered list of all
// tables in user space.
func (db *Database) GetTableNames(ctx context.Context) ([]string, error) {
func (db *Database) GetTableNames(ctx *sql.Context) ([]string, error) {
tblNames, err := db.GetAllTableNames(ctx)
if err != nil {
return nil, err
@@ -177,8 +299,12 @@ func (db *Database) GetTableNames(ctx context.Context) ([]string, error) {
// GetAllTableNames returns all user-space tables, including system tables in user space
// (e.g. dolt_docs, dolt_query_catalog).
func (db *Database) GetAllTableNames(ctx context.Context) ([]string, error) {
return db.root.GetTableNames(ctx)
func (db *Database) GetAllTableNames(ctx *sql.Context) ([]string, error) {
return getAllTableNames(ctx, db.root)
}
func getAllTableNames(ctx context.Context, root *doltdb.RootValue) ([]string, error) {
return root.GetTableNames(ctx)
}
func filterDoltInternalTables(tblNames []string) []string {
@@ -223,7 +349,7 @@ func (db *Database) DropTable(ctx *sql.Context, tableName string) error {
return err
}
delete(db.tables, tableName)
delete(db.tables[db.root], tableName)
db.SetRoot(newRoot)
@@ -312,7 +438,7 @@ func (db *Database) RenameTable(ctx *sql.Context, oldName, newName string) error
return err
}
delete(db.tables, oldName)
delete(db.tables[db.root], oldName)
db.SetRoot(root)
return nil
@@ -320,7 +446,7 @@ func (db *Database) RenameTable(ctx *sql.Context, oldName, newName string) error
// Flush flushes the current batch of outstanding changes and returns any errors.
func (db *Database) Flush(ctx context.Context) error {
for name, table := range db.tables {
for name, table := range db.tables[db.root] {
if writable, ok := table.(*WritableDoltTable); ok {
if err := writable.flushBatchedEdits(ctx); err != nil {
return err
@@ -330,13 +456,13 @@ func (db *Database) Flush(ctx context.Context) error {
return err
}
}
delete(db.tables, name)
delete(db.tables[db.root], name)
}
return nil
}
// CreateView implements sql.ViewCreator. Persists the view in the dolt database, so
// it can exist in a sql session later. Returns sql.ErrExistingView a view
// it can exist in a sql session later. Returns sql.ErrExistingView if a view
// with that name already exists.
func (db *Database) CreateView(ctx *sql.Context, name string, definition string) error {
tbl, err := GetOrCreateDoltSchemasTable(ctx, db)
+1 -1
View File
@@ -392,7 +392,7 @@ func SuperSchemaForAllBranches(ctx context.Context, cmItr doltdb.CommitItr, ddb
return nil, err
}
t, ok, err := wr.GetTable(ctx, tblName)
t, _, ok, err := wr.GetTableInsensitive(ctx, tblName)
if err != nil {
return nil, err
@@ -89,6 +89,10 @@ func NewDoltRecordResult(e *logictest.ResultLogEntry, version string) *DoltResul
result = "not ok"
case logictest.Skipped:
result = "skipped"
case logictest.Timeout:
result = "timeout"
case logictest.DidNotRun:
result = "did not run"
}
return &DoltResultRecord{
Version: version,
+130 -30
View File
@@ -18,6 +18,7 @@ import (
"context"
"github.com/google/uuid"
"gopkg.in/src-d/go-errors.v1"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
@@ -39,6 +40,48 @@ var queryCatalogCols, _ = schema.NewColCollection(
schema.NewColumn(doltdb.QueryCatalogDescriptionCol, doltdb.QueryCatalogDescriptionTag, types.StringKind, false),
)
var ErrQueryNotFound = errors.NewKind("Query '%s' not found")
type SavedQuery struct {
ID string
Name string
Query string
Description string
Order uint64
}
func savedQueryFromKV(id string, valTuple types.Tuple) (SavedQuery, error) {
tv, err := row.ParseTaggedValues(valTuple)
if err != nil {
return SavedQuery{}, err
}
nameVal := tv.GetWithDefault(doltdb.QueryCatalogNameTag, types.String(""))
queryVal := tv.GetWithDefault(doltdb.QueryCatalogQueryTag, types.String(""))
descVal := tv.GetWithDefault(doltdb.QueryCatalogDescriptionTag, types.String(""))
orderVal := tv.GetWithDefault(doltdb.QueryCatalogOrderTag, types.Uint(0))
return SavedQuery{
ID: id,
Name: string(nameVal.(types.String)),
Query: string(queryVal.(types.String)),
Description: string(descVal.(types.String)),
Order: uint64(orderVal.(types.Uint)),
}, nil
}
func (sq SavedQuery) asRow() (row.Row, error) {
taggedVals := make(row.TaggedValues)
taggedVals[doltdb.QueryCatalogIdTag] = types.String(sq.ID)
taggedVals[doltdb.QueryCatalogOrderTag] = types.Uint(sq.Order)
taggedVals[doltdb.QueryCatalogNameTag] = types.String(sq.Name)
taggedVals[doltdb.QueryCatalogQueryTag] = types.String(sq.Query)
taggedVals[doltdb.QueryCatalogDescriptionTag] = types.String(sq.Description)
return row.New(types.Format_Default, DoltQueryCatalogSchema, taggedVals)
}
var DoltQueryCatalogSchema = schema.SchemaFromCols(queryCatalogCols)
// Creates the query catalog table if it doesn't exist.
@@ -55,36 +98,65 @@ func createQueryCatalogIfNotExists(ctx context.Context, root *doltdb.RootValue)
return root, nil
}
// NewQueryCatalogEntry saves a new entry in the query catalog table and returns the new root value. An ID will be
// NewQueryCatalogEntryWithRandID saves a new entry in the query catalog table and returns the new root value. An ID will be
// chosen automatically.
func NewQueryCatalogEntry(ctx context.Context, root *doltdb.RootValue, name, query, description string) (*doltdb.RootValue, error) {
func NewQueryCatalogEntryWithRandID(ctx context.Context, root *doltdb.RootValue, name, query, description string) (SavedQuery, *doltdb.RootValue, error) {
uid, err := uuid.NewRandom()
if err != nil {
return SavedQuery{}, nil, err
}
// Use the last 12 hex digits of the uuid for the ID.
uidStr := uid.String()
id := uidStr[len(uidStr)-12:]
return newQueryCatalogEntry(ctx, root, id, name, query, description)
}
// NewQueryCatalogEntryWithNameAsID saves an entry in the query catalog table and returns the new root value. If an
// entry with the given name is already present, it will be overwritten.
func NewQueryCatalogEntryWithNameAsID(ctx context.Context, root *doltdb.RootValue, name, query, description string) (SavedQuery, *doltdb.RootValue, error) {
return newQueryCatalogEntry(ctx, root, name, name, query, description)
}
func newQueryCatalogEntry(ctx context.Context, root *doltdb.RootValue, id, name, query, description string) (SavedQuery, *doltdb.RootValue, error) {
root, err := createQueryCatalogIfNotExists(ctx, root)
if err != nil {
return nil, err
return SavedQuery{}, nil, err
}
tbl, _, err := root.GetTable(ctx, doltdb.DoltQueryCatalogTableName)
if err != nil {
return nil, err
}
uid, err := uuid.NewRandom()
if err != nil {
return nil, err
return SavedQuery{}, nil, err
}
data, err := tbl.GetRowData(ctx)
if err != nil {
return nil, err
return SavedQuery{}, nil, err
}
order := getMaxQueryOrder(data, ctx) + 1
existingSQ, err := RetrieveFromQueryCatalog(ctx, root, id)
// Use the last 12 hex digits of the uuid for the ID.
id := uid.String()[24:]
r, err := newQueryCatalogRow(id, order, name, query, description)
if err != nil {
return nil, err
if !ErrQueryNotFound.Is(err) {
return SavedQuery{}, nil, err
}
} else {
order = existingSQ.Order
}
sq := SavedQuery{
ID: id,
Name: name,
Query: query,
Description: description,
Order: order,
}
r, err := sq.asRow()
if err != nil {
return SavedQuery{}, nil, err
}
me := data.Edit()
@@ -92,25 +164,63 @@ func NewQueryCatalogEntry(ctx context.Context, root *doltdb.RootValue, name, que
updatedTable, err := me.Map(ctx)
if err != nil {
return nil, err
return SavedQuery{}, nil, err
}
newTable, err := tbl.UpdateRows(ctx, updatedTable)
if err != nil {
return nil, err
return SavedQuery{}, nil, err
}
return doltdb.PutTable(ctx, root, root.VRW(), doltdb.DoltQueryCatalogTableName, newTable)
root, err = doltdb.PutTable(ctx, root, root.VRW(), doltdb.DoltQueryCatalogTableName, newTable)
if err != nil {
return SavedQuery{}, nil, err
}
return sq, root, err
}
func RetrieveFromQueryCatalog(ctx context.Context, root *doltdb.RootValue, id string) (SavedQuery, error) {
tbl, ok, err := root.GetTable(ctx, doltdb.DoltQueryCatalogTableName)
if err != nil {
return SavedQuery{}, err
} else if !ok {
return SavedQuery{}, doltdb.ErrTableNotFound
}
m, err := tbl.GetRowData(ctx)
if err != nil {
return SavedQuery{}, err
}
k, err := types.NewTuple(root.VRW().Format(), types.Uint(doltdb.QueryCatalogIdTag), types.String(id))
if err != nil {
return SavedQuery{}, err
}
val, ok, err := m.MaybeGet(ctx, k)
if err != nil {
return SavedQuery{}, err
} else if !ok {
return SavedQuery{}, ErrQueryNotFound.New(id)
}
return savedQueryFromKV(id, val.(types.Tuple))
}
// Returns the largest order entry in the catalog
func getMaxQueryOrder(data types.Map, ctx context.Context) uint {
maxOrder := uint(0)
func getMaxQueryOrder(data types.Map, ctx context.Context) uint64 {
maxOrder := uint64(0)
data.IterAll(ctx, func(key, value types.Value) error {
r, _ := row.FromNoms(DoltQueryCatalogSchema, key.(types.Tuple), value.(types.Tuple))
orderVal, ok := r.GetColVal(doltdb.QueryCatalogOrderTag)
if ok {
order := uint(orderVal.(types.Uint))
order := uint64(orderVal.(types.Uint))
if order > maxOrder {
maxOrder = order
}
@@ -119,13 +229,3 @@ func getMaxQueryOrder(data types.Map, ctx context.Context) uint {
})
return maxOrder
}
func newQueryCatalogRow(id string, order uint, name, query, description string) (row.Row, error) {
taggedVals := make(row.TaggedValues)
taggedVals[doltdb.QueryCatalogIdTag] = types.String(id)
taggedVals[doltdb.QueryCatalogOrderTag] = types.Uint(order)
taggedVals[doltdb.QueryCatalogNameTag] = types.String(name)
taggedVals[doltdb.QueryCatalogQueryTag] = types.String(query)
taggedVals[doltdb.QueryCatalogDescriptionTag] = types.String(description)
return row.New(types.Format_Default, DoltQueryCatalogSchema, taggedVals)
}
@@ -38,8 +38,17 @@ func TestInsertIntoQueryCatalogTable(t *testing.T) {
require.NoError(t, err)
require.False(t, ok)
root, err = NewQueryCatalogEntry(ctx, root, "name", "select 1 from dual", "description")
queryStr := "select 1 from dual"
sq, root, err := NewQueryCatalogEntryWithRandID(ctx, root, "name", queryStr, "description")
require.NoError(t, err)
require.True(t, sq.ID != "")
assert.Equal(t, queryStr, sq.Query)
assert.Equal(t, "name", sq.Name)
assert.Equal(t, "description", sq.Description)
retrieved, err := RetrieveFromQueryCatalog(ctx, root, sq.ID)
require.NoError(t, err)
assert.Equal(t, sq, retrieved)
_, ok, err = root.GetTable(ctx, doltdb.DoltQueryCatalogTableName)
require.NoError(t, err)
@@ -53,8 +62,17 @@ func TestInsertIntoQueryCatalogTable(t *testing.T) {
assert.Equal(t, expectedRows, rows)
root, err = NewQueryCatalogEntry(ctx, root, "name2", "select 2 from dual", "description2")
queryStr2 := "select 2 from dual"
sq2, root, err := NewQueryCatalogEntryWithNameAsID(ctx, root, "name2", queryStr2, "description2")
require.NoError(t, err)
assert.Equal(t, "name2", sq2.ID)
assert.Equal(t, "name2", sq2.Name)
assert.Equal(t, queryStr2, sq2.Query)
assert.Equal(t, "description2", sq2.Description)
retrieved2, err := RetrieveFromQueryCatalog(ctx, root, sq2.ID)
require.NoError(t, err)
assert.Equal(t, sq2, retrieved2)
rows, err = ExecuteSelect(root, "select display_order, query, name, description from "+doltdb.DoltQueryCatalogTableName+" order by display_order")
require.NoError(t, err)
@@ -71,4 +89,13 @@ func TestInsertIntoQueryCatalogTable(t *testing.T) {
assert.NotEmpty(t, r)
assert.NotEmpty(t, r[0])
}
queryStr3 := "select 3 from dual"
sq3, root, err := NewQueryCatalogEntryWithNameAsID(ctx, root, "name2", queryStr3, "description3")
require.NoError(t, err)
assert.Equal(t, "name2", sq3.ID)
assert.Equal(t, "name2", sq3.Name)
assert.Equal(t, queryStr3, sq3.Query)
assert.Equal(t, "description3", sq3.Description)
assert.Equal(t, sq2.Order, sq3.Order)
}
+9 -9
View File
@@ -15,18 +15,18 @@
package sqle
import (
"context"
"fmt"
dsql "github.com/liquidata-inc/dolt/go/libraries/doltcore/sql"
"github.com/liquidata-inc/dolt/go/store/types"
"context"
"fmt"
"strconv"
"strings"
"strings"
"github.com/src-d/go-mysql-server/sql"
"github.com/src-d/go-mysql-server/sql"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/typeinfo"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/typeinfo"
dsql "github.com/liquidata-inc/dolt/go/libraries/doltcore/sql"
"github.com/liquidata-inc/dolt/go/store/types"
)
// doltSchemaToSqlSchema returns the sql.Schema corresponding to the dolt schema given.
@@ -21,8 +21,6 @@ import (
dsql "github.com/liquidata-inc/dolt/go/libraries/doltcore/sql"
)
// The fixed schema for the `dolt_schemas` table.
func SchemasTableSchema() sql.Schema {
return []*sql.Column{
+1 -1
View File
@@ -260,7 +260,7 @@ func rowsEqual(expected, actual row.Row) (bool, string) {
panic(err)
}
opts := cmp.Options{cmp.AllowUnexported(), dtestutils.FloatComparer}
opts := cmp.Options{cmp.AllowUnexported(), dtestutils.FloatComparer, dtestutils.TimestampComparer}
eq := cmp.Equal(er, ar, opts)
var diff string
if !eq {
+2 -2
View File
@@ -220,8 +220,8 @@ func TestCreateTable(t *testing.T) {
schemaNewColumn(t, "age", 10, sql.Int32, false)),
},
{
name: "Test faulty tag comments",
query: `create table testTable (
name: "Test faulty tag comments",
query: `create table testTable (
id int primary key comment 'tag:a', age int comment 'this is my personal area')`,
expectedTable: "testTable",
expectedSchema: dtestutils.CreateSchema(
+12 -3
View File
@@ -38,7 +38,7 @@ const singleSelectQueryTest = "" //"Natural join with join clause"
// Set to false to run tests known to be broken
const skipBrokenSelect = true
func TestExecuteSelect(t *testing.T) {
func TestSelect(t *testing.T) {
for _, test := range BasicSelectTests {
t.Run(test.Name, func(t *testing.T) {
testSelectQuery(t, test)
@@ -46,7 +46,7 @@ func TestExecuteSelect(t *testing.T) {
}
}
func TestExecuteSelectDiff(t *testing.T) {
func TestDiffQueries(t *testing.T) {
for _, test := range SelectDiffTests {
t.Run(test.Name, func(t *testing.T) {
testSelectDiffQuery(t, test)
@@ -54,6 +54,15 @@ func TestExecuteSelectDiff(t *testing.T) {
}
}
func TestAsOfQueries(t *testing.T) {
for _, test := range AsOfTests {
t.Run(test.Name, func(t *testing.T) {
// AS OF queries use the same history as the diff tests, so exercise the same test setup
testSelectDiffQuery(t, test)
})
}
}
func TestJoins(t *testing.T) {
for _, tt := range JoinTests {
t.Run(tt.Name, func(t *testing.T) {
@@ -141,7 +150,7 @@ type testCommitClock struct {
func (tcc *testCommitClock) Now() time.Time {
now := time.Unix(0, tcc.unixNano)
tcc.unixNano += int64(time.Millisecond)
tcc.unixNano += int64(time.Hour)
return now
}
+9 -3
View File
@@ -29,9 +29,15 @@ var ErrDuplicatePrimaryKeyFmt = "duplicate primary key given: (%v)"
// tableEditor supports making multiple row edits (inserts, updates, deletes) to a table. It does error checking for key
// collision etc. in the Close() method, as well as during Insert / Update.
// Right now a table editor allows you to combine inserts, updates, and deletes in any order, and makes reasonable
// attempts to produce correct results when doing so. But this probably (definitely) doesn't work in every case, and
// higher-level clients should carefully flush the editor when necessary (i.e. before an update after many inserts).
//
// The tableEditor has two levels of batching: one supported at the SQL engine layer where a single UPDATE, DELETE or
// INSERT statement will touch many rows, and we want to avoid unnecessary intermediate writes; and one at the dolt
// layer as a "batch mode" in DoltDatabase. In the latter mode, it's possible to mix inserts, updates and deletes in any
// order. In general, this is unsafe and will produce incorrect results in many cases. The editor makes reasonable
// attempts to produce correct results when interleaving insert and delete statements, but this is almost entirely to
// support REPLACE statements, which are implemented as a DELETE followed by an INSERT. In general, not flushing the
// editor after every SQL statement is incorrect and will return incorrect results. The single reliable exception is an
// unbroken chain of INSERT statements, where we have taken pains to batch writes to speed things up.
type tableEditor struct {
t *WritableDoltTable
ed *types.MapEditor
+1 -1
View File
@@ -18,7 +18,6 @@ import (
"context"
"errors"
"fmt"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/typeinfo"
"io"
"github.com/src-d/go-mysql-server/sql"
@@ -27,6 +26,7 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/alterschema"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/typeinfo"
"github.com/liquidata-inc/dolt/go/store/types"
)
+5 -5
View File
@@ -15,12 +15,12 @@
package table
import (
"context"
"errors"
"io"
"context"
"errors"
"io"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
)
// TableReader is an interface for reading rows from a table
@@ -17,6 +17,7 @@ package resultset
import (
"errors"
"fmt"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/rowconv"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"