Merge remote-tracking branch 'origin/master' into aaron/dolt-creds-create-perms

This commit is contained in:
Aaron Son
2020-04-16 08:04:06 -07:00
91 changed files with 3916 additions and 833 deletions

View File

@@ -1,8 +1,8 @@
FROM golang:1.13.5-buster as builder
FROM golang:1.14.2-buster as builder
WORKDIR /root/building/go
COPY ./go/ .
RUN go mod vendor
RUN go build -mod=vendor -o dolt ./cmd/dolt
ENV GOFLAGS="-mod=readonly"
RUN go build -o dolt ./cmd/dolt
FROM ubuntu:18.04
COPY --from=builder /root/building/go/dolt /usr/local/bin/dolt
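For context on this change: with Go modules, setting GOFLAGS="-mod=readonly" makes the build fail if go.mod or go.sum would need to change, which removes the need for a separate vendoring step. A minimal sketch of the new build flow (paths as in the Dockerfile above):

export GOFLAGS="-mod=readonly"   # fail the build if go.mod/go.sum are out of date
go build -o dolt ./cmd/dolt      # builds from the module cache; no `go mod vendor` step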

View File

@@ -85,18 +85,18 @@ teardown() {
@test "dolt sql all manner of inserts" {
run dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,6,6,6,6,6)"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 1 |" ]] || false
[[ "$output" =~ "Query OK, 1 row affected" ]] || false
run dolt sql -q "select * from test"
[[ "$output" =~ "6" ]] || false
run dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (1,7,7,7,7,7),(2,8,8,8,8,8)"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 2 |" ]] || false
[[ "$output" =~ "Query OK, 2 rows affected" ]] || false
run dolt sql -q "select * from test"
[[ "$output" =~ "7" ]] || false
[[ "$output" =~ "8" ]] || false
run dolt sql -q "insert into test (pk,c1,c3,c5) values (3,9,9,9)"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 1 |" ]] || false
[[ "$output" =~ "Query OK, 1 row affected" ]] || false
run dolt sql -q "select * from test"
[[ "$output" =~ "9" ]] || false
run dolt sql -q "insert into test (c1,c3,c5) values (50,55,60)"
@@ -119,7 +119,7 @@ teardown() {
@test "dolt sql insert no columns specified" {
run dolt sql -q "insert into test values (0,0,0,0,0,0)"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 1 |" ]] || false
[[ "$output" =~ "Query OK, 1 row affected" ]] || false
run dolt sql -q "select * from test"
[[ "$output" =~ "0" ]] || false
run dolt sql -q "insert into test values (4,1,2)"
@@ -142,21 +142,23 @@ teardown() {
dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,6,6,6,6,6)"
run dolt sql -q "replace into test (pk,c1,c2,c3,c4,c5) values (0,7,7,7,7,7),(1,8,8,8,8,8)"
[ "$status" -eq 0 ]
[[ "${lines[1]}" =~ "updated" ]] || false
# Not skipped, but this is a bug in the output: the query produces the right result but miscounts the affected rows
[[ "$output" =~ "Query OK, 4 rows affected" ]] || false
## Not skipped either; this should report 3 but currently reports 4: [[ "${lines[3]}" =~ "3" ]] || false
run dolt sql -q "select * from test"
[[ "$output" =~ "7" ]] || false
[[ "$output" =~ "8" ]] || false
[[ ! "$output" =~ "6" ]] || false
skip "replace into output is incorrect"
}
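For context on the count above: under MySQL semantics, a REPLACE that overwrites an existing row counts as two affected rows (a delete plus an insert), so the statement in this test should report 3, not 4. A hedged illustration:

dolt sql -q "replace into test (pk,c1,c2,c3,c4,c5) values (0,7,7,7,7,7),(1,8,8,8,8,8)"
# pk=0 exists: delete + insert = 2 rows; pk=1 is new: 1 row
# expected: 'Query OK, 3 rows affected'; dolt currently reports 4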
@test "dolt sql insert and dolt sql select" {
run dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5)"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 1 |" ]] || false
[[ "$output" =~ "Query OK, 1 row affected" ]] || false
run dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (101,102,103,104,105,106),(1,6,7,8,9,10)"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 2 |" ]] || false
[[ "$output" =~ "Query OK, 2 rows affected" ]] || false
run dolt sql -q "select * from test"
[ "$status" -eq 0 ]
[[ "$output" =~ \|[[:space:]]+c5 ]] || false
@@ -221,8 +223,31 @@ teardown() {
[ "$status" -eq 0 ]
[[ "$output" =~ 'column1' ]] || false
[ "${#lines[@]}" -eq 2 ]
# Test that null values are properly output
dolt sql -q "insert into test (pk,c1) values (40,1)"
run dolt sql -q "select c1 as column1, c2 as column2, c3 as column3 from test where pk = 40" -r csv
[ "$status" -eq 0 ]
[[ "$output" =~ "column1,column2,column3" ]] || false
[[ "$output" =~ "1,," ]] || false
}
@test "dolt sql select json output" {
dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)"
run dolt sql -q "select c1 as column1, c2 as column2 from test" -r json
[ "$status" -eq 0 ]
[ "$output" == '{"rows": [{"column1":1,"column2":2},{"column1":11,"column2":12},{"column1":21,"column2":22}]}' ]
run dolt sql -q "select c1 as column1 from test where c1=1" -r json
[ "$status" -eq 0 ]
[ "$output" == '{"rows": [{"column1":1}]}' ]
# Test that null values are properly handled
dolt sql -q "insert into test (pk,c1) values (40,1)"
run dolt sql -q "select c1 as column1, c2 as column2, c3 as column3 from test where pk = 40" -r json
[ "$status" -eq 0 ]
[ "$output" == '{"rows": [{"column1":1}]}' ]
}
@test "dolt sql select with inverted where clause" {
dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)"
@@ -235,7 +260,8 @@ teardown() {
dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)"
run dolt sql -q "update test set c1=6,c2=7,c3=8,c4=9,c5=10 where pk=0"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 1 | 1 |" ]] || false
[[ "$output" =~ "Query OK, 1 row affected" ]] || false
[[ "$output" =~ "Rows matched: 1 Changed: 1 Warnings: 0" ]] || false
run dolt sql -q "select * from test where pk=0"
[ "$status" -eq 0 ]
[[ "$output" =~ "10" ]] || false
@@ -243,14 +269,16 @@ teardown() {
dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (4,11,12,13,14,15)"
run dolt sql -q "update test set c2=11,c3=11,c4=11,c5=11 where c1=11"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 2 | 2 |" ]] || false
[[ "$output" =~ "Query OK, 2 rows affected" ]] || false
[[ "$output" =~ "Rows matched: 2 Changed: 2 Warnings: 0" ]] || false
run dolt sql -q "select * from test"
[ "$status" -eq 0 ]
[[ "$output" =~ "11" ]] || false
[[ ! "$output" =~ "12" ]] || false
run dolt sql -q "update test set c2=50,c3=50,c4=50,c5=50 where c1=50"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 0 | 0 |" ]] || false
[[ "$output" =~ "Query OK, 0 rows affected" ]] || false
[[ "$output" =~ "Rows matched: 0 Changed: 0 Warnings: 0" ]] || false
run dolt sql -q "select * from test"
[ "$status" -eq 0 ]
[[ ! "$output" =~ "50" ]] || false
@@ -262,7 +290,8 @@ teardown() {
[ "$output" = "unable to cast \"foo\" of type string to int64" ]
run dolt sql -q "update test set c1=100,c2=100,c3=100,c4=100,c5=100 where pk>0"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 3 | 3 |" ]] || false
[[ "$output" =~ "Query OK, 3 rows affected" ]] || false
[[ "$output" =~ "Rows matched: 3 Changed: 3 Warnings: 0" ]] || false
run dolt sql -q "select * from test"
[ "$status" -eq 0 ]
[[ "$output" =~ "100" ]] || false
@@ -274,24 +303,24 @@ teardown() {
dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)"
run dolt sql -q "delete from test where pk=2"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 1 |" ]] || false
[[ "$output" =~ "Query OK, 1 row affected" ]] || false
run dolt sql -q "delete from test"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 2 |" ]] || false
[[ "$output" =~ "Query OK, 2 rows affected" ]] || false
dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)"
run dolt sql -q "delete from test where pk>0"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 2 |" ]] || false
[[ "$output" =~ "Query OK, 2 rows affected" ]] || false
run dolt sql -q "delete from test where c1=1"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 1 |" ]] || false
[[ "$output" =~ "Query OK, 1 row affected" ]] || false
dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values (0,1,2,3,4,5),(1,11,12,13,14,15),(2,21,22,23,24,25)"
run dolt sql -q "delete from test where c10=1"
[ "$status" -eq 1 ]
[ "$output" = "column \"c10\" could not be found in any table in scope" ]
run dolt sql -q "delete from test where c1='foo'"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 0 |" ]] || false
[[ "$output" =~ "Query OK, 0 rows affected" ]] || false
}
@test "dolt checkout to put a table back to its checked in state" {

View File

@@ -48,7 +48,7 @@ teardown() {
@test "interact with a strings type table with sql" {
run dolt sql -q "insert into test (pk,c1,c2,c3,c4,c5) values ('tim','is','super','duper','rad','fo sho')"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 1 |" ]] || false
[[ "$output" =~ "Query OK, 1 row affected" ]] || false
run dolt sql -q "select * from test"
[ "$status" -eq 0 ]
[[ "$output" =~ "c5" ]] || false

View File

@@ -76,14 +76,14 @@ teardown() {
@test "interact with a multiple primary key table with sql" {
run dolt sql -q "insert into test (pk1,pk2,c1,c2,c3,c4,c5) values (0,0,6,6,6,6,6)"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 1 |" ]] || false
[[ "$output" =~ "Query OK, 1 row affected" ]] || false
run dolt sql -q "select * from test"
[ "$status" -eq 0 ]
[[ "$output" =~ "c5" ]] || false
[[ "$output" =~ "6" ]] || false
run dolt sql -q "insert into test (pk1,pk2,c1,c2,c3,c4,c5) values (0,1,7,7,7,7,7),(1,0,8,8,8,8,8)"
[ "$status" -eq 0 ]
[[ "$output" =~ "| 2 |" ]] || false
[[ "$output" =~ "Query OK, 2 rows affected" ]] || false
run dolt sql -q "select * from test"
[ "$status" -eq 0 ]
[[ "$output" =~ "c5" ]] || false
@@ -103,4 +103,4 @@ teardown() {
run dolt sql -q "insert into test (c1,c2,c3,c4,c5) values (6,6,6,6,6)"
[ "$status" -eq 1 ]
[ "$output" = "column name 'pk1' is non-nullable but attempted to set default value of null" ] || false
}
}

View File

@@ -60,7 +60,7 @@ function export_tables() {
places
do
dolt table export "$table" "$table$1.csv"
dolt sql -r csv -q "select * from $table" > "$table$1.sql.csv"
dolt sql -r csv -q "select * from $table" | sed 's/<NULL>//g' > "$table$1.sql.csv"
done
}
@@ -99,7 +99,7 @@ local_bin="`pwd`"/"$bin"
PATH="$local_bin":"$PATH" dolt clone Liquidata/corona-virus
pushd "corona-virus"
PATH="$local_bin":"$PATH" export_tables "-pre"
dolt migrate
time dolt migrate
export_tables "-post"
diff_tables
echo "success!"

View File

@@ -0,0 +1,16 @@
load helper/windows-compat
if [ -z "$BATS_TMPDIR" ]; then
export BATS_TMPDIR=$HOME/batstmp/
mkdir $BATS_TMPDIR
fi
setup_common() {
echo "setup" > /dev/null
}
teardown_common() {
echo "teardown" > /dev/null
}
dolt config --global --add metrics.disabled true > /dev/null 2>&1

View File

@@ -0,0 +1,24 @@
nativepath() { echo "$1"; }
nativevar() { eval export "$1"="$2"; }
skiponwindows() { :; }
IS_WINDOWS=false
if [ -d /mnt/c/Windows/System32 ]; then
IS_WINDOWS=true
if [ ! -d /mnt/c/batstmp ]; then
mkdir /mnt/c/batstmp
fi
BATS_TMPDIR=`TMPDIR=/mnt/c/batstmp mktemp -d -t dolt-bats-tests-XXXXXX`
export BATS_TMPDIR
nativepath() {
wslpath -w "$1"
}
nativevar() {
eval export "$1"="$2"
export WSLENV="$1$3"
}
skiponwindows() {
skip "$1"
}
fi
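A usage sketch for these helpers (the variable name is hypothetical): nativevar's third argument is the WSLENV translation suffix, e.g. "/p" to have Windows processes see a translated path.

nativevar DOLT_TMP "$(nativepath "$BATS_TMPDIR")" "/p"
# on WSL this sets WSLENV=DOLT_TMP/p so Windows-side tools receive a Windows path;
# on Linux/macOS the no-op definitions at the top of the file apply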

View File

@@ -0,0 +1,69 @@
#!/bin/bash
set -eo pipefail
function download_release() {
ver=$1
dirname=binaries/"$ver"
mkdir "$dirname"
basename=dolt-"$PLATFORM_TUPLE"
filename="$basename".tar.gz
filepath=binaries/"$ver"/"$filename"
url="https://github.com/liquidata-inc/dolt/releases/download/$ver/$filename"
curl -L -o "$filepath" "$url"
cd "$dirname" && tar zxf "$filename"
echo "$dirname"/"$basename"/bin
}
get_platform_tuple() {
OS=$(uname)
ARCH=$(uname -m)
if [ "$OS" != Linux -a "$OS" != Darwin ]; then
echo "tests only support linux or macOS." 1>&2
exit 1
fi
if [ "$ARCH" != x86_64 -a "$ARCH" != i386 -a "$ARCH" != i686 ]; then
echo "tests only support x86_64 or x86." 1>&2
exit 1
fi
if [ "$OS" == Linux ]; then
PLATFORM_TUPLE=linux
else
PLATFORM_TUPLE=darwin
fi
if [ "$ARCH" == x86_64 ]; then
PLATFORM_TUPLE="$PLATFORM_TUPLE"-amd64
else
PLATFORM_TUPLE="$PLATFORM_TUPLE"-386
fi
echo "$PLATFORM_TUPLE"
}
PLATFORM_TUPLE=`get_platform_tuple`
setup_test_repos() {
./setup_repo.sh "$1"
mkdir "$1-remote"
pushd "$1"
dolt remote add origin "file://../$1-remote"
# branches created in setup_repo.sh
dolt push origin init
dolt push origin master
dolt push origin other
popd
dolt clone "file://$1-remote" "$1-clone"
}
TOP_DIR=`pwd`
function cleanup() {
pushd $TOP_DIR
rm -rf binaries
rm -rf repo*
popd
}
mkdir binaries
trap cleanup "EXIT"
bin=`download_release "v0.15.2"`
PATH="`pwd`"/"$bin":"$PATH" setup_test_repos "repo"
TEST_REPO="repo" bats migrate.bats
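The harness resolves paths like this (a sketch assuming a Linux/amd64 host):

bin=`download_release "v0.15.2"`           # unpacks to binaries/v0.15.2/dolt-linux-amd64/
                                           # and echoes binaries/v0.15.2/dolt-linux-amd64/bin
PATH="`pwd`"/"$bin":"$PATH" dolt version   # run the old binary to set up pre-migration repos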

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env bats
load $BATS_TEST_DIRNAME/helper/common.bash
setup() {
setup_common
}
teardown() {
teardown_common
}
@test "dolt migrate --push & dolt migrate --pull" {
pushd "$TEST_REPO"
run dolt migrate --pull
[ "$status" -ne "0" ]
[[ "$output" =~ "Local repo must be migrated before pulling, run 'dolt migrate'" ]] || false
run dolt migrate --push
[ "$status" -ne "0" ]
[[ "$output" =~ "Local repo must be migrated before pushing, run 'dolt migrate'" ]] || false
run dolt migrate
[ "$status" -eq "0" ]
[[ "$output" =~ "Migrating repository to the latest format" ]] || false
run dolt migrate --pull
[ "$status" -ne "0" ]
[[ "$output" =~ "Remote origin has not been migrated" ]] || false
[[ "$output" =~ "Run 'dolt migrate --push origin' to push migration" ]] || false
run dolt migrate --push
[ "$status" -eq "0" ]
popd
pushd "$TEST_REPO-clone"
run dolt migrate --pull
[ "$status" -ne "0" ]
[[ "$output" =~ "Local repo must be migrated before pulling, run 'dolt migrate'" ]] || false
run dolt migrate --push
[ "$status" -ne "0" ]
[[ "$output" =~ "Local repo must be migrated before pushing, run 'dolt migrate'" ]] || false
run dolt migrate
[ "$status" -eq "0" ]
[[ "$output" =~ "Migrating repository to the latest format" ]] || false
run dolt migrate --push
[ "$status" -ne "0" ]
[[ "$output" =~ "Remote origin has been migrated" ]] || false
[[ "$output" =~ "Run 'dolt migrate --pull' to update refs" ]] || false
run dolt migrate --pull
[ "$status" -eq "0" ]
popd
}

View File

@@ -0,0 +1,67 @@
#!/bin/bash
set -eo pipefail
mkdir "$1"
cd "$1"
dolt init
dolt sql <<SQL
CREATE TABLE abc (
pk BIGINT NOT NULL COMMENT 'tag:0',
a LONGTEXT COMMENT 'tag:100',
b DOUBLE COMMENT 'tag:101',
w BIGINT COMMENT 'tag:102',
x BIGINT COMMENT 'tag:103',
PRIMARY KEY (pk)
);
INSERT INTO abc VALUES (0, 'asdf', 1.1, 0, 0);
INSERT INTO abc VALUES (1, 'asdf', 1.1, 0, 0);
INSERT INTO abc VALUES (2, 'asdf', 1.1, 0, 0);
SQL
dolt add .
dolt commit -m "initialized data"
dolt branch init
dolt branch other
dolt sql <<SQL
DELETE FROM abc WHERE pk=1;
INSERT INTO abc VALUES (3, 'data', 1.1, 0, 0);
ALTER TABLE abc DROP COLUMN w;
ALTER TABLE abc ADD COLUMN y BIGINT COMMENT 'tag:104';
SQL
dolt add .
dolt commit -m "made changes to master"
dolt checkout other
dolt sql <<SQL
DELETE FROM abc WHERE pk=2;
INSERT INTO abc VALUES (4, 'data', 1.1, 0, 0);
ALTER TABLE abc DROP COLUMN x;
ALTER TABLE abc ADD COLUMN z BIGINT COMMENT 'tag:105';
SQL
dolt add .
dolt commit -m "made changes to other"
dolt checkout master
dolt table export abc abc.csv
dolt schema export abc abc_schema.json
# add info to the log
echo
echo "dolt status"
dolt status
echo
echo "dolt branch"
dolt branch
echo
echo "dolt schema show"
dolt schema show
echo
echo "dolt sql -q 'select * from abc;'"
dolt sql -q 'select * from abc;'

View File

@@ -163,3 +163,17 @@ teardown() {
dolt sql -q 'drop table abc2'
}
@test "dolt migrate no-data" {
# this will fail for older dolt versions but BATS will swallow the error
run dolt migrate
dolt checkout no-data
run dolt sql -q 'show tables;'
[ "$status" -eq 0 ]
[[ "$output" =~ "+-------+" ]] || false
[[ "$output" =~ "| Table |" ]] || false
[[ "$output" =~ "+-------+" ]] || false
[[ "$output" =~ "+-------+" ]] || false
}
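The comment above works because of BATS' run wrapper: run captures a command's exit status and output in $status and $output instead of failing the test on a nonzero exit. A minimal sketch:

run dolt migrate        # an error here is captured, not fatal to the test
[ "$status" -eq 0 ]     # failures only surface through explicit assertions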

View File

@@ -7,6 +7,8 @@ cd "$1"
dolt init
dolt branch no-data
dolt sql <<SQL
CREATE TABLE abc (
pk BIGINT NOT NULL COMMENT 'tag:0',

View File

@@ -451,6 +451,59 @@ SQL
[ "$status" -eq 1 ]
}
@test "create a table with a SQL reserved word" {
dolt sql <<SQL
CREATE TABLE test (
pk INT NOT NULL,
\`all\` INT,
\`select\` INT,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq 0 ]
[[ "$output" =~ "all" ]] || false
[[ "$output" =~ "select" ]] || false
run dolt sql <<SQL
CREATE TABLE test (
pk INT NOT NULL,
all INT,
select INT,
PRIMARY KEY (pk)
);
SQL
[ "$status" -ne 0 ]
}
@test "create a table with a SQL keyword that is not reserved" {
dolt sql <<SQL
CREATE TABLE test (
pk INT NOT NULL,
\`comment\` INT,
\`date\` INT,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq 0 ]
[[ "$output" =~ "comment" ]] || false
[[ "$output" =~ "date" ]] || false
run dolt sql <<SQL
CREATE TABLE test (
pk INT NOT NULL,
comment INT,
date INT,
PRIMARY KEY (pk)
);
SQL
skip "Current SQL parser requires backticks around keywords, not just reserved words"
[ "$status" -eq 0 ]
run dolt schema show
[ "$status" -eq 0 ]
[[ "$output" =~ "comment" ]] || false
[[ "$output" =~ "date" ]] || false
}
@test "import a table with non UTF-8 characters in it" {
run dolt table import -c --pk=pk test `batshelper bad-characters.csv`
skip "Dolt allows you to create tables with non-UTF-8 characters right now"

View File

@@ -95,3 +95,35 @@ teardown() {
perms=$(ls -l "$file" | awk '{print $1}')
[ "$perms" == "-rw-------" ]
}
@test "can import cred from good jwk file" {
dolt creds import `batshelper known-good.jwk`
}
@test "can import cred from good jwk stdin" {
dolt creds import <"$BATS_TEST_DIRNAME/helper/known-good.jwk"
}
@test "import cred of corrupted jwk from file fails" {
run dolt creds import `batshelper known-truncated.jwk`
[ "$status" -eq 1 ]
run dolt creds import `batshelper known-decapitated.jwk`
[ "$status" -eq 1 ]
run dolt creds import does-not-exist
[ "$status" -eq 1 ]
}
@test "import cred of corrupted jwk from stdin fails" {
run dolt creds import <"$BATS_TEST_DIRNAME/helper/known-truncated.jwk"
[ "$status" -eq 1 ]
run dolt creds import <"$BATS_TEST_DIRNAME/helper/known-decapitated.jwk"
[ "$status" -eq 1 ]
run dolt creds import </dev/null
[ "$status" -eq 1 ]
}
@test "import cred with already used cred does not replace used cred" {
pubkey=`dolt creds new | grep 'pub key:' | awk '{print $3}'`
dolt creds import `batshelper known-good.jwk`
dolt creds ls -v | grep '*' | grep "$pubkey"
}
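In the last test above, the grep chain relies on dolt creds ls -v marking the credential currently in use with a '*'; the assertion is that the pre-existing key, not the imported one, still carries the marker. Schematically:

dolt creds ls -v
# * <pubkey minted by 'dolt creds new'>   <- '*' should remain here after the import
#   <imported pubkey>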

View File

@@ -13,6 +13,7 @@ setup() {
dolt remote add test-remote $REMOTE
dolt push test-remote master
export DOLT_HEAD_COMMIT=`get_head_commit`
skiponwindows "git-dolt tests are flaky on Windows"
}
teardown() {
@@ -24,6 +25,7 @@ teardown() {
@test "git dolt install sets up a smudge filter in the current git repository" {
init_git_repo
run git dolt install
[ "$status" -eq 0 ]
[[ "${lines[0]}" =~ "Installed git-dolt smudge filter" ]] || false

View File

@@ -0,0 +1,4 @@
pk, test_date
0, 2013-09-24 00:01:35
1, "2011-10-24 13:17:42"
2, 2018-04-13

View File

@@ -0,0 +1 @@
O5P55nw4uSrbFKBPciCtihlIDlMPjKVqEb0HTuw8e0HcUTtolU2HZZbrlmdQjgAID9Y3_D7dCGYYw4Bw==","kty":"OKP","crv":"Ed25519"}

bats/helper/known-good.jwk Executable file
View File

@@ -0,0 +1 @@
{"d":"7sPHtB3FE7aJVNh2WW65ZnUI4ACA_WN_w-3QhmGMOAc=","x":"jmWVlqO5P55nw4uSrbFKBPciCtihlIDlMPjKVqEb0HTuw8e0HcUTtolU2HZZbrlmdQjgAID9Y3_D7dCGYYw4Bw==","kty":"OKP","crv":"Ed25519"}

View File

@@ -0,0 +1 @@
{"d":"7sPHtB3FE7aJVNh2WW65ZnUI4ACA_WN_w-3QhmGMOAc=","x":"jmWVlqO5P55nw4uSrbFKBPciCtihlIDlMPjKVqEb0HTuw8e0HcUTtolU2HZZbrlmdQjgAID9Y3_D7dCGYY

View File

@@ -226,6 +226,6 @@ NOT_VALID_REPO_ERROR="The current directory is not a valid dolt repository."
@test "all versions of help work outside a repository" {
dolt checkout --help
dolt checkout -help
skip "No dashes in front of help segfaults right now"
dolt checkout help
run dolt checkout help
[ "$status" -ne 0 ]
}
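For reference, the forms the help detection accepts after the cli change later in this diff (see isHelp): the string must start with a dash; dashes are stripped and the remainder is lowercased and compared to "h" or "help". Illustrated with the commands above:

dolt checkout --help    # help: leading dash, strips to "help"
dolt checkout -h        # help: strips to "h"
dolt checkout help      # not help: no leading dash, treated as an argument (currently segfaults)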

View File

@@ -148,7 +148,7 @@ teardown() {
}
@test "schema import with strings in csv" {
# This CSV has queoted integers for the primary key ie "0","foo",... and
# This CSV has quoted integers for the primary key ie "0","foo",... and
# "1","bar",...
run dolt schema import -r --keep-types --pks=pk test `batshelper 1pk5col-strings.csv`
[ "$status" -eq 0 ]
@@ -163,3 +163,11 @@ teardown() {
[[ "$output" =~ "\`c6\` LONGTEXT" ]] || false
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
}
@test "schema import supports dates andf times" {
run dolt schema import -c --pks=pk test `batshelper 1pk-datetime.csv`
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 6 ]
skip "schema import does not support datetime"
[[ "$output" =~ "DATETIME" ]] || false;
}

View File

@@ -29,7 +29,7 @@ seed_repos_with_tables_with_use_statements() {
}
@test "sql multi-db test show databases" {
EXPECTED=$(echo -e "Database\nrepo1\nrepo2")
EXPECTED=$(echo -e "Database\ninformation_schema\nrepo1\nrepo2")
run dolt sql -r csv --multi-db-dir ./ -q "SHOW DATABASES"
[ "$status" -eq 0 ]
[[ "$output" =~ "$EXPECTED" ]] || false
@@ -82,4 +82,4 @@ seed_repos_with_tables_with_use_statements() {
echo \"\"\"$output\"\"\"
[ "$status" -eq 0 ]
[[ "$output" =~ "$EXPECTED" ]] || false
}
}

View File

@@ -160,6 +160,38 @@ teardown() {
[[ "$output" =~ "not found" ]] || false
}
@test "sql output formats" {
dolt sql <<SQL
CREATE TABLE test (
a int primary key,
b float,
c varchar(80),
d datetime
);
SQL
dolt sql <<SQL
insert into test values (1, 1.5, "1", "2020-01-01");
insert into test values (2, 2.5, "2", "2020-02-02");
insert into test values (3, NULL, "3", "2020-03-03");
insert into test values (4, 4.5, NULL, "2020-04-04");
insert into test values (5, 5.5, "5", NULL);
SQL
run dolt sql -r csv -q "select * from test order by a"
[ $status -eq 0 ]
[[ "$output" =~ "a,b,c,d" ]] || false
[[ "$output" =~ '1,1.5,1,2020-01-01 00:00:00 +0000 UTC' ]] || false
[[ "$output" =~ '2,2.5,2,2020-02-02 00:00:00 +0000 UTC' ]] || false
[[ "$output" =~ '3,,3,2020-03-03 00:00:00 +0000 UTC' ]] || false
[[ "$output" =~ '4,4.5,,2020-04-04 00:00:00 +0000 UTC' ]] || false
[[ "$output" =~ '5,5.5,5,' ]] || false
[ "${#lines[@]}" -eq 6 ]
run dolt sql -r json -q "select * from test order by a"
[ $status -eq 0 ]
[ "$output" == '{"rows": [{"a":1,"b":1.5,"c":"1","d":{}},{"a":2,"b":2.5,"c":"2","d":{}},{"a":3,"c":"3","d":{}},{"a":4,"b":4.5,"d":{}},{"a":5,"b":5.5,"c":"5"}]}' ]
}
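Two quirks of the JSON encoder are codified by the expected string above: columns that are NULL are omitted from the row object entirely, and DATETIME values currently serialize as empty objects. For example, the row with a NULL datetime comes back as:

{"a":5,"b":5.5,"c":"5"}   # column d is absent rather than null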
@test "sql ambiguous column name" {
run dolt sql -q "select pk,pk1,pk2 from one_pk,two_pk where c1=0"
[ "$status" -eq 1 ]
@@ -593,4 +625,4 @@ SQL
skip run dolt sql -q "INSERT INTO test (col_a,col_b,col_c) VALUES ('a','','b') ON DUPLICATE KEY UPDATE col_a = col_a, col_b = col_b, col_c = VALUES(col_c);"
[ $status -eq 0 ]
[[ ! "$output" =~ 'unsupported feature' ]] || false
}
}

View File

@@ -12,14 +12,14 @@ teardown() {
@test "types: BIGINT" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v BIGINT COMMENT 'tag:1',
pk BIGINT NOT NULL,
v BIGINT,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` BIGINT COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` BIGINT" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 4611686018427387903);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -37,14 +37,14 @@ SQL
@test "types: BIGINT UNSIGNED" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v BIGINT UNSIGNED COMMENT 'tag:1',
pk BIGINT NOT NULL,
v BIGINT UNSIGNED,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` BIGINT UNSIGNED COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` BIGINT UNSIGNED" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 9223372036854775807);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -64,14 +64,14 @@ SQL
skip "This is not yet persisted in dolt"
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v BINARY(10) COMMENT 'tag:1',
pk BIGINT NOT NULL,
v BINARY(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` BINARY(10) COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` BINARY(10)" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -87,14 +87,14 @@ SQL
@test "types: BIT(10)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v BIT(10) COMMENT 'tag:1',
pk BIGINT NOT NULL,
v BIT(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` BIT(10) COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` BIT(10)" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 511);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -113,14 +113,14 @@ SQL
skip "This is not yet persisted in dolt"
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v BLOB COMMENT 'tag:1',
pk BIGINT NOT NULL,
v BLOB,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` BLOB COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` BLOB" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -131,17 +131,30 @@ SQL
[[ "${lines[3]}" =~ " 1234567890 " ]] || false
}
@test "types: BOOLEAN" {
@test "types: BOOL" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v BOOLEAN COMMENT 'tag:1',
pk BIGINT NOT NULL,
v BOOL,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` TINYINT COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` TINYINT" ]] || false
}
@test "types: BOOLEAN" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v BOOLEAN,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` TINYINT" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, true);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -155,14 +168,14 @@ SQL
@test "types: CHAR(10)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v CHAR(10) COMMENT 'tag:1',
pk BIGINT NOT NULL,
v CHAR(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` CHAR(10) COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` CHAR(10)" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -175,17 +188,43 @@ SQL
[ "$status" -eq "1" ]
}
@test "types: DATE" {
@test "types: CHARACTER(10)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v DATE COMMENT 'tag:1',
pk BIGINT NOT NULL,
v CHARACTER(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DATE COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` CHAR(10)" ]] || false
}
@test "types: CHARACTER VARYING(10)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v CHARACTER VARYING(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` VARCHAR(10)" ]] || false
}
@test "types: DATE" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v DATE,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DATE" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, '2020-02-10 11:12:13.456789');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -207,14 +246,14 @@ SQL
@test "types: DATETIME" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v DATETIME COMMENT 'tag:1',
pk BIGINT NOT NULL,
v DATETIME,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DATETIME COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` DATETIME" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, '2020-02-10 11:12:13.456789');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -233,46 +272,82 @@ SQL
[ "$status" -eq "1" ]
}
@test "types: DECIMAL" {
skip "This is not yet persisted in dolt"
@test "types: DEC" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v DECIMAL COMMENT 'tag:1',
pk BIGINT NOT NULL,
v DEC,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DECIMAL(10, 0) COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` DECIMAL(10,0)" ]] || false
}
@test "types: DEC(9)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v DEC(9),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DECIMAL(9,0)" ]] || false
}
@test "types: DEC(9,5)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v DEC(9,5),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DECIMAL(9,5)" ]] || false
}
@test "types: DECIMAL" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v DECIMAL,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DECIMAL(10,0)" ]] || false
}
@test "types: DECIMAL(9)" {
skip "This is not yet persisted in dolt"
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v DECIMAL(9) COMMENT 'tag:1',
pk BIGINT NOT NULL,
v DECIMAL(9),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DECIMAL(9, 0) COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` DECIMAL(9,0)" ]] || false
}
@test "types: DECIMAL(9, 5)" {
skip "This is not yet persisted in dolt"
@test "types: DECIMAL(9,5)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v DECIMAL(10, 5) COMMENT 'tag:1',
pk BIGINT NOT NULL,
v DECIMAL(9,5),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DECIMAL(10, 5) COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` DECIMAL(9,5)" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 1234.56789);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -288,14 +363,14 @@ SQL
@test "types: DOUBLE" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v DOUBLE COMMENT 'tag:1',
pk BIGINT NOT NULL,
v DOUBLE,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DOUBLE COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` DOUBLE" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 1.25);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -315,18 +390,30 @@ SQL
[ "$status" -eq "1" ]
}
@test "types: ENUM('a','b','c')" {
skip "This is not yet persisted in dolt"
@test "types: DOUBLE PRECISION" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v ENUM('a','b','c') COMMENT 'tag:1',
pk BIGINT NOT NULL,
v DOUBLE PRECISION,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` ENUM('a','b','c') COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` DOUBLE" ]] || false
}
@test "types: ENUM('a','b','c')" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v ENUM('a','b','c'),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` ENUM('a','b','c')" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'a');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -341,17 +428,56 @@ SQL
[ "$status" -eq "1" ]
}
@test "types: FLOAT" {
@test "types: FIXED" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v FLOAT COMMENT 'tag:1',
pk BIGINT NOT NULL,
v FIXED,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` FLOAT COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` DECIMAL(10,0)" ]] || false
}
@test "types: FIXED(9)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v FIXED(9),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DECIMAL(9,0)" ]] || false
}
@test "types: FIXED(9,5)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v FIXED(9,5),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DECIMAL(9,5)" ]] || false
}
@test "types: FLOAT" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v FLOAT,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` FLOAT" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 1.25);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -374,14 +500,14 @@ SQL
@test "types: INT" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v INT COMMENT 'tag:1',
pk BIGINT NOT NULL,
v INT,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` INT COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` INT" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 1073741823);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -399,14 +525,14 @@ SQL
@test "types: INT UNSIGNED" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v INT UNSIGNED COMMENT 'tag:1',
pk BIGINT NOT NULL,
v INT UNSIGNED,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` INT UNSIGNED COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` INT UNSIGNED" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 2147483647);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -424,41 +550,67 @@ SQL
@test "types: INTEGER" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v INTEGER COMMENT 'tag:1',
pk BIGINT NOT NULL,
v INTEGER,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` INT COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` INT" ]] || false
}
@test "types: INTEGER UNSIGNED" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v INTEGER UNSIGNED COMMENT 'tag:1',
pk BIGINT NOT NULL,
v INTEGER UNSIGNED,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` INT UNSIGNED COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` INT UNSIGNED" ]] || false
}
@test "types: LONG" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v LONG,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` MEDIUMTEXT" ]] || false
}
@test "types: LONG VARCHAR" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v LONG VARCHAR,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` MEDIUMTEXT" ]] || false
}
@test "types: LONGBLOB" {
skip "This is not yet persisted in dolt"
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v LONGBLOB COMMENT 'tag:1',
pk BIGINT NOT NULL,
v LONGBLOB,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` LONGBLOB COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` LONGBLOB" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -472,14 +624,14 @@ SQL
@test "types: LONGTEXT" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v LONGTEXT COMMENT 'tag:1',
pk BIGINT NOT NULL,
v LONGTEXT,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` LONGTEXT COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` LONGTEXT" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -494,14 +646,14 @@ SQL
skip "This is not yet persisted in dolt"
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v MEDIUMBLOB COMMENT 'tag:1',
pk BIGINT NOT NULL,
v MEDIUMBLOB,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` MEDIUMBLOB COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` MEDIUMBLOB" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -515,14 +667,14 @@ SQL
@test "types: MEDIUMINT" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v MEDIUMINT COMMENT 'tag:1',
pk BIGINT NOT NULL,
v MEDIUMINT,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` MEDIUMINT COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` MEDIUMINT" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 4194303);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -540,14 +692,14 @@ SQL
@test "types: MEDIUMINT UNSIGNED" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v MEDIUMINT UNSIGNED COMMENT 'tag:1',
pk BIGINT NOT NULL,
v MEDIUMINT UNSIGNED,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` MEDIUMINT UNSIGNED COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` MEDIUMINT UNSIGNED" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 8388607);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -565,14 +717,14 @@ SQL
@test "types: MEDIUMTEXT" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v MEDIUMTEXT COMMENT 'tag:1',
pk BIGINT NOT NULL,
v MEDIUMTEXT,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` MEDIUMTEXT COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` MEDIUMTEXT" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -583,31 +735,147 @@ SQL
[[ "${lines[3]}" =~ " 1234567890 " ]] || false
}
@test "types: NATIONAL CHAR(10)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v NATIONAL CHAR(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` CHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci" ]] || false
}
@test "types: NATIONAL CHARACTER(10)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v NATIONAL CHARACTER(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` CHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci" ]] || false
}
@test "types: NATIONAL CHARACTER VARYING(10)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v NATIONAL CHARACTER VARYING(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` VARCHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci" ]] || false
}
@test "types: NATIONAL VARCHAR(10)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v NATIONAL VARCHAR(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` VARCHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci" ]] || false
}
@test "types: NCHAR(10)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v NCHAR(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` CHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci" ]] || false
}
@test "types: NVARCHAR(10)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v NVARCHAR(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` VARCHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci" ]] || false
}
@test "types: NUMERIC" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v NUMERIC,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DECIMAL(10,0)" ]] || false
}
@test "types: NUMERIC(9)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v NUMERIC(9),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DECIMAL(9,0)" ]] || false
}
@test "types: NUMERIC(9,5)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
v NUMERIC(9,5),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DECIMAL(9,5)" ]] || false
}
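As the assertions above show, DEC, FIXED, and NUMERIC are all aliases that dolt schema show normalizes to DECIMAL, defaulting to DECIMAL(10,0) when no precision is given. A quick sketch (table name hypothetical):

dolt sql -q "CREATE TABLE t (pk BIGINT PRIMARY KEY, a DEC, b FIXED(9), c NUMERIC(9,5))"
dolt schema show   # a DECIMAL(10,0), b DECIMAL(9,0), c DECIMAL(9,5)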
@test "types: REAL" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v REAL COMMENT 'tag:1',
pk BIGINT NOT NULL,
v REAL,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` DOUBLE COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` DOUBLE" ]] || false
}
@test "types: SET('a','b','c')" {
skip "This is not yet persisted in dolt"
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v SET('a','b','c') COMMENT 'tag:1',
pk BIGINT NOT NULL,
v SET('a','b','c'),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` SET('a','b','c') COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` SET('a','b','c')" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'b,a');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -626,14 +894,14 @@ SQL
@test "types: SMALLINT" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v SMALLINT COMMENT 'tag:1',
pk BIGINT NOT NULL,
v SMALLINT,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` SMALLINT COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` SMALLINT" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 16383);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -651,14 +919,14 @@ SQL
@test "types: SMALLINT UNSIGNED" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v SMALLINT UNSIGNED COMMENT 'tag:1',
pk BIGINT NOT NULL,
v SMALLINT UNSIGNED,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` SMALLINT UNSIGNED COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` SMALLINT UNSIGNED" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 32767);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -676,14 +944,14 @@ SQL
@test "types: TEXT" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v TEXT COMMENT 'tag:1',
pk BIGINT NOT NULL,
v TEXT,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` TEXT COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` TEXT" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -695,17 +963,16 @@ SQL
}
@test "types: TIME" {
skip "This is not yet persisted in dolt"
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v TIME COMMENT 'tag:1',
pk BIGINT NOT NULL,
v TIME,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` TIME COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` TIME" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, '11:22:33.444444');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -727,14 +994,14 @@ SQL
@test "types: TIMESTAMP" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v TIMESTAMP COMMENT 'tag:1',
pk BIGINT NOT NULL,
v TIMESTAMP,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` TIMESTAMP COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` TIMESTAMP" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, '2020-02-10 11:12:13.456789');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -757,14 +1024,14 @@ SQL
skip "This is not yet persisted in dolt"
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v TINYBLOB COMMENT 'tag:1',
pk BIGINT NOT NULL,
v TINYBLOB,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` TINYBLOB COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` TINYBLOB" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -778,14 +1045,14 @@ SQL
@test "types: TINYINT" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v TINYINT COMMENT 'tag:1',
pk BIGINT NOT NULL,
v TINYINT,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` TINYINT COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` TINYINT" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 63);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -803,14 +1070,14 @@ SQL
@test "types: TINYINT UNSIGNED" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v TINYINT UNSIGNED COMMENT 'tag:1',
pk BIGINT NOT NULL,
v TINYINT UNSIGNED,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` TINYINT UNSIGNED COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` TINYINT UNSIGNED" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 127);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -828,14 +1095,14 @@ SQL
@test "types: TINYTEXT" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v TINYTEXT COMMENT 'tag:1',
pk BIGINT NOT NULL,
v TINYTEXT,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` TINYTEXT COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` TINYTEXT" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -850,14 +1117,14 @@ SQL
skip "This is not yet persisted in dolt"
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v VARBINARY(10) COMMENT 'tag:1',
pk BIGINT NOT NULL,
v VARBINARY(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` VARBINARY(10) COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` VARBINARY(10)" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -873,14 +1140,14 @@ SQL
@test "types: VARCHAR(10)" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v VARCHAR(10) COMMENT 'tag:1',
pk BIGINT NOT NULL,
v VARCHAR(10),
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` VARCHAR(10) COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` VARCHAR(10)" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -893,17 +1160,17 @@ SQL
[ "$status" -eq "1" ]
}
@test "types: VARCHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_bin" {
@test "types: VARCHAR(10) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v VARCHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_bin COMMENT 'tag:1',
pk BIGINT NOT NULL,
v VARCHAR(10) CHARACTER SET utf32 COLLATE utf32_general_ci,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` VARCHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_bin COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` VARCHAR(10) CHARACTER SET utf32 COLLATE utf32_general_ci" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]
@@ -919,14 +1186,14 @@ SQL
@test "types: YEAR" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
v YEAR COMMENT 'tag:1',
pk BIGINT NOT NULL,
v YEAR,
PRIMARY KEY (pk)
);
SQL
run dolt schema show
[ "$status" -eq "0" ]
[[ "$output" =~ "\`v\` YEAR COMMENT 'tag:1'" ]] || false
[[ "$output" =~ "\`v\` YEAR" ]] || false
dolt sql -q "INSERT INTO test VALUES (1, 1901);"
run dolt sql -q "SELECT * FROM test"
[ "$status" -eq "0" ]

go/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
out

View File

@@ -27,14 +27,15 @@ import (
)
func isHelp(str string) bool {
switch {
case str == "-h":
return true
case strings.TrimLeft(str, "- ") == "help":
return true
str = strings.TrimSpace(str)
if str[0] != '-' {
return false
}
return false
str = strings.ToLower(strings.TrimLeft(str, "- "))
return str == "h" || str == "help"
}
func hasHelpFlag(args []string) bool {

View File

@@ -20,9 +20,10 @@ import (
"strings"
"testing"
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
"github.com/stretchr/testify/assert"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)
const (
@@ -128,3 +129,13 @@ func runCommand(root Command, commandLine string) int {
return root.Exec(context.Background(), appName, tokens[1:], nil)
}
func TestHasHelpFlag(t *testing.T) {
assert.False(t, hasHelpFlag([]string{}))
assert.False(t, hasHelpFlag([]string{"help"}))
assert.True(t, hasHelpFlag([]string{"--help"}))
assert.True(t, hasHelpFlag([]string{"-h"}))
assert.False(t, hasHelpFlag([]string{"--param", "value", "--flag", "help", "arg2", "arg3"}))
assert.True(t, hasHelpFlag([]string{"--param", "value", "-f", "--help", "arg1", "arg2"}))
assert.True(t, hasHelpFlag([]string{"--param", "value", "--flag", "-h", "arg1", "arg2"}))
}

View File

@@ -89,40 +89,48 @@ func (cmd CatCmd) Exec(ctx context.Context, commandStr string, args []string, dE
}
root, verr := commands.GetWorkingWithVErr(dEnv)
if verr == nil {
var cm *doltdb.Commit
cm, verr = commands.MaybeGetCommitWithVErr(dEnv, args[0])
if verr == nil {
if cm != nil {
args = args[1:]
var err error
root, err = cm.GetRootValue()
if err != nil {
verr = errhand.BuildDError("unable to get the root value").AddCause(err).Build()
}
}
if len(args) == 0 {
usage()
return 1
}
verr = printConflicts(ctx, root, args)
}
if verr != nil {
return exitWithVerr(verr)
}
cm, verr := commands.MaybeGetCommitWithVErr(dEnv, args[0])
if verr != nil {
cli.PrintErrln(verr.Verbose())
return exitWithVerr(verr)
}
// If no commit was resolved from the first argument, assume the args are all table names and print the conflicts
if cm == nil {
if verr := printConflicts(ctx, root, args); verr != nil {
return exitWithVerr(verr)
}
return 0
}
tblNames := args[1:]
if len(tblNames) == 0 {
cli.Println("No tables specified")
usage()
return 1
}
root, err := cm.GetRootValue()
if err != nil {
return exitWithVerr(errhand.BuildDError("unable to get the root value").AddCause(err).Build())
}
if verr = printConflicts(ctx, root, tblNames); verr != nil {
return exitWithVerr(verr)
}
return 0
}
func exitWithVerr(verr errhand.VerboseError) int {
cli.PrintErrln(verr.Verbose())
return 1
}
func printConflicts(ctx context.Context, root *doltdb.RootValue, tblNames []string) errhand.VerboseError {
if len(tblNames) == 1 && tblNames[0] == "." {
var err error
@@ -167,7 +175,7 @@ func printConflicts(ctx context.Context, root *doltdb.RootValue, tblNames []stri
nullPrinter := nullprinter.NewNullPrinter(cnfRd.GetSchema())
fwtTr := fwt.NewAutoSizingFWTTransformer(cnfRd.GetSchema(), fwt.HashFillWhenTooLong, 1000)
transforms := pipeline.NewTransformCollection(
pipeline.NewNamedTransform(nullprinter.NULL_PRINTING_STAGE, nullPrinter.ProcessRow),
pipeline.NewNamedTransform(nullprinter.NullPrintingStage, nullPrinter.ProcessRow),
pipeline.NamedTransform{Name: "fwt", Func: fwtTr.TransformToFWT},
)

View File

@@ -187,40 +187,38 @@ func manualResolve(ctx context.Context, apr *argparser.ArgParseResults, dEnv *en
invalid, notFound, updatedTbl, err := tbl.ResolveConflicts(ctx, keysToResolve)
if err != nil {
verr = errhand.BuildDError("fatal: Failed to resolve conflicts").AddCause(err).Build()
} else {
for _, key := range invalid {
cli.Println(key, "is not a valid key")
}
return errhand.BuildDError("fatal: Failed to resolve conflicts").AddCause(err).Build()
}
for _, key := range notFound {
cli.Println(key, "is not the primary key of a conflicting row")
}
for _, key := range invalid {
cli.Println(key, "is not a valid key")
}
updatedHash, err := updatedTbl.HashOf()
for _, key := range notFound {
cli.Println(key, "is not the primary key of a conflicting row")
}
updatedHash, err := updatedTbl.HashOf()
if err != nil {
return errhand.BuildDError("error: failed to get table hash").AddCause(err).Build()
}
hash, err := tbl.HashOf()
if err != nil {
return errhand.BuildDError("error: failed to get table hash").AddCause(err).Build()
}
if hash == updatedHash {
root, err := root.PutTable(ctx, tblName, updatedTbl)
if err != nil {
return errhand.BuildDError("error: failed to get table hash").AddCause(err).Build()
return errhand.BuildDError("").AddCause(err).Build()
}
hash, err := tbl.HashOf()
if err != nil {
return errhand.BuildDError("error: failed to get table hash").AddCause(err).Build()
}
if hash == updatedHash {
root, err := root.PutTable(ctx, tblName, updatedTbl)
if err != nil {
return errhand.BuildDError("").AddCause(err).Build()
}
verr = commands.UpdateWorkingWithVErr(dEnv, root)
if verr != nil {
return verr
}
if verr := commands.UpdateWorkingWithVErr(dEnv, root); verr != nil {
return verr
}
}

View File

@@ -24,4 +24,5 @@ var Commands = cli.NewSubCommandHandler("creds", "Commands for managing credenti
LsCmd{},
CheckCmd{},
UseCmd{},
ImportCmd{},
})

View File

@@ -0,0 +1,176 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package credcmds
import (
"context"
"fmt"
"io"
"os"
"github.com/liquidata-inc/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/dolt/go/cmd/dolt/commands"
"github.com/liquidata-inc/dolt/go/cmd/dolt/errhand"
eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
remotesapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/remotesapi/v1alpha1"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/creds"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env/actions"
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)
var importDocs = cli.CommandDocumentationContent{
ShortDesc: "Import a dolt credential from an existing .jwk file.",
LongDesc: `Imports a dolt credential from an existing .jwk file.
Dolt credentials are stored in the creds subdirectory of the global dolt config
directory as files, one key per file, in JWK format. This command can import
a JWK from a file or stdin and place the imported key where dolt will find it
as a valid credential.
This command will set the newly imported credential as the credential in use if
there are currently no credentials. If it does put the new credential into use,
it will call doltremoteapi to update user.name and user.email with information
from the remote user profile, unless those fields are already set in the local
dolt config.
Synopsis: []string{"[--no-profile] [{{.LessThan}}jwk_filename{{.GreaterThan}}]"},
}
type ImportCmd struct{}
// Name returns the name of the Dolt cli command. This is what is used on the command line to invoke the command
func (cmd ImportCmd) Name() string {
return "import"
}
// Description returns a description of the command
func (cmd ImportCmd) Description() string {
return importDocs.ShortDesc
}
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
func (cmd ImportCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, importDocs, ap))
}
// RequiresRepo should return false if this interface is implemented, and the command does not have the requirement
// that it be run from within a data repository directory
func (cmd ImportCmd) RequiresRepo() bool {
return false
}
// EventType returns the type of the event to log
func (cmd ImportCmd) EventType() eventsapi.ClientEventType {
return eventsapi.ClientEventType_CREDS_IMPORT
}
const noProfileFlag = "no-profile"
func (cmd ImportCmd) createArgParser() *argparser.ArgParser {
ap := argparser.NewArgParser()
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"jwk_filename", "The JWK file. If omitted, import operates on stdin."})
ap.SupportsFlag(noProfileFlag, "", "If provided, no attempt will be made to contact doltremoteapi and update user.name and user.email.")
return ap
}
// Exec executes the command
func (cmd ImportCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, importDocs, ap))
apr := cli.ParseArgs(ap, args, help)
credsDir, verr := actions.EnsureCredsDir(dEnv)
if verr != nil {
return commands.HandleVErrAndExitCode(verr, usage)
}
noprofile := apr.Contains(noProfileFlag)
var input io.ReadCloser = os.Stdin
if apr.NArg() == 1 {
var err error
input, err = dEnv.FS.OpenForRead(apr.Arg(0))
if err != nil {
verr = errhand.BuildDError("error: cannot open %s", apr.Arg(0)).AddCause(err).Build()
return commands.HandleVErrAndExitCode(verr, usage)
}
defer input.Close()
}
c, err := creds.JWKCredsRead(input)
if err != nil {
verr = errhand.BuildDError("error: could not read JWK").AddCause(err).Build()
return commands.HandleVErrAndExitCode(verr, usage)
}
if !c.IsPrivKeyValid() || !c.IsPubKeyValid() {
verr = errhand.BuildDError("error: deserialized JWK was not valid").Build()
return commands.HandleVErrAndExitCode(verr, usage)
}
_, err = creds.JWKCredsWriteToDir(dEnv.FS, credsDir, c)
if err != nil {
verr = errhand.BuildDError("error: could not write credentials to file").AddCause(err).Build()
return commands.HandleVErrAndExitCode(verr, usage)
}
cli.Println("Imported credential:", c.PubKeyBase32Str())
err = updateConfigToUseNewCredIfNoExistingCred(dEnv, c)
if err != nil {
cli.Println("Warning: could not update profile to use imported credential:", err)
}
if !noprofile {
err := updateProfileWithCredentials(ctx, dEnv, c)
if err != nil {
cli.Println("Warning: could not update profile with imported and used credentials:", err)
}
}
return 0
}
func updateProfileWithCredentials(ctx context.Context, dEnv *env.DoltEnv, c creds.DoltCreds) error {
gcfg, hasGCfg := dEnv.Config.GetConfig(env.GlobalConfig)
if !hasGCfg {
panic("Should have global config here...")
}
if _, err := gcfg.GetString(env.UserNameKey); err == nil {
// Already has a name...
return nil
}
if _, err := gcfg.GetString(env.UserEmailKey); err == nil {
// Already has an email...
return nil
}
host := dEnv.Config.GetStringOrDefault(env.RemotesApiHostKey, env.DefaultRemotesApiHost)
port := dEnv.Config.GetStringOrDefault(env.RemotesApiHostPortKey, env.DefaultRemotesApiPort)
conn, err := dEnv.GrpcConnWithCreds(fmt.Sprintf("%s:%s", *host, *port), false, c)
if err != nil {
return fmt.Errorf("error: unable to connect to server with credentials: %w", err)
}
grpcClient := remotesapi.NewCredentialsServiceClient(conn)
resp, err := grpcClient.WhoAmI(ctx, &remotesapi.WhoAmIRequest{})
if err != nil {
return fmt.Errorf("error: unable to call WhoAmI endpoint: %w", err)
}
userUpdates := map[string]string{
env.UserNameKey: resp.DisplayName,
env.UserEmailKey: resp.EmailAddress,
}
return gcfg.SetStrings(userUpdates)
}

View File

@@ -710,7 +710,7 @@ func buildPipeline(dArgs *diffArgs, joiner *rowconv.Joiner, ds *diff.DiffSplitte
nullPrinter := nullprinter.NewNullPrinter(untypedUnionSch)
fwtTr := fwt.NewAutoSizingFWTTransformer(untypedUnionSch, fwt.HashFillWhenTooLong, 1000)
transforms.AppendTransforms(
pipeline.NewNamedTransform(nullprinter.NULL_PRINTING_STAGE, nullPrinter.ProcessRow),
pipeline.NewNamedTransform(nullprinter.NullPrintingStage, nullPrinter.ProcessRow),
pipeline.NamedTransform{Name: fwtStageName, Func: fwtTr.TransformToFWT},
)
}

View File

@@ -96,6 +96,17 @@ func (cmd LoginCmd) Exec(ctx context.Context, commandStr string, args []string,
return HandleVErrAndExitCode(verr, usage)
}
// Specifies behavior of the login.
type loginBehavior int
// When logging in with newly minted credentials, they cannot be on the server
// yet. So open the browser immediately before checking the server.
var openBrowserFirst loginBehavior = 1
// When logging in with supplied credentials, they may already be associated
// with an account on the server. Check first before opening a browser.
var checkCredentialsThenOpenBrowser loginBehavior = 2
func loginWithNewCreds(ctx context.Context, dEnv *env.DoltEnv) errhand.VerboseError {
path, dc, err := actions.NewCredsFile(dEnv)
@@ -105,7 +116,7 @@ func loginWithNewCreds(ctx context.Context, dEnv *env.DoltEnv) errhand.VerboseEr
cli.Println(path)
return loginWithCreds(ctx, dEnv, dc)
return loginWithCreds(ctx, dEnv, dc, openBrowserFirst)
}
func loginWithExistingCreds(ctx context.Context, dEnv *env.DoltEnv, idOrPubKey string) errhand.VerboseError {
@@ -127,49 +138,70 @@ func loginWithExistingCreds(ctx context.Context, dEnv *env.DoltEnv, idOrPubKey s
return errhand.BuildDError("error: failed to load creds from file").AddCause(err).Build()
}
return loginWithCreds(ctx, dEnv, dc)
return loginWithCreds(ctx, dEnv, dc, checkCredentialsThenOpenBrowser)
}
func loginWithCreds(ctx context.Context, dEnv *env.DoltEnv, dc creds.DoltCreds) errhand.VerboseError {
loginUrl := dEnv.Config.GetStringOrDefault(env.AddCredsUrlKey, env.DefaultLoginUrl)
url := fmt.Sprintf("%s#%s", *loginUrl, dc.PubKeyBase32Str())
cli.Printf("Opening a browser to:\n\t%s\nPlease associate your key with your account.\n", url)
open.Start(url)
host := dEnv.Config.GetStringOrDefault(env.RemotesApiHostKey, env.DefaultRemotesApiHost)
port := dEnv.Config.GetStringOrDefault(env.RemotesApiHostPortKey, env.DefaultRemotesApiPort)
conn, err := dEnv.GrpcConnWithCreds(fmt.Sprintf("%s:%s", *host, *port), false, dc)
if err != nil {
return errhand.BuildDError("error: unable to connect to server with credentials.").AddCause(err).Build()
func loginWithCreds(ctx context.Context, dEnv *env.DoltEnv, dc creds.DoltCreds, behavior loginBehavior) errhand.VerboseError {
grpcClient, verr := getCredentialsClient(dEnv, dc)
if verr != nil {
return verr
}
grpcClient := remotesapi.NewCredentialsServiceClient(conn)
cli.Println("Checking remote server looking for key association.")
var prevMsgLen int
var whoAmI *remotesapi.WhoAmIResponse
for whoAmI == nil {
prevMsgLen = cli.DeleteAndPrint(prevMsgLen, "requesting update")
var err error
if behavior == checkCredentialsThenOpenBrowser {
whoAmI, err = grpcClient.WhoAmI(ctx, &remotesapi.WhoAmIRequest{})
}
if whoAmI == nil {
openBrowserForCredsAdd(dEnv, dc)
cli.Println("Checking remote server looking for key association.")
}
linePrinter := func() func(line string) {
prevMsgLen := 0
return func(line string) {
prevMsgLen = cli.DeleteAndPrint(prevMsgLen, line)
}
}()
for whoAmI == nil {
linePrinter("requesting update")
whoAmI, err = grpcClient.WhoAmI(ctx, &remotesapi.WhoAmIRequest{})
if err != nil {
for i := 0; i < loginRetryInterval; i++ {
prevMsgLen = cli.DeleteAndPrint(prevMsgLen, fmt.Sprintf("Retrying in %d", loginRetryInterval-i))
linePrinter(fmt.Sprintf("Retrying in %d", loginRetryInterval-i))
time.Sleep(time.Second)
}
} else {
cli.Printf("\n\n")
}
}
cli.Printf("\n\nKey successfully associated with user: %s email %s\n", whoAmI.Username, whoAmI.EmailAddress)
cli.Printf("Key successfully associated with user: %s email %s\n", whoAmI.Username, whoAmI.EmailAddress)
updateConfig(dEnv, whoAmI, dc)
return nil
}
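loginWithCreds now polls WhoAmI until the key is associated, redrawing one status line between attempts. A self-contained sketch of that poll-and-countdown shape with the gRPC call stubbed out; whoAmI, retryInterval, and the sample user name are stand-ins rather than dolt or remotesapi names:

package main

import (
	"errors"
	"fmt"
	"time"
)

const retryInterval = 3 // seconds between attempts, like loginRetryInterval

// whoAmI stands in for grpcClient.WhoAmI; it fails until the key "associates".
func whoAmI(attempt int) (string, error) {
	if attempt < 2 {
		return "", errors.New("key not associated yet")
	}
	return "some_user", nil
}

func main() {
	for attempt := 0; ; attempt++ {
		user, err := whoAmI(attempt)
		if err == nil {
			fmt.Printf("\nKey successfully associated with user: %s\n", user)
			return
		}
		for i := 0; i < retryInterval; i++ {
			// \r redraws the same terminal line; dolt uses cli.DeleteAndPrint.
			fmt.Printf("\rRetrying in %d ", retryInterval-i)
			time.Sleep(time.Second)
		}
	}
}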
func openBrowserForCredsAdd(dEnv *env.DoltEnv, dc creds.DoltCreds) {
loginUrl := dEnv.Config.GetStringOrDefault(env.AddCredsUrlKey, env.DefaultLoginUrl)
url := fmt.Sprintf("%s#%s", *loginUrl, dc.PubKeyBase32Str())
cli.Printf("Opening a browser to:\n\t%s\nPlease associate your key with your account.\n", url)
open.Start(url)
}
func getCredentialsClient(dEnv *env.DoltEnv, dc creds.DoltCreds) (remotesapi.CredentialsServiceClient, errhand.VerboseError) {
host := dEnv.Config.GetStringOrDefault(env.RemotesApiHostKey, env.DefaultRemotesApiHost)
port := dEnv.Config.GetStringOrDefault(env.RemotesApiHostPortKey, env.DefaultRemotesApiPort)
conn, err := dEnv.GrpcConnWithCreds(fmt.Sprintf("%s:%s", *host, *port), false, dc)
if err != nil {
return nil, errhand.BuildDError("error: unable to connect to server with credentials.").AddCause(err).Build()
}
return remotesapi.NewCredentialsServiceClient(conn), nil
}
func updateConfig(dEnv *env.DoltEnv, whoAmI *remotesapi.WhoAmIResponse, dCreds creds.DoltCreds) {
gcfg, hasGCfg := dEnv.Config.GetConfig(env.GlobalConfig)

View File

@@ -16,17 +16,27 @@ package commands
import (
"context"
"errors"
"fmt"
"github.com/fatih/color"
"github.com/liquidata-inc/dolt/go/cmd/dolt/cli"
eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/rebase"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/ref"
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)
const migrationPrompt = `Run "dolt migrate" to update this repository to the latest format`
const migrationMsg = "Migrating repository to the latest format"
const (
migrationPrompt = `Run "dolt migrate" to update this repository to the latest format`
migrationMsg = "Migrating repository to the latest format"
migratePushFlag = "push"
migratePullFlag = "pull"
)
type MigrateCmd struct{}
@@ -45,31 +55,218 @@ func (cmd MigrateCmd) CreateMarkdown(_ filesys.Filesys, _, _ string) error {
return nil
}
// createArgParser returns the argument parser for the migrate command
func (cmd MigrateCmd) createArgParser() *argparser.ArgParser {
ap := argparser.NewArgParser()
ap.SupportsFlag(migratePushFlag, "", "Push all migrated branches to the remote")
ap.SupportsFlag(migratePullFlag, "", "Update all remote refs for a migrated remote")
return ap
}
// EventType returns the type of the event to log
func (cmd MigrateCmd) EventType() eventsapi.ClientEventType {
return eventsapi.ClientEventType_TYPE_UNSPECIFIED
}
// Exec executes the command
func (cmd MigrateCmd) Exec(ctx context.Context, _ string, _ []string, dEnv *env.DoltEnv) int {
needed, err := rebase.NeedsUniqueTagMigration(ctx, dEnv)
if err != nil {
cli.PrintErrf(color.RedString("error checking for repository migration: %s", err.Error()))
func (cmd MigrateCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, _ := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, pushDocs, ap))
apr := cli.ParseArgs(ap, args, help)
if apr.Contains(migratePushFlag) && apr.Contains(migratePullFlag) {
cli.PrintErrf(color.RedString("options --%s and --%s are mutually exclusive", migratePushFlag, migratePullFlag))
return 1
}
if !needed {
cli.Println("Repository format is up to date")
return 0
}
cli.Println(color.YellowString(migrationMsg))
err = rebase.MigrateUniqueTags(ctx, dEnv)
var err error
switch {
case apr.Contains(migratePushFlag):
err = pushMigratedRepo(ctx, dEnv, apr)
case apr.Contains(migratePullFlag):
err = fetchMigratedRemoteBranches(ctx, dEnv, apr)
default:
err = migrateLocalRepo(ctx, dEnv)
}
if err != nil {
cli.PrintErrf("error migrating repository: %s", err.Error())
cli.PrintErrln(color.RedString(err.Error()))
return 1
}
return 0
}
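The reworked Exec dispatches on two mutually exclusive flags before choosing a migration path. A distilled sketch of that dispatch using the standard flag package in place of dolt's argparser; only the flag names mirror the diff, everything else is illustrative:

package main

import (
	"flag"
	"fmt"
	"os"
)

func main() {
	push := flag.Bool("push", false, "push all migrated branches to the remote")
	pull := flag.Bool("pull", false, "update all remote refs for a migrated remote")
	flag.Parse()

	if *push && *pull {
		fmt.Fprintln(os.Stderr, "options --push and --pull are mutually exclusive")
		os.Exit(1)
	}

	switch {
	case *push:
		fmt.Println("would push migrated branches")
	case *pull:
		fmt.Println("would fetch migrated remote branches")
	default:
		fmt.Println("would migrate the local repo")
	}
}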
func migrateLocalRepo(ctx context.Context, dEnv *env.DoltEnv) error {
localMigrationNeeded, err := rebase.NeedsUniqueTagMigration(ctx, dEnv.DoltDB)
if err != nil {
return err
}
if localMigrationNeeded {
cli.Println(color.YellowString(migrationMsg))
err = rebase.MigrateUniqueTags(ctx, dEnv)
if err != nil {
return err
}
} else {
cli.Println("Repository format is up to date")
}
remoteName := "origin"
remoteMigrated, err := remoteHasBeenMigrated(ctx, dEnv, remoteName)
if err != nil {
// if we can't check the remote, exit silently
return nil
}
if !remoteMigrated {
cli.Println(fmt.Sprintf("Remote %s has not been migrated", remoteName))
cli.Println(fmt.Sprintf("Run 'dolt migrate --push %s' to update remote", remoteName))
} else {
cli.Println(fmt.Sprintf("Remote %s has been migrated", remoteName))
cli.Println(fmt.Sprintf("Run 'dolt migrate --pull %s' to update refs", remoteName))
}
return nil
}
func pushMigratedRepo(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.ArgParseResults) error {
localMigrationNeeded, err := rebase.NeedsUniqueTagMigration(ctx, dEnv.DoltDB)
if err != nil {
return err
}
if localMigrationNeeded {
return fmt.Errorf("Local repo must be migrated before pushing, run 'dolt migrate'")
}
remoteName := "origin"
if apr.NArg() > 0 {
remoteName = apr.Arg(0)
}
remotes, err := dEnv.GetRemotes()
if err != nil {
return err
}
remote, remoteOK := remotes[remoteName]
if !remoteOK {
return fmt.Errorf("unknown remote %s", remoteName)
}
remoteMigrated, err := remoteHasBeenMigrated(ctx, dEnv, remoteName)
if err != nil {
return err
}
if remoteMigrated {
return fmt.Errorf("Remote %s has been migrated\nRun 'dolt migrate --pull' to update refs", remoteName)
} else {
// force push all branches
bb, err := dEnv.DoltDB.GetBranches(ctx)
if err != nil {
return err
}
for _, branch := range bb {
refSpec, err := ref.ParseRefSpec(branch.String())
if err != nil {
return err
}
src := refSpec.SrcRef(branch)
dest := refSpec.DestRef(src)
remoteRef, err := getTrackingRef(dest, remote)
if err != nil {
return err
}
destDB, err := remote.GetRemoteDB(ctx, dEnv.DoltDB.ValueReadWriter().Format())
if err != nil {
return err
}
cli.Println(color.BlueString(fmt.Sprintf("Pushing migrated branch %s to %s", branch.String(), remoteName)))
mode := ref.RefUpdateMode{Force: true}
err = pushToRemoteBranch(ctx, dEnv, mode, src, dest, remoteRef, dEnv.DoltDB, destDB, remote)
if err != nil {
return err
}
cli.Println()
}
}
return nil
}
func fetchMigratedRemoteBranches(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.ArgParseResults) error {
localMigrationNeeded, err := rebase.NeedsUniqueTagMigration(ctx, dEnv.DoltDB)
if err != nil {
return err
}
if localMigrationNeeded {
return fmt.Errorf("Local repo must be migrated before pulling, run 'dolt migrate'")
}
remoteName := "origin"
if apr.NArg() > 0 {
remoteName = apr.Arg(0)
}
remoteMigrated, err := remoteHasBeenMigrated(ctx, dEnv, remoteName)
if err != nil {
return err
}
if !remoteMigrated {
return fmt.Errorf("Remote %s has not been migrated\nRun 'dolt migrate --push %s' to push migration", remoteName, remoteName)
}
// force fetch all branches
remotes, _ := dEnv.GetRemotes()
r, refSpecs, err := getRefSpecs(apr.Args(), dEnv, remotes)
if err == nil {
err = fetchRefSpecs(ctx, ref.RefUpdateMode{Force: true}, dEnv, r, refSpecs)
}
return err
}
func remoteHasBeenMigrated(ctx context.Context, dEnv *env.DoltEnv, remoteName string) (bool, error) {
remotes, err := dEnv.GetRemotes()
if err != nil {
return false, errors.New("error: failed to read remotes from config.")
}
remote, remoteOK := remotes[remoteName]
if !remoteOK {
return false, fmt.Errorf("cannot find remote %s", remoteName)
}
destDB, err := remote.GetRemoteDB(ctx, dEnv.DoltDB.ValueReadWriter().Format())
if err != nil {
return false, err
}
needed, err := rebase.NeedsUniqueTagMigration(ctx, destDB)
if err != nil {
return false, err
}
return !needed, nil
}
// MigrationNeeded checks whether the repository needs a unique tags migration before the given subcommand can run
func MigrationNeeded(ctx context.Context, dEnv *env.DoltEnv, args []string) bool {
needed, err := rebase.NeedsUniqueTagMigration(ctx, dEnv)
needed, err := rebase.NeedsUniqueTagMigration(ctx, dEnv.DoltDB)
if err != nil {
cli.PrintErrf(color.RedString("error checking for repository migration: %s", err.Error()))
// ambiguous whether we need to migrate, but we should exit
@@ -83,7 +280,7 @@ func MigrationNeeded(ctx context.Context, dEnv *env.DoltEnv, args []string) bool
if len(args) > 0 {
subCmd = args[0]
}
cli.PrintErrln(color.RedString("Cannot execute dolt %s, repository format is out of date.", subCmd))
cli.PrintErrln(color.RedString("Cannot execute 'dolt %s', repository format is out of date.", subCmd))
cli.Println(migrationPrompt)
return true
}

View File

@@ -45,6 +45,7 @@ import (
dsqle "github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/table"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/table/pipeline"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed/json"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/table/untyped"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/table/untyped/csv"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/table/untyped/fwt"
@@ -130,7 +131,7 @@ func (cmd SqlCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) er
func (cmd SqlCmd) createArgParser() *argparser.ArgParser {
ap := argparser.NewArgParser()
ap.SupportsString(queryFlag, "q", "SQL query to run", "Runs a single query and exits")
ap.SupportsString(formatFlag, "r", "result output format", "How to format result output. Valid values are tabular, csv. Defaults to tabular. ")
ap.SupportsString(formatFlag, "r", "result output format", "How to format result output. Valid values are tabular, csv, json. Defaults to tabular. ")
ap.SupportsString(saveFlag, "s", "saved query name", "Used with --query, save the query to the query catalog with the name provided. Saved queries can be examined in the dolt_query_catalog system table.")
ap.SupportsString(executeFlag, "x", "saved query name", "Executes a saved query with the given name")
ap.SupportsFlag(listSavedFlag, "l", "Lists all saved queries")
@@ -306,7 +307,7 @@ func (cmd SqlCmd) Exec(ctx context.Context, commandStr string, args []string, dE
}
func execShell(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, format resultFormat) (map[string]*doltdb.RootValue, errhand.VerboseError) {
dbs := CollectDBs(mrEnv, roots, dsqle.NewDatabase)
dbs := CollectDBs(mrEnv, roots, newDatabase)
se, err := newSqlEngine(sqlCtx, mrEnv, roots, format, dbs...)
if err != nil {
return nil, errhand.VerboseErrorFromError(err)
@@ -326,7 +327,7 @@ func execShell(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*do
}
func execBatch(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, batchInput io.Reader, format resultFormat) (map[string]*doltdb.RootValue, errhand.VerboseError) {
dbs := CollectDBs(mrEnv, roots, dsqle.NewBatchedDatabase)
dbs := CollectDBs(mrEnv, roots, newBatchedDatabase)
se, err := newSqlEngine(sqlCtx, mrEnv, roots, format, dbs...)
if err != nil {
return nil, errhand.VerboseErrorFromError(err)
@@ -345,8 +346,18 @@ func execBatch(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*do
return newRoots, nil
}
type createDBFunc func(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database
func newDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database {
return dsqle.NewDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter())
}
func newBatchedDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database {
return dsqle.NewBatchedDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter())
}
func execQuery(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, query string, format resultFormat) (map[string]*doltdb.RootValue, errhand.VerboseError) {
dbs := CollectDBs(mrEnv, roots, dsqle.NewDatabase)
dbs := CollectDBs(mrEnv, roots, newDatabase)
se, err := newSqlEngine(sqlCtx, mrEnv, roots, format, dbs...)
if err != nil {
return nil, errhand.VerboseErrorFromError(err)
@@ -374,15 +385,13 @@ func execQuery(sqlCtx *sql.Context, mrEnv env.MultiRepoEnv, roots map[string]*do
return newRoots, nil
}
type createDBFunc func(name string, defRoot *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader) dsqle.Database
// CollectDBs takes a MultiRepoEnv and creates Database objects from each environment and returns a slice of these
// objects.
func CollectDBs(mrEnv env.MultiRepoEnv, roots map[string]*doltdb.RootValue, createDB createDBFunc) []dsqle.Database {
dbs := make([]dsqle.Database, 0, len(mrEnv))
_ = mrEnv.Iter(func(name string, dEnv *env.DoltEnv) (stop bool, err error) {
root := roots[name]
db := createDB(name, root, dEnv.DoltDB, dEnv.RepoState)
db := createDB(name, root, dEnv)
dbs = append(dbs, db)
return false, nil
})
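CollectDBs now accepts a createDBFunc, so call sites pick the regular or batched constructor without CollectDBs knowing the difference. A condensed sketch of the factory-injection pattern with simplified stand-ins for dsqle.Database and env.DoltEnv:

package main

import "fmt"

type database struct {
	name    string
	batched bool
}

type doltEnv struct{ name string }

type createDBFunc func(name string, dEnv *doltEnv) database

func newDatabase(name string, _ *doltEnv) database        { return database{name, false} }
func newBatchedDatabase(name string, _ *doltEnv) database { return database{name, true} }

// collectDBs builds one database per environment using the injected constructor.
func collectDBs(envs []*doltEnv, createDB createDBFunc) []database {
	dbs := make([]database, 0, len(envs))
	for _, e := range envs {
		dbs = append(dbs, createDB(e.name, e))
	}
	return dbs
}

func main() {
	envs := []*doltEnv{{"dolt"}, {"other"}}
	fmt.Println(collectDBs(envs, newDatabase))
	fmt.Println(collectDBs(envs, newBatchedDatabase))
}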
@@ -452,8 +461,10 @@ func getFormat(format string) (resultFormat, errhand.VerboseError) {
return formatTabular, nil
case "csv":
return formatCsv, nil
case "json":
return formatJson, nil
default:
return formatTabular, errhand.BuildDError("Invalid argument for --result-format. Valid values are tabular,csv").Build()
return formatTabular, errhand.BuildDError("Invalid argument for --result-format. Valid values are tabular, csv, json").Build()
}
}
@@ -630,8 +641,11 @@ func runShell(ctx *sql.Context, se *sqlEngine, mrEnv env.MultiRepoEnv) error {
// start the doltsql shell
historyFile := filepath.Join(".sqlhistory") // history file written to working dir
initialPrompt := fmt.Sprintf("%s> ", ctx.GetCurrentDatabase())
initialMultilinePrompt := fmt.Sprintf(fmt.Sprintf("%%%ds", len(initialPrompt)), "-> ")
rlConf := readline.Config{
Prompt: fmt.Sprintf("%s>", ctx.GetCurrentDatabase()),
Prompt: initialPrompt,
Stdout: cli.CliOut,
Stderr: cli.CliOut,
HistoryFile: historyFile,
@@ -648,7 +662,7 @@ func runShell(ctx *sql.Context, se *sqlEngine, mrEnv env.MultiRepoEnv) error {
}
shell := ishell.NewUninterpreted(&shellConf)
shell.SetMultiPrompt(" -> ")
shell.SetMultiPrompt(initialMultilinePrompt)
// TODO: update completer on create / drop / alter statements
completer, err := newCompleter(ctx, currEnv)
if err != nil {
@@ -696,7 +710,9 @@ func runShell(ctx *sql.Context, se *sqlEngine, mrEnv env.MultiRepoEnv) error {
shell.Println(color.RedString(err.Error()))
}
shell.SetPrompt(fmt.Sprintf("%s>", ctx.GetCurrentDatabase()))
currPrompt := fmt.Sprintf("%s> ", ctx.GetCurrentDatabase())
shell.SetPrompt(currPrompt)
shell.SetMultiPrompt(fmt.Sprintf(fmt.Sprintf("%%%ds", len(currPrompt)), "-> "))
})
shell.Run()
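The nested Sprintf above is a width-splicing trick: the inner call builds a %Ns verb from the prompt's length so the multiline continuation right-aligns under the prompt. A tiny sketch of just that trick:

package main

import "fmt"

func main() {
	prompt := "mydb> "
	// Inner Sprintf yields "%6s"; outer Sprintf pads "-> " to the prompt width.
	multiline := fmt.Sprintf(fmt.Sprintf("%%%ds", len(prompt)), "-> ")
	fmt.Printf("%q\n", prompt)    // "mydb> "
	fmt.Printf("%q\n", multiline) // "   -> "
}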
@@ -1031,16 +1047,16 @@ func mergeResultIntoStats(statement sqlparser.Statement, rowIter sql.RowIter, s
} else if err != nil {
return err
} else {
numRowsUpdated := row[0].(int64)
s.unflushedEdits += int(numRowsUpdated)
s.unprintedEdits += int(numRowsUpdated)
okResult := row[0].(sql.OkResult)
s.unflushedEdits += int(okResult.RowsAffected)
s.unprintedEdits += int(okResult.RowsAffected)
switch statement.(type) {
case *sqlparser.Insert:
s.rowsInserted += int(numRowsUpdated)
s.rowsInserted += int(okResult.RowsAffected)
case *sqlparser.Delete:
s.rowsDeleted += int(numRowsUpdated)
s.rowsDeleted += int(okResult.RowsAffected)
case *sqlparser.Update:
s.rowsUpdated += int(numRowsUpdated)
s.rowsUpdated += int(okResult.RowsAffected)
}
}
}
@@ -1051,6 +1067,7 @@ type resultFormat byte
const (
formatTabular resultFormat = iota
formatCsv
formatJson
)
type sqlEngine struct {
@@ -1145,7 +1162,12 @@ func (se *sqlEngine) query(ctx *sql.Context, query string) (sql.Schema, sql.RowI
// Pretty prints the output of the new SQL engine
func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema, rowIter sql.RowIter) error {
var chanErr error
if isOkResult(sqlSch) {
return printOKResult(ctx, rowIter)
}
nbf := types.Format_Default
doltSch, err := dsqle.SqlSchemaToDoltResultSchema(sqlSch)
if err != nil {
return err
@@ -1159,31 +1181,15 @@ func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema,
rowChannel := make(chan row.Row)
p := pipeline.NewPartialPipeline(pipeline.InFuncForChannel(rowChannel))
nbf := types.Format_Default
go func() {
defer close(rowChannel)
var sqlRow sql.Row
for sqlRow, chanErr = rowIter.Next(); chanErr == nil; sqlRow, chanErr = rowIter.Next() {
taggedVals := make(row.TaggedValues)
for i, col := range sqlRow {
if col != nil {
taggedVals[uint64(i)] = types.String(fmt.Sprintf("%v", col))
}
}
// Parts of the pipeline depend on the output format, such as how we print null values and whether we pad strings.
switch se.resultFormat {
case formatCsv:
nullPrinter := nullprinter.NewNullPrinterWithNullString(untypedSch, "")
p.AddStage(pipeline.NewNamedTransform(nullprinter.NullPrintingStage, nullPrinter.ProcessRow))
var r row.Row
r, chanErr = row.New(nbf, untypedSch, taggedVals)
if chanErr == nil {
rowChannel <- r
}
}
}()
nullPrinter := nullprinter.NewNullPrinter(untypedSch)
p.AddStage(pipeline.NewNamedTransform(nullprinter.NULL_PRINTING_STAGE, nullPrinter.ProcessRow))
if se.resultFormat == formatTabular {
case formatTabular:
nullPrinter := nullprinter.NewNullPrinter(untypedSch)
p.AddStage(pipeline.NewNamedTransform(nullprinter.NullPrintingStage, nullPrinter.ProcessRow))
autoSizeTransform := fwt.NewAutoSizingFWTTransformer(untypedSch, fwt.PrintAllWhenTooLong, 10000)
p.AddStage(pipeline.NamedTransform{Name: fwtStageName, Func: autoSizeTransform.TransformToFWT})
}
@@ -1198,6 +1204,8 @@ func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema,
wr, err = tabular.NewTextTableWriter(cliWr, untypedSch)
case formatCsv:
wr, err = csv.NewCSVWriter(cliWr, untypedSch, csv.NewCSVInfo())
case formatJson:
wr, err = json.NewJSONWriter(cliWr, untypedSch)
default:
panic("unimplemented output format type")
}
@@ -1233,19 +1241,80 @@ func (se *sqlEngine) prettyPrintResults(ctx context.Context, sqlSch sql.Schema,
p.InjectRow(fwtStageName, r)
}
// For some output formats, we want to convert everything to strings to be processed by the pipeline. For others,
// we want to leave types alone and let the writer figure out how to format it for output.
var rowFn func(r sql.Row) (row.Row, error)
switch se.resultFormat {
case formatJson:
rowFn = func(r sql.Row) (r2 row.Row, err error) {
return dsqle.SqlRowToDoltRow(nbf, r, doltSch)
}
default:
rowFn = func(r sql.Row) (row.Row, error) {
taggedVals := make(row.TaggedValues)
for i, col := range r {
if col != nil {
taggedVals[uint64(i)] = types.String(fmt.Sprintf("%v", col))
}
}
return row.New(nbf, untypedSch, taggedVals)
}
}
var iterErr error
// Read rows off the row iter and pass them to the pipeline channel
go func() {
defer close(rowChannel)
var sqlRow sql.Row
for sqlRow, iterErr = rowIter.Next(); iterErr == nil; sqlRow, iterErr = rowIter.Next() {
var r row.Row
r, iterErr = rowFn(sqlRow)
if iterErr == nil {
rowChannel <- r
}
}
}()
p.Start()
if err := p.Wait(); err != nil {
return fmt.Errorf("error processing results: %v", err)
}
if chanErr != io.EOF {
return fmt.Errorf("error processing results: %v", chanErr)
if iterErr != io.EOF {
return fmt.Errorf("error processing results: %v", iterErr)
}
return nil
}
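For tabular and csv output, the default rowFn above flattens every column to its %v string keyed by column index, leaving nil columns absent so the null printer can render them later. A standalone sketch of that conversion, with a plain map standing in for row.TaggedValues and types.String:

package main

import "fmt"

// stringify maps each non-nil column to its %v rendering, keyed by index.
func stringify(sqlRow []interface{}) map[uint64]string {
	tagged := make(map[uint64]string)
	for i, col := range sqlRow {
		if col != nil { // nil columns stay absent so a null printer can fill them
			tagged[uint64(i)] = fmt.Sprintf("%v", col)
		}
	}
	return tagged
}

func main() {
	fmt.Println(stringify([]interface{}{1, nil, "abc", 3.5}))
}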
var ErrNotNaked = fmt.Errorf("not a naked query")
func printOKResult(ctx context.Context, iter sql.RowIter) error {
row, err := iter.Next()
defer iter.Close()
if err != nil {
return err
}
if okResult, ok := row[0].(sql.OkResult); ok {
rowNoun := "row"
if okResult.RowsAffected != 1 {
rowNoun = "rows"
}
cli.Printf("Query OK, %d %s affected\n", okResult.RowsAffected, rowNoun)
if okResult.Info != nil {
cli.Printf("%s\n", okResult.Info)
}
}
return nil
}
func isOkResult(sch sql.Schema) bool {
return sch.Equals(sql.OkResultSchema)
}
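printOKResult above pluralizes "row" only when the affected count is not exactly 1. A tiny runnable sketch with sql.OkResult replaced by a plain struct:

package main

import "fmt"

// okResult stands in for sql.OkResult from go-mysql-server.
type okResult struct{ RowsAffected uint64 }

func printOK(res okResult) {
	rowNoun := "row"
	if res.RowsAffected != 1 {
		rowNoun = "rows"
	}
	fmt.Printf("Query OK, %d %s affected\n", res.RowsAffected, rowNoun)
}

func main() {
	printOK(okResult{RowsAffected: 1}) // Query OK, 1 row affected
	printOK(okResult{RowsAffected: 4}) // Query OK, 4 rows affected
}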
// Checks if the query is a naked delete and then deletes all rows if so. Returns true if it did so, false otherwise.
func (se *sqlEngine) checkThenDeleteAllRows(ctx *sql.Context, s *sqlparser.Delete) bool {
@@ -1282,7 +1351,8 @@ func (se *sqlEngine) checkThenDeleteAllRows(ctx *sql.Context, s *sqlparser.Delet
return false
}
printRowIter := sql.RowsToRowIter(sql.NewRow(rowData.Len()))
result := sql.OkResult{RowsAffected: rowData.Len()}
printRowIter := sql.RowsToRowIter(sql.NewRow(result))
emptyMap, err := types.NewMap(ctx, root.VRW())
if err != nil {
@@ -1299,7 +1369,7 @@ func (se *sqlEngine) checkThenDeleteAllRows(ctx *sql.Context, s *sqlparser.Delet
return false
}
_ = se.prettyPrintResults(ctx, sql.Schema{{Name: "updated", Type: sql.Uint64}}, printRowIter)
_ = se.prettyPrintResults(ctx, sql.OkResultSchema, printRowIter)
db, err := se.getDB(dbName)
if err != nil {

View File

@@ -369,7 +369,7 @@ func TestInsert(t *testing.T) {
}
for _, test := range tests {
t.Run(test.query, func(t *testing.T) {
t.Run(test.name, func(t *testing.T) {
dEnv := createEnvWithSeedData(t)
args := []string{"-q", test.query}

View File

@@ -16,6 +16,7 @@ package sqlserver
import (
"context"
"fmt"
"net"
"strconv"
"time"
@@ -106,7 +107,7 @@ func Serve(ctx context.Context, serverConfig *ServerConfig, serverController *Se
}
}
dbs := commands.CollectDBs(mrEnv, roots, dsqle.NewDatabase)
dbs := commands.CollectDBs(mrEnv, roots, newAutoCommitDatabase)
for _, db := range dbs {
sqlEngine.AddDatabase(db)
}
@@ -122,47 +123,10 @@ func Serve(ctx context.Context, serverConfig *ServerConfig, serverController *Se
Auth: userAuth,
ConnReadTimeout: timeout,
ConnWriteTimeout: timeout,
Version: fmt.Sprintf("Dolt version %s", serverConfig.Version),
},
sqlEngine,
func(ctx context.Context, conn *mysql.Conn, host string) (sql.Session, *sql.IndexRegistry, *sql.ViewRegistry, error) {
mysqlSess := sql.NewSession(host, conn.RemoteAddr().String(), conn.User, conn.ConnectionID)
doltSess, err := dsqle.NewSessionWithDefaultRoots(mysqlSess, dbsAsDSQLDBs(sqlEngine.Catalog.AllDatabases())...)
if err != nil {
return nil, nil, nil, err
}
ir := sql.NewIndexRegistry()
vr := sql.NewViewRegistry()
sqlCtx := sql.NewContext(
ctx,
sql.WithIndexRegistry(ir),
sql.WithViewRegistry(vr),
sql.WithSession(doltSess))
dbs := commands.CollectDBs(mrEnv, roots, dsqle.NewDatabase)
for _, db := range dbs {
err := db.SetRoot(sqlCtx, db.GetDefaultRoot())
if err != nil {
return nil, nil, nil, err
}
err = dsqle.RegisterSchemaFragments(sqlCtx, db, db.GetDefaultRoot())
if err != nil {
cli.PrintErr(err)
return nil, nil, nil, err
}
}
sqlCtx.RegisterIndexDriver(dsqle.NewDoltIndexDriver(dbs...))
err = ir.LoadIndexes(sqlCtx, sqlEngine.Catalog.AllDatabases())
if err != nil {
return nil, nil, nil, err
}
return doltSess, ir, vr, nil
},
newSessionBuilder(sqlEngine),
)
if startError != nil {
@@ -179,6 +143,53 @@ func Serve(ctx context.Context, serverConfig *ServerConfig, serverController *Se
return
}
func newSessionBuilder(sqlEngine *sqle.Engine) server.SessionBuilder {
return func(ctx context.Context, conn *mysql.Conn, host string) (sql.Session, *sql.IndexRegistry, *sql.ViewRegistry, error) {
mysqlSess := sql.NewSession(host, conn.RemoteAddr().String(), conn.User, conn.ConnectionID)
doltSess, err := dsqle.NewSessionWithDefaultRoots(mysqlSess, dbsAsDSQLDBs(sqlEngine.Catalog.AllDatabases())...)
if err != nil {
return nil, nil, nil, err
}
ir := sql.NewIndexRegistry()
vr := sql.NewViewRegistry()
sqlCtx := sql.NewContext(
ctx,
sql.WithIndexRegistry(ir),
sql.WithViewRegistry(vr),
sql.WithSession(doltSess))
dbs := dbsAsDSQLDBs(sqlEngine.Catalog.AllDatabases())
for _, db := range dbs {
err := db.LoadRootFromRepoState(sqlCtx)
if err != nil {
return nil, nil, nil, err
}
err = dsqle.RegisterSchemaFragments(sqlCtx, db, db.GetDefaultRoot())
if err != nil {
cli.PrintErr(err)
return nil, nil, nil, err
}
}
// TODO: this shouldn't need to happen every session
sqlCtx.RegisterIndexDriver(dsqle.NewDoltIndexDriver(dbs...))
err = ir.LoadIndexes(sqlCtx, sqlEngine.Catalog.AllDatabases())
if err != nil {
return nil, nil, nil, err
}
return doltSess, ir, vr, nil
}
}
func newAutoCommitDatabase(name string, defRoot *doltdb.RootValue, dEnv *env.DoltEnv) dsqle.Database {
return dsqle.NewDatabase(name, defRoot, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter())
}
func dbsAsDSQLDBs(dbs []sql.Database) []dsqle.Database {
dsqlDBs := make([]dsqle.Database, 0, len(dbs))

View File

@@ -46,7 +46,7 @@ var (
func TestServerArgs(t *testing.T) {
serverController := CreateServerController()
go func() {
SqlServerImpl(context.Background(), "dolt sql-server", []string{
startServer(context.Background(), "test", "dolt sql-server", []string{
"-H", "localhost",
"-P", "15200",
"-u", "username",
@@ -84,7 +84,7 @@ func TestServerBadArgs(t *testing.T) {
t.Run(strings.Join(test, " "), func(t *testing.T) {
serverController := CreateServerController()
go func(serverController *ServerController) {
SqlServerImpl(context.Background(), "dolt sql-server", test, env, serverController)
startServer(context.Background(), "test", "dolt sql-server", test, env, serverController)
}(serverController)
// In the event that a test fails, we need to prevent a test from hanging due to a running server
err := serverController.WaitForStart()

View File

@@ -49,7 +49,9 @@ Currently, only {{.EmphasisLeft}}SELECT{{.EmphasisRight}} statements are operati
},
}
type SqlServerCmd struct{}
type SqlServerCmd struct {
VersionStr string
}
// Name returns the name of the Dolt cli command. This is what is used on the command line to invoke the command
func (cmd SqlServerCmd) Name() string {
@@ -95,11 +97,12 @@ func (cmd SqlServerCmd) RequiresRepo() bool {
// Exec executes the command
func (cmd SqlServerCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
return SqlServerImpl(ctx, commandStr, args, dEnv, nil)
return startServer(ctx, cmd.VersionStr, commandStr, args, dEnv, nil)
}
func SqlServerImpl(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv, serverController *ServerController) int {
func startServer(ctx context.Context, versionStr, commandStr string, args []string, dEnv *env.DoltEnv, serverController *ServerController) int {
serverConfig := DefaultServerConfig()
serverConfig.Version = versionStr
ap := createArgParser(serverConfig)
help, _ := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, sqlServerDocs, ap))

View File

@@ -84,23 +84,26 @@ func (cmd RmCmd) Exec(ctx context.Context, commandStr string, args []string, dEn
}
working, verr := commands.GetWorkingWithVErr(dEnv)
if verr == nil {
verr := commands.ValidateTablesWithVErr(apr.Args(), working)
if verr == nil {
verr = removeTables(ctx, dEnv, apr.Args(), working)
}
if verr != nil {
return exitWithVerr(verr)
}
if verr != nil {
cli.PrintErrln(verr.Verbose())
return 1
if verr := commands.ValidateTablesWithVErr(apr.Args(), working); verr != nil {
return exitWithVerr(verr)
}
if verr := removeTables(ctx, dEnv, apr.Args(), working); verr != nil {
return exitWithVerr(verr)
}
return 0
}
func exitWithVerr(verr errhand.VerboseError) int {
cli.PrintErrln(verr.Verbose())
return 1
}
func removeTables(ctx context.Context, dEnv *env.DoltEnv, tables []string, working *doltdb.RootValue) errhand.VerboseError {
working, err := working.RemoveTables(ctx, tables...)

View File

@@ -41,7 +41,7 @@ import (
)
const (
Version = "0.15.2"
Version = "0.16.0"
)
var dumpDocsCommand = &commands.DumpDocsCmd{}
@@ -52,7 +52,7 @@ var doltCommand = cli.NewSubCommandHandler("dolt", "it's git for data", []cli.Co
commands.ResetCmd{},
commands.CommitCmd{},
commands.SqlCmd{VersionStr: Version},
sqlserver.SqlServerCmd{},
sqlserver.SqlServerCmd{VersionStr: Version},
commands.LogCmd{},
commands.DiffCmd{},
commands.BlameCmd{},

View File

@@ -103,6 +103,7 @@ const (
ClientEventType_BLAME ClientEventType = 45
ClientEventType_CREDS_CHECK ClientEventType = 46
ClientEventType_CREDS_USE ClientEventType = 47
ClientEventType_CREDS_IMPORT ClientEventType = 48
)
var ClientEventType_name = map[int32]string{
@@ -154,6 +155,7 @@ var ClientEventType_name = map[int32]string{
45: "BLAME",
46: "CREDS_CHECK",
47: "CREDS_USE",
48: "CREDS_IMPORT",
}
var ClientEventType_value = map[string]int32{
@@ -205,6 +207,7 @@ var ClientEventType_value = map[string]int32{
"BLAME": 45,
"CREDS_CHECK": 46,
"CREDS_USE": 47,
"CREDS_IMPORT": 48,
}
func (x ClientEventType) String() string {
@@ -309,53 +312,54 @@ func init() {
}
var fileDescriptor_d970d881fa70959f = []byte{
// 764 bytes of a gzipped FileDescriptorProto
// 772 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x54, 0xcb, 0x72, 0xdb, 0x36,
0x14, 0xad, 0xfc, 0x90, 0x65, 0xf8, 0x75, 0x8d, 0xd8, 0x79, 0xb6, 0x8e, 0xeb, 0xa6, 0x2f, 0xb5,
0x16, 0x27, 0xd3, 0x99, 0x6e, 0xba, 0x82, 0xc0, 0x2b, 0x09, 0x13, 0x90, 0x60, 0x00, 0xd0, 0x4a,
0xba, 0xc1, 0xc8, 0x0a, 0xeb, 0xb0, 0xa3, 0x48, 0xaa, 0x44, 0x7b, 0xa6, 0x3f, 0xd1, 0x6f, 0xee,
0x80, 0x6c, 0x44, 0x5b, 0x9b, 0xee, 0x78, 0xcf, 0x3d, 0xe7, 0x00, 0x38, 0xbc, 0x00, 0xf9, 0xf5,
0xc3, 0x6c, 0x52, 0x04, 0xcb, 0x6c, 0x71, 0x97, 0x8f, 0xb3, 0x65, 0x90, 0xdd, 0x65, 0xd3, 0x62,
0x39, 0x9a, 0xe7, 0xc1, 0xdd, 0xeb, 0xd1, 0x64, 0xfe, 0x71, 0xf4, 0xba, 0x82, 0xdc, 0x78, 0x36,
0x5d, 0x16, 0xa3, 0x69, 0xb1, 0xec, 0xcc, 0x17, 0xb3, 0x62, 0x46, 0xcf, 0xbd, 0xae, 0xf3, 0x59,
0xd7, 0x59, 0xe9, 0x3a, 0x9f, 0x75, 0xed, 0x01, 0x69, 0x25, 0x93, 0x51, 0xf1, 0xc7, 0x6c, 0xf1,
0x89, 0x3e, 0x25, 0x27, 0x89, 0x64, 0xb6, 0xa7, 0x74, 0xe4, 0xd2, 0xd8, 0x24, 0xc8, 0x45, 0x4f,
0x60, 0x08, 0x5f, 0xd0, 0x5d, 0xb2, 0x2d, 0x45, 0x9c, 0xbe, 0x83, 0x06, 0xdd, 0x23, 0x3b, 0x43,
0x11, 0x87, 0x6a, 0x68, 0x60, 0x83, 0x12, 0xd2, 0x0c, 0x99, 0x1e, 0x8a, 0x18, 0x36, 0xdb, 0xff,
0x34, 0xc9, 0x11, 0x9f, 0xe4, 0xd9, 0xb4, 0x40, 0xbf, 0x8c, 0xfd, 0x7b, 0x9e, 0xd1, 0x13, 0x02,
0xf6, 0x7d, 0x82, 0x6b, 0x6e, 0x2d, 0xb2, 0x25, 0x62, 0x61, 0xa1, 0xe1, 0xf5, 0xc6, 0x32, 0x9b,
0x7a, 0xaf, 0x1d, 0xb2, 0xc9, 0xc2, 0x10, 0x36, 0xfd, 0x62, 0x1a, 0x0d, 0x5a, 0xd8, 0xf2, 0x7d,
0xae, 0xa2, 0x48, 0x58, 0xd8, 0xf6, 0x7d, 0xf3, 0x56, 0x42, 0x93, 0x1e, 0x12, 0x62, 0xde, 0x4a,
0x67, 0x50, 0x5f, 0xa1, 0x86, 0x1d, 0xdf, 0x90, 0xaa, 0x0f, 0x2d, 0xef, 0x1b, 0x8a, 0x5e, 0x0f,
0x76, 0xbd, 0x45, 0x84, 0xba, 0x8f, 0x40, 0xbc, 0x45, 0x57, 0xb3, 0x98, 0x0f, 0x60, 0x8f, 0xee,
0x93, 0x16, 0x1f, 0x20, 0x7f, 0xa3, 0x52, 0x0b, 0xfb, 0xbe, 0xa3, 0x31, 0x52, 0x16, 0xe1, 0xc0,
0x4b, 0x93, 0xd4, 0x0c, 0xe0, 0xb0, 0xfa, 0x92, 0x12, 0x8e, 0xbc, 0x49, 0x0f, 0x2d, 0x1f, 0x00,
0xf8, 0x4f, 0x2e, 0x55, 0x8c, 0x70, 0x5c, 0x46, 0xa1, 0xfa, 0x22, 0x06, 0xea, 0xa3, 0xb8, 0x42,
0x6d, 0x84, 0x8a, 0xe1, 0x51, 0xb5, 0xd5, 0xb8, 0x27, 0xfa, 0x70, 0x42, 0x9b, 0x64, 0x43, 0x1a,
0x38, 0x2d, 0x8f, 0xc7, 0x07, 0x18, 0x31, 0x78, 0x4c, 0x81, 0xec, 0x5b, 0xd6, 0x95, 0xe8, 0x44,
0x94, 0x28, 0x6d, 0xe1, 0x49, 0x8d, 0xe0, 0xbb, 0x12, 0x79, 0x5a, 0x23, 0x5c, 0x23, 0xb3, 0x08,
0xcf, 0xfc, 0x8e, 0x2b, 0x44, 0x47, 0xf0, 0xbc, 0xae, 0xa2, 0x2b, 0x78, 0x51, 0x57, 0x3c, 0x81,
0x2f, 0x6b, 0xad, 0x41, 0x89, 0xdc, 0xc2, 0x57, 0xf4, 0x98, 0x1c, 0x54, 0x48, 0x92, 0x5a, 0xa7,
0xd5, 0x10, 0xce, 0x6a, 0x92, 0x8e, 0x4a, 0xe4, 0x25, 0x3d, 0x20, 0xbb, 0x5c, 0x63, 0x68, 0x5c,
0x8c, 0x43, 0x38, 0x2f, 0x13, 0x2a, 0x4b, 0x1d, 0xc1, 0xd7, 0x75, 0x25, 0x0d, 0x5c, 0x94, 0x95,
0x8a, 0x7b, 0x8e, 0x33, 0x0b, 0xdf, 0x78, 0xab, 0xb2, 0xd2, 0x68, 0x94, 0xbc, 0x42, 0x78, 0x45,
0x5f, 0x92, 0x17, 0x55, 0x9e, 0x2c, 0x11, 0xae, 0x8f, 0xd6, 0x69, 0x4c, 0x94, 0x8b, 0xd0, 0xb2,
0x90, 0x59, 0x06, 0xdf, 0xfa, 0xf9, 0xaa, 0x09, 0x03, 0x66, 0x1c, 0x1f, 0xa4, 0xf1, 0x1b, 0x03,
0xdf, 0xd1, 0x57, 0xe4, 0xfc, 0xa1, 0x34, 0x54, 0xc3, 0x58, 0x2a, 0x16, 0x3a, 0xa9, 0x38, 0xb3,
0x42, 0xc5, 0x06, 0xbe, 0xa7, 0x17, 0xe4, 0xec, 0x21, 0x2b, 0x4d, 0xd6, 0x38, 0x3f, 0xf8, 0x89,
0xab, 0x39, 0x1a, 0xbb, 0xcc, 0x20, 0xfc, 0x48, 0x29, 0x39, 0xbc, 0x87, 0x2a, 0x65, 0xa1, 0xfd,
0x90, 0xf9, 0xdf, 0x94, 0xfd, 0x44, 0xcf, 0xc8, 0xf3, 0x1a, 0x95, 0xc2, 0x58, 0x57, 0x05, 0xd6,
0x13, 0x12, 0x0d, 0xfc, 0xec, 0x7f, 0x7f, 0x57, 0xb2, 0x08, 0xe1, 0x92, 0x1e, 0x91, 0xbd, 0x2a,
0x9d, 0x72, 0xa6, 0xa0, 0x53, 0x67, 0x99, 0x1a, 0x84, 0xa0, 0xfd, 0x27, 0x69, 0x45, 0x59, 0xb1,
0xc8, 0xc7, 0x22, 0xa4, 0x8f, 0x09, 0x8d, 0xd0, 0x6a, 0xc1, 0xd7, 0xae, 0xc2, 0x09, 0x81, 0xee,
0x7b, 0x8b, 0x66, 0x75, 0x60, 0x0c, 0xa1, 0x41, 0x9f, 0x90, 0x47, 0xab, 0x00, 0x22, 0xe3, 0x50,
0xb2, 0xc4, 0x60, 0x08, 0x1b, 0xbe, 0x71, 0xef, 0x1c, 0x09, 0x77, 0xa8, 0xb5, 0xd2, 0xb0, 0xd9,
0x46, 0xb2, 0xc7, 0x8a, 0x62, 0x91, 0x5f, 0xdf, 0x16, 0x99, 0x08, 0xe9, 0x33, 0x72, 0xca, 0xac,
0xd5, 0xa2, 0x9b, 0xda, 0xf5, 0xcb, 0x77, 0x4a, 0x8e, 0x2b, 0x0b, 0x97, 0x6a, 0xe9, 0xca, 0xf1,
0x44, 0xd8, 0xb8, 0xd8, 0x6a, 0x35, 0xa0, 0xd1, 0xbe, 0x24, 0xdb, 0x6c, 0x3e, 0xaf, 0xf6, 0xcb,
0x92, 0xc4, 0x89, 0x70, 0x4d, 0xbd, 0x4f, 0x5a, 0x1e, 0x0f, 0x95, 0xb4, 0xd0, 0xe8, 0x0e, 0x7f,
0x4f, 0x6f, 0xf2, 0xe2, 0xe3, 0xed, 0x75, 0x67, 0x3c, 0xfb, 0x14, 0x4c, 0xf2, 0xbf, 0x6e, 0xf3,
0x0f, 0xa3, 0x62, 0x74, 0x99, 0x4f, 0xc7, 0x41, 0xf9, 0x62, 0xdd, 0xcc, 0x82, 0x9b, 0x6c, 0x1a,
0x94, 0x8f, 0x51, 0xf0, 0x7f, 0x6f, 0xd8, 0x6f, 0x2b, 0xe8, 0xba, 0x59, 0x2a, 0x7e, 0xf9, 0x37,
0x00, 0x00, 0xff, 0xff, 0xbf, 0x8f, 0x66, 0xbe, 0xf8, 0x04, 0x00, 0x00,
0x16, 0x9b, 0xe9, 0x4c, 0x37, 0x5d, 0x41, 0xe0, 0x95, 0x84, 0x09, 0x48, 0x30, 0x00, 0x68, 0x25,
0xdd, 0x60, 0x64, 0x85, 0x75, 0xd8, 0x51, 0x24, 0x55, 0xa2, 0x3d, 0xd3, 0xbf, 0xe9, 0xa7, 0x76,
0x40, 0x26, 0x62, 0xac, 0x4d, 0x77, 0x3c, 0xe7, 0xde, 0x73, 0x08, 0x1c, 0x5c, 0x80, 0xfc, 0xf6,
0x6e, 0x36, 0x29, 0x82, 0x65, 0xb6, 0xb8, 0xcb, 0xc7, 0xd9, 0x32, 0xc8, 0xee, 0xb2, 0x69, 0xb1,
0x1c, 0xcd, 0xf3, 0xe0, 0xee, 0xe5, 0x68, 0x32, 0x7f, 0x3f, 0x7a, 0x59, 0x51, 0x6e, 0x3c, 0x9b,
0x2e, 0x8b, 0xd1, 0xb4, 0x58, 0x76, 0xe6, 0x8b, 0x59, 0x31, 0xa3, 0xe7, 0x5e, 0xd7, 0xf9, 0xa4,
0xeb, 0xac, 0x74, 0x9d, 0x4f, 0xba, 0xf6, 0x80, 0xb4, 0x92, 0xc9, 0xa8, 0xf8, 0x73, 0xb6, 0xf8,
0x40, 0x1f, 0x93, 0x93, 0x44, 0x32, 0xdb, 0x53, 0x3a, 0x72, 0x69, 0x6c, 0x12, 0xe4, 0xa2, 0x27,
0x30, 0x84, 0x2f, 0xe8, 0x2e, 0xd9, 0x96, 0x22, 0x4e, 0xdf, 0x40, 0x83, 0xee, 0x91, 0x9d, 0xa1,
0x88, 0x43, 0x35, 0x34, 0xb0, 0x41, 0x09, 0x69, 0x86, 0x4c, 0x0f, 0x45, 0x0c, 0x9b, 0xed, 0x7f,
0x9b, 0xe4, 0x88, 0x4f, 0xf2, 0x6c, 0x5a, 0xa0, 0xff, 0x8d, 0xfd, 0x67, 0x9e, 0xd1, 0x13, 0x02,
0xf6, 0x6d, 0x82, 0x6b, 0x6e, 0x2d, 0xb2, 0x25, 0x62, 0x61, 0xa1, 0xe1, 0xf5, 0xc6, 0x32, 0x9b,
0x7a, 0xaf, 0x1d, 0xb2, 0xc9, 0xc2, 0x10, 0x36, 0xfd, 0xcf, 0x34, 0x1a, 0xb4, 0xb0, 0xe5, 0xeb,
0x5c, 0x45, 0x91, 0xb0, 0xb0, 0xed, 0xeb, 0xe6, 0xb5, 0x84, 0x26, 0x3d, 0x24, 0xc4, 0xbc, 0x96,
0xce, 0xa0, 0xbe, 0x42, 0x0d, 0x3b, 0xbe, 0x20, 0x55, 0x1f, 0x5a, 0xde, 0x37, 0x14, 0xbd, 0x1e,
0xec, 0x7a, 0x8b, 0x08, 0x75, 0x1f, 0x81, 0x78, 0x8b, 0xae, 0x66, 0x31, 0x1f, 0xc0, 0x1e, 0xdd,
0x27, 0x2d, 0x3e, 0x40, 0xfe, 0x4a, 0xa5, 0x16, 0xf6, 0x7d, 0x45, 0x63, 0xa4, 0x2c, 0xc2, 0x81,
0x97, 0x26, 0xa9, 0x19, 0xc0, 0x61, 0xf5, 0x25, 0x25, 0x1c, 0x79, 0x93, 0x1e, 0x5a, 0x3e, 0x00,
0xf0, 0x9f, 0x5c, 0xaa, 0x18, 0xe1, 0xb8, 0x8c, 0x42, 0xf5, 0x45, 0x0c, 0xd4, 0x47, 0x71, 0x85,
0xda, 0x08, 0x15, 0xc3, 0x83, 0x6a, 0xa9, 0x71, 0x4f, 0xf4, 0xe1, 0x84, 0x36, 0xc9, 0x86, 0x34,
0x70, 0x5a, 0x6e, 0x8f, 0x0f, 0x30, 0x62, 0xf0, 0x90, 0x02, 0xd9, 0xb7, 0xac, 0x2b, 0xd1, 0x89,
0x28, 0x51, 0xda, 0xc2, 0xa3, 0x9a, 0xc1, 0x37, 0x25, 0xf3, 0xb8, 0x66, 0xb8, 0x46, 0x66, 0x11,
0x9e, 0xf8, 0x15, 0x57, 0x8c, 0x8e, 0xe0, 0x69, 0x8d, 0xa2, 0x2b, 0x78, 0x56, 0x23, 0x9e, 0xc0,
0x97, 0xb5, 0xd6, 0xa0, 0x44, 0x6e, 0xe1, 0x2b, 0x7a, 0x4c, 0x0e, 0x2a, 0x26, 0x49, 0xad, 0xd3,
0x6a, 0x08, 0x67, 0x75, 0x93, 0x8e, 0x4a, 0xe6, 0x39, 0x3d, 0x20, 0xbb, 0x5c, 0x63, 0x68, 0x5c,
0x8c, 0x43, 0x38, 0x2f, 0x13, 0x2a, 0xa1, 0x8e, 0xe0, 0xeb, 0x1a, 0x49, 0x03, 0x17, 0x25, 0x52,
0x71, 0xcf, 0x71, 0x66, 0xe1, 0x1b, 0x6f, 0x55, 0x22, 0x8d, 0x46, 0xc9, 0x2b, 0x84, 0x17, 0xf4,
0x39, 0x79, 0x56, 0xe5, 0xc9, 0x12, 0xe1, 0xfa, 0x68, 0x9d, 0xc6, 0x44, 0xb9, 0x08, 0x2d, 0x0b,
0x99, 0x65, 0xf0, 0xad, 0x9f, 0xaf, 0xba, 0x61, 0xc0, 0x8c, 0xe3, 0x83, 0x34, 0x7e, 0x65, 0xe0,
0x3b, 0xfa, 0x82, 0x9c, 0xdf, 0x97, 0x86, 0x6a, 0x18, 0x4b, 0xc5, 0x42, 0x27, 0x15, 0x67, 0x56,
0xa8, 0xd8, 0xc0, 0xf7, 0xf4, 0x82, 0x9c, 0xdd, 0xef, 0x4a, 0x93, 0xb5, 0x9e, 0x1f, 0xfc, 0xc4,
0xd5, 0x3d, 0x1a, 0xbb, 0xcc, 0x20, 0xfc, 0x48, 0x29, 0x39, 0xfc, 0x8c, 0x55, 0xca, 0x42, 0xfb,
0x7e, 0xe7, 0xc7, 0x29, 0xfb, 0x89, 0x9e, 0x91, 0xa7, 0x35, 0x2b, 0x85, 0xb1, 0xae, 0x0a, 0xac,
0x27, 0x24, 0x1a, 0xf8, 0xd9, 0x1f, 0x7f, 0x57, 0xb2, 0x08, 0xe1, 0x92, 0x1e, 0x91, 0xbd, 0x2a,
0x9d, 0x72, 0xa6, 0xa0, 0x53, 0x67, 0x99, 0x1a, 0x84, 0xa0, 0x4c, 0xa8, 0x84, 0x1f, 0x4f, 0xfc,
0x97, 0xf6, 0x5f, 0xa4, 0x15, 0x65, 0xc5, 0x22, 0x1f, 0x8b, 0x90, 0x3e, 0x24, 0x34, 0x42, 0xab,
0x05, 0x5f, 0xbb, 0x1c, 0x27, 0x04, 0xba, 0x6f, 0x2d, 0x9a, 0x55, 0x04, 0x18, 0x42, 0x83, 0x3e,
0x22, 0x0f, 0x56, 0x91, 0x44, 0xc6, 0xa1, 0x64, 0x89, 0xc1, 0x10, 0x36, 0x7c, 0xe1, 0xb3, 0x9d,
0x25, 0xdc, 0xa1, 0xd6, 0x4a, 0xc3, 0x66, 0x1b, 0xc9, 0x1e, 0x2b, 0x8a, 0x45, 0x7e, 0x7d, 0x5b,
0x64, 0x22, 0xa4, 0x4f, 0xc8, 0x29, 0xb3, 0x56, 0x8b, 0x6e, 0x6a, 0xd7, 0xaf, 0xe3, 0x29, 0x39,
0xae, 0x2c, 0x5c, 0xaa, 0xa5, 0x2b, 0x07, 0x16, 0x61, 0xe3, 0x62, 0xab, 0xd5, 0x80, 0x46, 0xfb,
0x92, 0x6c, 0xb3, 0xf9, 0xbc, 0x5a, 0x2f, 0x4b, 0x12, 0x27, 0xc2, 0x35, 0xf5, 0x3e, 0x69, 0x79,
0x3e, 0x54, 0xd2, 0x42, 0xa3, 0x3b, 0xfc, 0x23, 0xbd, 0xc9, 0x8b, 0xf7, 0xb7, 0xd7, 0x9d, 0xf1,
0xec, 0x43, 0x30, 0xc9, 0xff, 0xbe, 0xcd, 0xdf, 0x8d, 0x8a, 0xd1, 0x65, 0x3e, 0x1d, 0x07, 0xe5,
0x1b, 0x76, 0x33, 0x0b, 0x6e, 0xb2, 0x69, 0x50, 0x3e, 0x4f, 0xc1, 0xff, 0xbd, 0x6a, 0xbf, 0xaf,
0xa8, 0xeb, 0x66, 0xa9, 0xf8, 0xf5, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x04, 0xe0, 0xf2, 0x36,
0x0a, 0x05, 0x00, 0x00,
}

View File

@@ -87,10 +87,8 @@ require (
replace github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi => ./gen/proto/dolt/services/eventsapi
replace github.com/src-d/go-mysql-server => github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200407175239-21fb18d4d9fd
replace github.com/src-d/go-mysql-server => github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414214346-0c65dac7ca1e
//replace github.com/src-d/go-mysql-server => ../../go-mysql-server
replace vitess.io/vitess => github.com/liquidata-inc/vitess v0.0.0-20200407071440-54a487aaf7d9
replace vitess.io/vitess => github.com/liquidata-inc/vitess v0.0.0-20200413233505-a88cc54bd1ee
go 1.13

View File

@@ -359,7 +359,11 @@ github.com/krishicks/yaml-patch v0.0.10/go.mod h1:Sm5TchwZS6sm7RJoyg87tzxm2ZcKzd
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.3.0 h1:/qkRGz8zljWiDcFvgpwUpwIAPu3r07TDvs3Rws+o/pU=
github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/liquidata-inc/go-mysql-server v0.4.1-0.20200311072715-b12ae9d0cc97 h1:Zr78cjOfa0bM4X5JA692xhx3QvFPTsJiM0bD0xl/22Q=
github.com/liquidata-inc/go-mysql-server v0.4.1-0.20200311072715-b12ae9d0cc97/go.mod h1:Lh0pg7jnO08HxFm6oj6gtcSTUeeOTu4Spt50Aeo2mes=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200330231002-2ac5a85cf8d6 h1:iKET+xfMh3NaiiIbrMBLi+MJ9hwmm++7DBtPGfarf50=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200330231002-2ac5a85cf8d6/go.mod h1:TCTrDbzIA05e8zV3SW+nsjc1LCR58GRSOIcF32lJ+Qc=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200403150612-34c67410dcd4 h1:UIksBT7bRENT38ErKSz+auGLc7a5tDpCHwNhuajoJbU=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200403150612-34c67410dcd4/go.mod h1:TCTrDbzIA05e8zV3SW+nsjc1LCR58GRSOIcF32lJ+Qc=
@@ -367,14 +371,36 @@ github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200403171307-83ac7e7158e2 h1
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200403171307-83ac7e7158e2/go.mod h1:TCTrDbzIA05e8zV3SW+nsjc1LCR58GRSOIcF32lJ+Qc=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200407175239-21fb18d4d9fd h1:SGGh7+XPqPYw3LaIK4VUvy/81Za1Y3p29lh4WDMtXh0=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200407175239-21fb18d4d9fd/go.mod h1:xu1cUi3vfWVJZ/9mQl9f8sdfJGobnS7kIucM3lfWIPk=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200412072052-a6583959dafb/go.mod h1:n8M6jtUZ5myu3O9kfcLqRnXPBVkqbkoBPe45mnYCKd0=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200412232521-1e406e8056fb/go.mod h1:n8M6jtUZ5myu3O9kfcLqRnXPBVkqbkoBPe45mnYCKd0=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414052025-88d3eff3f7f5 h1:Liiz/stNuLoWg1j1A/yGChITW4H/IbEyFwPGZPk+B8M=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414052025-88d3eff3f7f5/go.mod h1:n8M6jtUZ5myu3O9kfcLqRnXPBVkqbkoBPe45mnYCKd0=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414183459-0b63a0868140 h1:rxT0Pkt2ZLS0P4m8scQ3TATRjKYcntF6F0X5/yHcIDg=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414183459-0b63a0868140/go.mod h1:tK/saWoda2x+KXyGsdVariMdfVOsjmRgQF2pbl4Mr1E=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414214346-0c65dac7ca1e h1:cYKHqocy3oNkPmfayDwIswVy14Dcp8q5FFSYLS4FvIA=
github.com/liquidata-inc/go-mysql-server v0.5.1-0.20200414214346-0c65dac7ca1e/go.mod h1:tK/saWoda2x+KXyGsdVariMdfVOsjmRgQF2pbl4Mr1E=
github.com/liquidata-inc/ishell v0.0.0-20190514193646-693241f1f2a0 h1:phMgajKClMUiIr+hF2LGt8KRuUa2Vd2GI1sNgHgSXoU=
github.com/liquidata-inc/ishell v0.0.0-20190514193646-693241f1f2a0/go.mod h1:YC1rI9k5gx8D02ljlbxDfZe80s/iq8bGvaaQsvR+qxs=
github.com/liquidata-inc/mmap-go v1.0.3 h1:2LndAeAtup9rpvUmu4wZSYCsjCQ0Zpc+NqE+6+PnT7g=
github.com/liquidata-inc/mmap-go v1.0.3/go.mod h1:w0doE7jfkuDEZyxb/zD3VWnRaQBYx1uDTS816kH8HoY=
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200225183643-358992a611e1 h1:BDpmbvQ9I8npWe7TOzQcGkrn7EYHrW1hJtTd9h8MNZA=
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200225183643-358992a611e1/go.mod h1:kKRVtyuomkqz15YFRpS0OT8kpsU8y/F3jyiZtvALdKU=
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15 h1:H3RwcYfzkdW4kFh7znTUopcX3XZqnFXm6pcmxSy0mNo=
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15/go.mod h1:kKRVtyuomkqz15YFRpS0OT8kpsU8y/F3jyiZtvALdKU=
github.com/liquidata-inc/vitess v0.0.0-20200102230944-f3410911d61f h1:fqsJy7h3D3esm9tYSzU7LV6h2tfifdYTanPuDL5LJ1A=
github.com/liquidata-inc/vitess v0.0.0-20200102230944-f3410911d61f/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY=
github.com/liquidata-inc/vitess v0.0.0-20200318153456-e0b079da3f54 h1:LR/OEhgIYVQuo5a/lxr8Ps76AZ1FNWUgNANfKCA0XSQ=
github.com/liquidata-inc/vitess v0.0.0-20200318153456-e0b079da3f54/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY=
github.com/liquidata-inc/vitess v0.0.0-20200407071440-54a487aaf7d9 h1:eaE6IFxMviaDSNFaKlTbNPA/+0Vhj/XgV6lG2SaoAWM=
github.com/liquidata-inc/vitess v0.0.0-20200407071440-54a487aaf7d9/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY=
github.com/liquidata-inc/vitess v0.0.0-20200410001601-55d11bea14ca h1:m09m0bRpTa3PCxMNcnRf5AiVK7ME0PVIci1vwuciZ5w=
github.com/liquidata-inc/vitess v0.0.0-20200410001601-55d11bea14ca/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY=
github.com/liquidata-inc/vitess v0.0.0-20200413233505-a88cc54bd1ee h1:r8ApUMNHHEyzRhPbuIHrWbr7FOTW4Yo5Sm1HpOEzPrQ=
github.com/liquidata-inc/vitess v0.0.0-20200413233505-a88cc54bd1ee/go.mod h1:vn/QvIl/1+N6+qjheejcLt8jmX2kQSQwFinzZuoY1VY=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0=

View File

@@ -188,7 +188,7 @@ func PrintSqlTableDiffs(ctx context.Context, r1, r2 *doltdb.RootValue, wr io.Wri
transforms := pipeline.NewTransformCollection()
nullPrinter := nullprinter.NewNullPrinter(sch)
transforms.AppendTransforms(
pipeline.NewNamedTransform(nullprinter.NULL_PRINTING_STAGE, nullPrinter.ProcessRow),
pipeline.NewNamedTransform(nullprinter.NullPrintingStage, nullPrinter.ProcessRow),
)
sink, err := NewSQLDiffSink(wr, sch, tblName)
if err != nil {

View File

@@ -277,9 +277,11 @@ func (root *RootValue) GenerateTagsForNewColumns(ctx context.Context, tableName
}
newTags := make([]uint64, len(newColNames))
existingTags := set.NewUint64Set(rootSuperSchema.AllTags())
for i := range newTags {
newTags[i] = schema.AutoGenerateTag(rootSuperSchema, tableName, existingColKinds, newColNames[i], newColKinds[i])
newTags[i] = schema.AutoGenerateTag(existingTags, tableName, existingColKinds, newColNames[i], newColKinds[i])
existingColKinds = append(existingColKinds, newColKinds[i])
existingTags.Add(newTags[i])
}
return newTags, nil
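The fix above threads an existingTags set through the loop, recording each freshly generated tag before the next column is processed so two new columns cannot collide. A toy sketch of that bookkeeping; autoGenerateTag here is a stand-in that probes upward instead of hashing like schema.AutoGenerateTag:

package main

import "fmt"

// autoGenerateTag probes upward from a seed until it finds an unused tag,
// honoring the same used-set contract as the real generator.
func autoGenerateTag(used map[uint64]bool, seed uint64) uint64 {
	tag := seed
	for used[tag] {
		tag++
	}
	return tag
}

func main() {
	used := map[uint64]bool{0: true, 1: true} // tags already claimed by the super schema
	var newTags []uint64
	for i := 0; i < 3; i++ {
		t := autoGenerateTag(used, uint64(i))
		newTags = append(newTags, t)
		used[t] = true // record it so the next column cannot collide
	}
	fmt.Println(newTags) // [2 3 4]
}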

View File

@@ -111,7 +111,7 @@ func (q Query) CommandString() string { return fmt.Sprintf("query %s", q.Query)
func (q Query) Exec(t *testing.T, dEnv *env.DoltEnv) error {
root, err := dEnv.WorkingRoot(context.Background())
require.NoError(t, err)
sqlDb := dsqle.NewDatabase("dolt", root, dEnv.DoltDB, nil)
sqlDb := dsqle.NewDatabase("dolt", root, dEnv.DoltDB, nil, nil)
engine, sqlCtx, err := dsqle.NewTestEngine(context.Background(), sqlDb, root)
require.NoError(t, err)

View File

@@ -21,8 +21,8 @@ import (
"github.com/liquidata-inc/dolt/go/store/types"
)
var initialReadme = "This is a repository level README. Either edit it, add it, and commit it, or remove the file."
var initialLicense = "This is a repository level LICENSE. Either edit it, add it, and commit it, or remove the file."
var initialReadme = "This is a repository level README. Either edit it, add it, and commit it, or remove the file.\n"
var initialLicense = "This is a repository level LICENSE. Either edit it, add it, and commit it, or remove the file.\n"
type Docs []doltdb.DocDetails

View File

@@ -364,8 +364,16 @@ func (dEnv *DoltEnv) UpdateWorkingRoot(ctx context.Context, newRoot *doltdb.Root
return doltdb.ErrNomsIO
}
dEnv.RepoState.Working = h.String()
err = dEnv.RepoState.Save(dEnv.FS)
return dEnv.RepoStateWriter().SetWorkingHash(ctx, h)
}
type repoStateWriter struct {
dEnv *DoltEnv
}
func (r *repoStateWriter) SetWorkingHash(ctx context.Context, h hash.Hash) error {
r.dEnv.RepoState.Working = h.String()
err := r.dEnv.RepoState.Save(r.dEnv.FS)
if err != nil {
return ErrStateUpdate
@@ -374,6 +382,10 @@ func (dEnv *DoltEnv) UpdateWorkingRoot(ctx context.Context, newRoot *doltdb.Root
return nil
}
func (dEnv *DoltEnv) RepoStateWriter() RepoStateWriter {
return &repoStateWriter{dEnv}
}
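The new repoStateWriter gives callers a narrow write-only view of repo state. A condensed, standalone sketch of the pattern; the names mirror the diff but the types are simplified stand-ins that skip the filesystem save:

package main

import "fmt"

type Hash string

// RepoStateWriter is the narrow interface extracted in the diff above.
type RepoStateWriter interface {
	SetWorkingHash(h Hash) error
}

type repoState struct{ Working string }

type doltEnv struct{ state repoState }

type repoStateWriter struct{ dEnv *doltEnv }

func (r *repoStateWriter) SetWorkingHash(h Hash) error {
	r.dEnv.state.Working = string(h)
	return nil // the real implementation persists RepoState to the filesystem
}

func (d *doltEnv) RepoStateWriter() RepoStateWriter {
	return &repoStateWriter{d}
}

func main() {
	env := &doltEnv{}
	_ = env.RepoStateWriter().SetWorkingHash("abc123")
	fmt.Println(env.state.Working)
}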
func (dEnv *DoltEnv) HeadRoot(ctx context.Context) (*doltdb.RootValue, error) {
commit, err := dEnv.DoltDB.Resolve(ctx, dEnv.RepoState.CWBHeadSpec())

View File

@@ -23,6 +23,7 @@ import (
)
const (
homeEnvVar = "HOME"
doltRootPathEnvVar = "DOLT_ROOT_PATH"
credsDir = "creds"
@@ -46,7 +47,9 @@ func GetCurrentUserHomeDir() (string, error) {
if doltRootPath, ok := os.LookupEnv(doltRootPathEnvVar); ok && doltRootPath != "" {
return doltRootPath, nil
}
if homeEnvPath, ok := os.LookupEnv(homeEnvVar); ok && homeEnvPath != "" {
return homeEnvPath, nil
}
if usr, err := user.Current(); err != nil {
return "", err
} else {
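GetCurrentUserHomeDir resolves the home directory through a fallback chain: DOLT_ROOT_PATH first, then HOME, then the OS user database. A self-contained sketch of that chain:

package main

import (
	"fmt"
	"os"
	"os/user"
)

func currentUserHomeDir() (string, error) {
	if p, ok := os.LookupEnv("DOLT_ROOT_PATH"); ok && p != "" {
		return p, nil
	}
	if p, ok := os.LookupEnv("HOME"); ok && p != "" {
		return p, nil
	}
	usr, err := user.Current() // last resort: the OS user database
	if err != nil {
		return "", err
	}
	return usr.HomeDir, nil
}

func main() {
	home, err := currentUserHomeDir()
	fmt.Println(home, err)
}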

View File

@@ -15,6 +15,7 @@
package env
import (
"context"
"encoding/json"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
@@ -30,6 +31,13 @@ type RepoStateReader interface {
StagedHash() hash.Hash
}
type RepoStateWriter interface {
// SetCWBHeadRef(context.Context, ref.DoltRef) error
// SetCWBHeadSpec(context.Context, *doltdb.CommitSpec) error
SetWorkingHash(context.Context, hash.Hash) error
// SetStagedHash(context.Context, hash.Hash) error
}
type BranchConfig struct {
Merge ref.MarshalableRef `json:"head"`
Remote string `json:"remote"`

View File

@@ -531,7 +531,7 @@ func checkSchema(t *testing.T, r *doltdb.RootValue, tableName string, expectedSc
}
func checkRows(t *testing.T, ddb *doltdb.DoltDB, root *doltdb.RootValue, tableName string, sch schema.Schema, selectQuery string, expectedRows []row.Row) {
sqlDb := dsqle.NewDatabase("dolt", root, ddb, nil)
sqlDb := dsqle.NewDatabase("dolt", root, ddb, nil, nil)
engine, sqlCtx, err := dsqle.NewTestEngine(context.Background(), sqlDb, root)
require.NoError(t, err)

View File

@@ -16,7 +16,6 @@ package rebase
import (
"context"
"errors"
"fmt"
"time"
@@ -27,16 +26,22 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/encoding"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed/noms"
"github.com/liquidata-inc/dolt/go/libraries/utils/set"
ndiff "github.com/liquidata-inc/dolt/go/store/diff"
"github.com/liquidata-inc/dolt/go/store/hash"
"github.com/liquidata-inc/dolt/go/store/types"
)
const diffBufSize = 4096
// { tableName -> { oldTag -> newTag }}
type TagMapping map[string]map[uint64]uint64
// NeedsUniqueTagMigration checks if a repo was created before the unique tags constraint and migrates it if necessary.
func NeedsUniqueTagMigration(ctx context.Context, dEnv *env.DoltEnv) (bool, error) {
bb, err := dEnv.DoltDB.GetBranches(ctx)
// NeedsUniqueTagMigration checks if a repo needs a unique tags migration
func NeedsUniqueTagMigration(ctx context.Context, ddb *doltdb.DoltDB) (bool, error) {
bb, err := ddb.GetBranches(ctx)
if err != nil {
return false, err
@@ -49,12 +54,22 @@ func NeedsUniqueTagMigration(ctx context.Context, dEnv *env.DoltEnv) (bool, erro
return false, err
}
c, err := dEnv.DoltDB.Resolve(ctx, cs)
c, err := ddb.Resolve(ctx, cs)
if err != nil {
return false, err
}
// check if this head commit is an init commit
n, err := c.NumParents()
if err != nil {
return false, err
}
if n == 0 {
// init commits don't need migration
continue
}
r, err := c.GetRootValue()
if err != nil {
@@ -107,30 +122,47 @@ func MigrateUniqueTags(ctx context.Context, dEnv *env.DoltEnv) error {
headCommits = append(headCommits, cm)
}
// DFS the commit graph to find a unique new tag for every existing tag in every table in history
globalMapping := make(map[string]map[uint64]uint64)
replay := func(ctx context.Context, root, parentRoot, rebasedParentRoot *doltdb.RootValue) (*doltdb.RootValue, error) {
err := buildGlobalTagMapping(ctx, root, parentRoot, rebasedParentRoot, globalMapping)
if err != nil {
return nil, err
}
return root, nil
}
_, err = rebase(ctx, ddb, replay, entireHistory, headCommits...)
if err != nil {
return err
}
if len(branches) != len(headCommits) {
panic("error in uniquifying tags")
}
newCommits, err := TagRebaseForCommits(ctx, ddb, globalMapping, headCommits...)
builtTagMappings := make(map[hash.Hash]TagMapping)
// DFS the commit graph to find a unique new tag for every existing tag in every table in history
replay := func(ctx context.Context, root, parentRoot, rebasedParentRoot *doltdb.RootValue) (rebaseRoot *doltdb.RootValue, err error) {
h, err := rebasedParentRoot.HashOf()
if err != nil {
return nil, err
}
parentTagMapping, found := builtTagMappings[h]
if !found {
parentTagMapping = make(TagMapping)
if !rootsMustBeEqual(parentRoot, rebasedParentRoot) {
return nil, fmt.Errorf("error rebasing, roots not equal")
}
}
tagMapping, err := buildTagMapping(ctx, root, rebasedParentRoot, parentTagMapping)
if err != nil {
return nil, err
}
rebasedRoot, err := replayCommitWithNewTag(ctx, root, parentRoot, rebasedParentRoot, tagMapping)
if err != nil {
return nil, err
}
rh, err := rebasedRoot.HashOf()
if err != nil {
return nil, err
}
builtTagMappings[rh] = tagMapping
return rebasedRoot, nil
}
newCommits, err := rebase(ctx, ddb, replay, entireHistory, headCommits...)
if err != nil {
return err
@@ -255,14 +287,35 @@ func TagRebaseForCommits(ctx context.Context, ddb *doltdb.DoltDB, tm TagMapping,
func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParentRoot *doltdb.RootValue, tm TagMapping) (*doltdb.RootValue, error) {
newRoot := root
for tblName, tableMapping := range tm {
tableNames, err := doltdb.UnionTableNames(ctx, root, rebasedParentRoot)
tbl, found, err := newRoot.GetTable(ctx, tblName)
if err != nil {
return nil, err
}
newRoot := rebasedParentRoot
for _, tblName := range tableNames {
tbl, found, err := root.GetTable(ctx, tblName)
if err != nil {
return nil, err
}
if !found {
// table was deleted since parent commit
ok, err := newRoot.HasTable(ctx, tblName)
if err != nil {
return nil, err
}
if !ok {
return nil, fmt.Errorf("error rebasing, table %s not found in rebasedParentRoot", tblName)
}
newRoot, err = newRoot.RemoveTables(ctx, tblName)
if err != nil {
return nil, err
}
continue
}
@@ -271,16 +324,21 @@ func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParent
return nil, err
}
// tags may not exist in this commit
tagExists := false
for oldTag := range tableMapping {
if _, found := sch.GetAllCols().GetByTag(oldTag); found {
tagExists = true
break
}
// only rebase this table if we have a mapping for it, and at least one of the
// tags in the mapping is present in its schema at this commit
tableNeedsRebasing := false
tableMapping, found := tm[tblName]
if found {
_ = sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
if _, found = tableMapping[tag]; found {
tableNeedsRebasing = true
}
return tableNeedsRebasing, nil
})
}
if !tagExists {
continue
if !tableNeedsRebasing {
newRoot, err = newRoot.PutTable(ctx, tblName, tbl)
if err != nil {
return nil, err
}
continue
}
parentTblName := tblName
@@ -305,7 +363,7 @@ func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParent
rebasedSch := schema.SchemaFromCols(schCC)
// super schema rebase
ss, _, err := newRoot.GetSuperSchema(ctx, tblName)
ss, _, err := root.GetSuperSchema(ctx, tblName)
if err != nil {
return nil, err
@@ -357,7 +415,7 @@ func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParent
return nil, err
}
rebasedRows, err := replayRowDiffs(ctx, rebasedSch, rows, parentRows, rebasedParentRows, tableMapping)
rebasedRows, err := replayRowDiffs(ctx, rebasedParentRoot.VRW(), rebasedSch, rows, parentRows, rebasedParentRows, tableMapping)
if err != nil {
return nil, err
@@ -379,14 +437,14 @@ func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParent
return nil, err
}
rebasedRoot, err := newRoot.PutSuperSchema(ctx, tblName, rebasedSS)
newRoot, err = newRoot.PutSuperSchema(ctx, tblName, rebasedSS)
if err != nil {
return nil, err
}
// create new RootValue by overwriting table with rebased rows and schema
newRoot, err = rebasedRoot.PutTable(ctx, tblName, rebasedTable)
newRoot, err = newRoot.PutTable(ctx, tblName, rebasedTable)
if err != nil {
return nil, err
@@ -395,53 +453,68 @@ func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParent
return newRoot, nil
}
func replayRowDiffs(ctx context.Context, rSch schema.Schema, rows, parentRows, rebasedParentRows types.Map, tagMapping map[uint64]uint64) (types.Map, error) {
func replayRowDiffs(ctx context.Context, vrw types.ValueReadWriter, rSch schema.Schema, rows, parentRows, rebasedParentRows types.Map, tagMapping map[uint64]uint64) (types.Map, error) {
// we will apply modified differences to the rebasedParent
rebasedRowEditor := rebasedParentRows.Edit()
unmappedTags := set.NewUint64Set(rSch.GetAllCols().Tags)
tm := make(map[uint64]uint64)
for ot, nt := range tagMapping {
tm[ot] = nt
unmappedTags.Remove(nt)
}
for _, t := range unmappedTags.AsSlice() {
tm[t] = t
}
ad := diff.NewAsyncDiffer(1024)
nmu := noms.NewNomsMapUpdater(ctx, vrw, rebasedParentRows, rSch, func(stats types.AppliedEditStats) {})
ad := diff.NewAsyncDiffer(diffBufSize)
// get all differences (including merges) between original commit and its parent
ad.Start(ctx, rows, parentRows)
defer ad.Close()
for {
diffs, err := ad.GetDiffs(1, time.Second)
if ad.IsDone() {
break
}
diffs, err := ad.GetDiffs(diffBufSize/2, time.Second)
if err != nil {
return types.EmptyMap, err
}
if len(diffs) != 1 {
panic("only a single diff requested, multiple returned. bug in AsyncDiffer")
}
for _, d := range diffs {
if d.KeyValue == nil {
panic("Unexpected commit diff result: with nil key value encountered")
}
d := diffs[0]
if d.KeyValue == nil {
panic("Unexpected commit diff result: with nil key value encountered")
}
key, newVal, err := modifyDifferenceTag(d, rows.Format(), rSch, tm)
key, newVal, err := modifyDifferenceTag(d, rows.Format(), rSch, tagMapping)
if err != nil {
return types.EmptyMap, err
}
switch d.ChangeType {
case types.DiffChangeAdded:
err = nmu.WriteEdit(ctx, key, newVal)
case types.DiffChangeRemoved:
err = nmu.WriteEdit(ctx, key, nil)
case types.DiffChangeModified:
err = nmu.WriteEdit(ctx, key, newVal)
}
switch d.ChangeType {
case types.DiffChangeAdded:
rebasedRowEditor.Set(key, newVal)
case types.DiffChangeRemoved:
rebasedRowEditor.Remove(key)
case types.DiffChangeModified:
rebasedRowEditor.Set(key, newVal)
if err != nil {
return types.EmptyMap, err
}
}
}
return rebasedRowEditor.Map(ctx)
err := nmu.Close(ctx)
if err != nil {
return types.EmptyMap, err
}
return *nmu.GetMap(), nil
}
func dropValsForDeletedColumns(ctx context.Context, nbf *types.NomsBinFormat, rows types.Map, sch, parentSch schema.Schema) (types.Map, error) {
@@ -449,11 +522,11 @@ func dropValsForDeletedColumns(ctx context.Context, nbf *types.NomsBinFormat, ro
return rows, nil
}
eq, err := schema.SchemasAreEqual(sch, parentSch)
deletedCols, err := typed.TypedColCollectionSubtraction(parentSch, sch)
if err != nil {
return types.EmptyMap, err
}
if eq {
if deletedCols.Size() == 0 {
return rows, nil
}
@@ -518,45 +591,99 @@ func dropValsForDeletedColumns(ctx context.Context, nbf *types.NomsBinFormat, ro
return prunedRowData, nil
}
func modifyDifferenceTag(d *ndiff.Difference, nbf *types.NomsBinFormat, rSch schema.Schema, tagMapping map[uint64]uint64) (key types.LesserValuable, val types.Valuable, err error) {
ktv, err := row.ParseTaggedValues(d.KeyValue.(types.Tuple))
func modifyDifferenceTag(d *ndiff.Difference, nbf *types.NomsBinFormat, rSch schema.Schema, tagMapping map[uint64]uint64) (keyTup types.LesserValuable, valTup types.Valuable, err error) {
k := d.KeyValue.(types.Tuple)
if k.Len()%2 != 0 {
panic("A tagged tuple must have an even column count.")
}
kItr, err := k.Iterator()
if err != nil {
return nil, nil, err
}
idx := 0
kk := make([]types.Value, k.Len())
for kItr.HasMore() {
_, tag, err := kItr.Next()
if err != nil {
return nil, nil, err
}
// kItr.HasMore() is true here because of the even-length assertion above.
_, val, err := kItr.Next()
if err != nil {
return nil, nil, err
}
if tag.Kind() != types.UintKind {
panic("Invalid tagged tuple must have uint tags.")
}
if val != types.NullValue {
newTag := tagMapping[uint64(tag.(types.Uint))]
kk[idx] = types.Uint(newTag)
kk[idx+1] = val
}
idx += 2
}
keyTup, err = types.NewTuple(nbf, kk...)
if err != nil {
return nil, nil, err
}
newKtv := make(row.TaggedValues)
for tag, val := range ktv {
newTag, found := tagMapping[tag]
if !found {
newTag = tag
}
newKtv[newTag] = val
if d.NewValue == nil {
return keyTup, nil, nil
}
key = newKtv.NomsTupleForTags(nbf, rSch.GetPKCols().Tags, true)
v := d.NewValue.(types.Tuple)
if v.Len()%2 != 0 {
panic("A tagged tuple must have an even column count.")
}
val = d.NewValue
if d.NewValue != nil {
tv, err := row.ParseTaggedValues(d.NewValue.(types.Tuple))
vItr, err := v.Iterator()
if err != nil {
return nil, nil, err
}
idx = 0
vv := make([]types.Value, v.Len())
for vItr.HasMore() {
_, tag, err := vItr.Next()
if err != nil {
return nil, nil, err
}
newTv := make(row.TaggedValues)
for tag, val := range tv {
newTag, found := tagMapping[tag]
if !found {
newTag = tag
}
newTv[newTag] = val
// vItr.HasMore() is true here because of the even-length assertion above.
_, val, err := vItr.Next()
if err != nil {
return nil, nil, err
}
val = newTv.NomsTupleForTags(nbf, rSch.GetNonPKCols().Tags, false)
if tag.Kind() != types.UintKind {
panic("Invalid tagged tuple must have uint tags.")
}
if val != types.NullValue {
newTag, ok := tagMapping[uint64(tag.(types.Uint))]
if ok {
vv[idx] = types.Uint(newTag)
vv[idx+1] = val
idx += 2
}
}
}
return key, val, nil
valTup, err = types.NewTuple(nbf, vv[:idx]...)
if err != nil {
return nil, nil, err
}
return keyTup, valTup, nil
}
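// remapTupleTags is an illustrative, simplified version of the two loops
// above (this helper is hypothetical, not part of the change): a row tuple
// alternates [tag0, val0, tag1, val1, ...]; only the Uint tags are rewritten
// through the mapping, while the values pass through untouched.
func remapTupleTags(vals []types.Value, tagMapping map[uint64]uint64) []types.Value {
	out := make([]types.Value, len(vals))
	for i := 0; i+1 < len(vals); i += 2 {
		tag := uint64(vals[i].(types.Uint))
		if nt, ok := tagMapping[tag]; ok {
			tag = nt
		}
		out[i] = types.Uint(tag)
		out[i+1] = vals[i+1]
	}
	return out
}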
func tagExistsInHistory(ctx context.Context, c *doltdb.Commit, tagMapping TagMapping) (bool, error) {
@@ -608,91 +735,83 @@ func validateTagMapping(tagMapping TagMapping) error {
return nil
}
func buildGlobalTagMapping(ctx context.Context, root *doltdb.RootValue, parentRoot *doltdb.RootValue, rebasedParentRoot *doltdb.RootValue, globalMapping map[string]map[uint64]uint64) error {
func buildTagMapping(ctx context.Context, root, rebasedParentRoot *doltdb.RootValue, parentTagMapping TagMapping) (TagMapping, error) {
tagMapping := parentTagMapping
// create mappings for new columns
tblNames, err := root.GetTableNames(ctx)
if err != nil {
return err
return nil, err
}
rss, err := doltdb.GetRootValueSuperSchema(ctx, rebasedParentRoot)
if err != nil {
return nil, err
}
existingRebasedTags := set.NewUint64Set(rss.AllTags())
for _, tn := range tblNames {
if doltdb.HasDoltPrefix(tn) {
err = handleSystemTableMappings(ctx, tn, root, globalMapping)
err = handleSystemTableMappings(ctx, tn, root, tagMapping)
if err != nil {
return err
return nil, err
}
continue
}
if _, found := globalMapping[tn]; !found {
globalMapping[tn] = make(map[uint64]uint64)
if _, found := tagMapping[tn]; !found {
tagMapping[tn] = make(map[uint64]uint64)
}
t, _, err := root.GetTable(ctx, tn)
if err != nil {
return err
return nil, err
}
sch, err := t.GetSchema(ctx)
if err != nil {
return err
return nil, err
}
foundParent, err := parentRoot.HasTable(ctx, tn)
if err != nil {
return err
}
// for this table, get the new columns in root since parentRoot
var cc *schema.ColCollection
var parentSS *schema.SuperSchema
if foundParent {
var found bool
parentSS, found, err = parentRoot.GetSuperSchema(ctx, tn)
if err != nil {
return err
}
if !found {
return fmt.Errorf("error generating unique tags for migration, cannot find super schema for table %s", tn)
}
cc, _ = schema.NewColCollection()
err = sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
if _, found := parentSS.GetByTag(tag); !found {
cc, err = cc.Append(col)
}
stop = err != nil
return stop, err
})
} else {
cc = sch.GetAllCols()
}
var colNames []string
var colKinds []types.NomsKind
var newColNames []string
var newColKinds []types.NomsKind
var oldTags []uint64
_ = cc.Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
colNames = append(colNames, col.Name)
colKinds = append(colKinds, col.Kind)
oldTags = append(oldTags, tag)
var existingColKinds []types.NomsKind
_ = sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
_, found := tagMapping[tn][tag]
if !found {
newColNames = append(newColNames, col.Name)
newColKinds = append(newColKinds, col.Kind)
oldTags = append(oldTags, tag)
} else {
existingColKinds = append(existingColKinds, col.Kind)
}
return false, nil
})
newTags, err := rebasedParentRoot.GenerateTagsForNewColumns(ctx, tn, colNames, colKinds)
if err != nil {
return err
}
if len(oldTags) != len(newTags) {
return errors.New("error generating unique tags for migration")
// generate tags with the same method as root.GenerateTagsForNewColumns()
newTags := make([]uint64, len(newColNames))
for i := range newTags {
newTags[i] = schema.AutoGenerateTag(existingRebasedTags, tn, existingColKinds, newColNames[i], newColKinds[i])
existingColKinds = append(existingColKinds, newColKinds[i])
existingRebasedTags.Add(newTags[i])
}
for i, ot := range oldTags {
if _, found := globalMapping[tn][ot]; !found {
globalMapping[tn][ot] = newTags[i]
}
tagMapping[tn][ot] = newTags[i]
}
}
return nil
err = validateTagMapping(tagMapping)
if err != nil {
return nil, err
}
return tagMapping, nil
}
func handleSystemTableMappings(ctx context.Context, tblName string, root *doltdb.RootValue, globalMapping map[string]map[uint64]uint64) error {
@@ -740,3 +859,15 @@ func handleSystemTableMappings(ctx context.Context, tblName string, root *doltdb
return nil
}
func rootsMustBeEqual(r1, r2 *doltdb.RootValue) bool {
h1, err := r1.HashOf()
if err != nil {
panic(err)
}
h2, err := r2.HashOf()
if err != nil {
panic(err)
}
return h1.Equal(h2)
}

View File

@@ -190,7 +190,7 @@ func isNecessary(srcSch, destSch schema.Schema, destToSrc map[uint64]uint64) (bo
return true, nil
}
if srcCol.Kind != destCol.Kind {
if !srcCol.TypeInfo.Equals(destCol.TypeInfo) {
return true, nil
}
}

View File

@@ -118,12 +118,8 @@ func TestJSONMarshalling(t *testing.T) {
func TestTypeInfoMarshalling(t *testing.T) {
//TODO: determine the storage format for BINARY
//TODO: determine the storage format for BLOB
//TODO: determine the storage format for DECIMAL
//TODO: determine the storage format for ENUM
//TODO: determine the storage format for LONGBLOB
//TODO: determine the storage format for MEDIUMBLOB
//TODO: determine the storage format for SET
//TODO: determine the storage format for TIME
//TODO: determine the storage format for TINYBLOB
//TODO: determine the storage format for VARBINARY
sqlTypes := []sql.Type{
@@ -134,11 +130,11 @@ func TestTypeInfoMarshalling(t *testing.T) {
//sql.Blob, //BLOB
sql.Boolean, //BOOLEAN
sql.MustCreateStringWithDefaults(sqltypes.Char, 10), //CHAR(10)
sql.Date, //DATE
sql.Datetime, //DATETIME
//sql.MustCreateDecimalType(9, 5), //DECIMAL(9, 5)
sql.Float64, //DOUBLE
//sql.MustCreateEnumType([]string{"a", "b", "c"}, sql.Collation_Default), //ENUM('a','b','c')
sql.Date, //DATE
sql.Datetime, //DATETIME
sql.MustCreateDecimalType(9, 5), //DECIMAL(9, 5)
sql.Float64, //DOUBLE
sql.MustCreateEnumType([]string{"a", "b", "c"}, sql.Collation_Default), //ENUM('a','b','c')
sql.Float32, //FLOAT
sql.Int32, //INT
sql.Uint32, //INT UNSIGNED
@@ -148,11 +144,11 @@ func TestTypeInfoMarshalling(t *testing.T) {
sql.Int24, //MEDIUMINT
sql.Uint24, //MEDIUMINT UNSIGNED
sql.MediumText, //MEDIUMTEXT
//sql.MustCreateSetType([]string{"a", "b", "c"}, sql.Collation_Default), //SET('a','b','c')
sql.Int16, //SMALLINT
sql.Uint16, //SMALLINT UNSIGNED
sql.Text, //TEXT
//sql.Time, //TIME
sql.MustCreateSetType([]string{"a", "b", "c"}, sql.Collation_Default), //SET('a','b','c')
sql.Int16, //SMALLINT
sql.Uint16, //SMALLINT UNSIGNED
sql.Text, //TEXT
sql.Time, //TIME
sql.Timestamp, //TIMESTAMP
//sql.TinyBlob, //TINYBLOB
sql.Int8, //TINYINT

View File

@@ -120,6 +120,11 @@ func (ss *SuperSchema) AllColumnNames(tag uint64) []string {
return ss.tagNames[tag]
}
// AllTags returns a slice of all tags contained in the SuperSchema
func (ss *SuperSchema) AllTags() []uint64 {
return ss.allCols.Tags
}
// LatestColumnName returns the latest name of the column corresponding to tag
func (ss *SuperSchema) LatestColumnName(tag uint64) string {
return ss.tagNames[tag][0]

View File

@@ -22,6 +22,7 @@ import (
"regexp"
"strings"
"github.com/liquidata-inc/dolt/go/libraries/utils/set"
"github.com/liquidata-inc/dolt/go/store/types"
)
@@ -40,11 +41,11 @@ func ErrTagPrevUsed(tag uint64, newColName, tableName string) error {
// and repositories that perform the same sequence of mutations to a database will get equivalent databases as a result.
// DETERMINISTIC MUTATION IS A CRITICAL INVARIANT FOR MAINTAINING COMPATIBILITY BETWEEN REPOSITORIES.
// DO NOT ALTER THIS METHOD.
func AutoGenerateTag(rootSS *SuperSchema, tableName string, existingColKinds []types.NomsKind, newColName string, newColKind types.NomsKind) uint64 {
func AutoGenerateTag(existingTags *set.Uint64Set, tableName string, existingColKinds []types.NomsKind, newColName string, newColKind types.NomsKind) uint64 {
// DO NOT ALTER THIS METHOD (see above)
var maxTagVal uint64 = 128 * 128
for maxTagVal/2 < uint64(rootSS.Size()) {
for maxTagVal/2 < uint64(existingTags.Size()) {
if maxTagVal >= ReservedTagMin-1 {
panic("There is no way anyone should ever have this many columns. You are a bad person if you hit this panic.")
} else if maxTagVal*128 < maxTagVal {
@@ -60,7 +61,7 @@ func AutoGenerateTag(rootSS *SuperSchema, tableName string, existingColKinds []t
for {
randTag = uint64(randGen.Int63n(int64(maxTagVal)))
if _, found := rootSS.GetByTag(randTag); !found {
if !existingTags.Contains(randTag) {
break
}
}
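// An illustrative property of this function (the values are hypothetical):
// the PRNG is seeded from the table name, column name, and column kinds, so
// identical inputs always yield the identical tag on any repository:
//
//	existing := set.NewUint64Set(nil)
//	t1 := AutoGenerateTag(existing, "employees", nil, "name", types.StringKind)
//	t2 := AutoGenerateTag(existing, "employees", nil, "name", types.StringKind)
//	// t1 == t2, provided `existing` is unchanged between the calls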

View File

@@ -135,6 +135,29 @@ func generateVarBinaryType(t *testing.T, length int64, pad bool) *varBinaryType
return &varBinaryType{sql.MustCreateBinary(sqltypes.VarBinary, length)}
}
func generateVarStringTypes(t *testing.T, numOfTypes uint16) []TypeInfo {
var res []TypeInfo
loop(t, 1, 500, numOfTypes, func(i int64) {
rts := i%2 == 0
res = append(res, generateVarStringType(t, i, rts))
})
return res
}
func generateVarStringType(t *testing.T, length int64, rts bool) *varStringType {
require.True(t, length > 0)
if rts {
t, err := sql.CreateStringWithDefaults(sqltypes.Char, length)
if err == nil {
return &varStringType{t}
}
}
return &varStringType{sql.MustCreateStringWithDefaults(sqltypes.VarChar, length)}
}
func loop(t *testing.T, start int64, endInclusive int64, numOfSteps uint16, loopedFunc func(int64)) {
require.True(t, endInclusive > start)
maxNumOfSteps := endInclusive - start + 1

View File

@@ -18,6 +18,7 @@ import (
"fmt"
"strconv"
"github.com/shopspring/decimal"
"github.com/src-d/go-mysql-server/sql"
"github.com/liquidata-inc/dolt/go/store/types"
@@ -66,12 +67,8 @@ func CreateDecimalTypeFromParams(params map[string]string) (TypeInfo, error) {
// ConvertNomsValueToValue implements TypeInfo interface.
func (ti *decimalType) ConvertNomsValueToValue(v types.Value) (interface{}, error) {
if val, ok := v.(types.String); ok {
res, err := ti.sqlDecimalType.Convert(string(val))
if err != nil {
return nil, fmt.Errorf(`"%v" cannot convert "%v" to value`, ti.String(), val)
}
return res, nil
if val, ok := v.(types.Decimal); ok {
return ti.sqlDecimalType.Convert(decimal.Decimal(val))
}
if _, ok := v.(types.Null); ok || v == nil {
return nil, nil
@@ -84,15 +81,14 @@ func (ti *decimalType) ConvertValueToNomsValue(v interface{}) (types.Value, erro
if v == nil {
return types.NullValue, nil
}
strVal, err := ti.sqlDecimalType.Convert(v)
decVal, err := ti.sqlDecimalType.ConvertToDecimal(v)
if err != nil {
return nil, err
}
val, ok := strVal.(string)
if ok {
return types.String(val), nil
if !decVal.Valid {
return nil, fmt.Errorf(`"%v" has unexpectedly encountered a null value from embedded type`, ti.String())
}
return nil, fmt.Errorf(`"%v" has unexpectedly encountered a value of type "%T" from embedded type`, ti.String(), v)
return types.Decimal(decVal.Decimal), nil
}
// Equals implements TypeInfo interface.
@@ -144,7 +140,7 @@ func (ti *decimalType) IsValid(v types.Value) bool {
// NomsKind implements TypeInfo interface.
func (ti *decimalType) NomsKind() types.NomsKind {
return types.StringKind
return types.DecimalKind
}
// ParseValue implements TypeInfo interface.
@@ -152,14 +148,7 @@ func (ti *decimalType) ParseValue(str *string) (types.Value, error) {
if str == nil || *str == "" {
return types.NullValue, nil
}
strVal, err := ti.sqlDecimalType.Convert(*str)
if err != nil {
return nil, err
}
if val, ok := strVal.(string); ok {
return types.String(val), nil
}
return nil, fmt.Errorf(`"%v" cannot convert the string "%v" to a value`, ti.String(), str)
return ti.ConvertValueToNomsValue(*str)
}
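// exampleDecimalRoundTrip is an illustrative sketch, not part of the change:
// with the new storage format, DECIMAL values persist as types.Decimal
// rather than types.String.
func exampleDecimalRoundTrip() (types.Value, error) {
	ti := &decimalType{sql.MustCreateDecimalType(9, 2)}
	return ti.ConvertValueToNomsValue("4723245.01") // a types.Decimal
}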
// String implements TypeInfo interface.

View File

@@ -0,0 +1,595 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package typeinfo
import (
"fmt"
"math/big"
"testing"
"time"
"github.com/shopspring/decimal"
"github.com/src-d/go-mysql-server/sql"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/liquidata-inc/dolt/go/store/types"
)
func TestDecimalConvertNomsValueToValue(t *testing.T) {
tests := []struct {
typ *decimalType
input types.Decimal
output string
expectedErr bool
}{
{
generateDecimalType(t, 1, 0),
types.Decimal(decimal.RequireFromString("0")),
"0",
false,
},
{
generateDecimalType(t, 1, 0),
types.Decimal(decimal.RequireFromString("-1.5")),
"-2",
false,
},
{
generateDecimalType(t, 2, 1),
types.Decimal(decimal.RequireFromString("-1.5")),
"-1.5",
false,
},
{
generateDecimalType(t, 5, 4),
types.Decimal(decimal.RequireFromString("-5.7159")),
"-5.7159",
false,
},
{
generateDecimalType(t, 9, 2),
types.Decimal(decimal.RequireFromString("4723245")),
"4723245.00",
false,
},
{
generateDecimalType(t, 9, 2),
types.Decimal(decimal.RequireFromString("4723245.01")),
"4723245.01",
false,
},
{
generateDecimalType(t, 9, 2),
types.Decimal(decimal.RequireFromString("14723245.01")),
"",
true,
},
{
generateDecimalType(t, 5, 4),
types.Decimal(decimal.RequireFromString("55.7159")),
"",
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) {
output, err := test.typ.ConvertNomsValueToValue(test.input)
if test.expectedErr {
assert.Error(t, err)
} else {
require.NoError(t, err)
require.Equal(t, test.output, output)
}
})
}
}
func TestDecimalConvertValueToNomsValue(t *testing.T) {
tests := []struct {
typ *decimalType
input interface{}
output types.Decimal
expectedErr bool
}{
{
generateDecimalType(t, 1, 0),
7,
types.Decimal(decimal.RequireFromString("7")),
false,
},
{
generateDecimalType(t, 5, 1),
-4.5,
types.Decimal(decimal.RequireFromString("-4.5")),
false,
},
{
generateDecimalType(t, 10, 0),
"77",
types.Decimal(decimal.RequireFromString("77")),
false,
},
{
generateDecimalType(t, 5, 0),
"dog",
types.Decimal{},
true,
},
{
generateDecimalType(t, 15, 7),
true,
types.Decimal{},
true,
},
{
generateDecimalType(t, 20, 5),
time.Unix(137849, 0),
types.Decimal{},
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) {
output, err := test.typ.ConvertValueToNomsValue(test.input)
if !test.expectedErr {
require.NoError(t, err)
assert.True(t, test.output.Equals(output))
} else {
assert.Error(t, err)
}
})
}
}
func TestDecimalFormatValue(t *testing.T) {
tests := []struct {
typ *decimalType
input types.Decimal
output string
expectedErr bool
}{
{
generateDecimalType(t, 1, 0),
types.Decimal(decimal.RequireFromString("0")),
"0",
false,
},
{
generateDecimalType(t, 1, 0),
types.Decimal(decimal.RequireFromString("-1.5")),
"-2",
false,
},
{
generateDecimalType(t, 2, 1),
types.Decimal(decimal.RequireFromString("-1.5")),
"-1.5",
false,
},
{
generateDecimalType(t, 5, 4),
types.Decimal(decimal.RequireFromString("-5.7159")),
"-5.7159",
false,
},
{
generateDecimalType(t, 9, 2),
types.Decimal(decimal.RequireFromString("4723245")),
"4723245.00",
false,
},
{
generateDecimalType(t, 9, 2),
types.Decimal(decimal.RequireFromString("4723245.01")),
"4723245.01",
false,
},
{
generateDecimalType(t, 9, 2),
types.Decimal(decimal.RequireFromString("14723245.01")),
"",
true,
},
{
generateDecimalType(t, 5, 4),
types.Decimal(decimal.RequireFromString("55.7159")),
"",
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) {
output, err := test.typ.FormatValue(test.input)
if test.expectedErr {
assert.Error(t, err)
} else {
require.NoError(t, err)
require.Equal(t, test.output, *output)
}
})
}
}
func TestDecimalParseValue(t *testing.T) {
tests := []struct {
typ *decimalType
input string
output types.Decimal
expectedErr bool
}{
{
generateDecimalType(t, 1, 0),
"0",
types.Decimal(decimal.RequireFromString("0")),
false,
},
{
generateDecimalType(t, 1, 0),
"-1.5",
types.Decimal(decimal.RequireFromString("-2")),
false,
},
{
generateDecimalType(t, 2, 1),
"-1.5",
types.Decimal(decimal.RequireFromString("-1.5")),
false,
},
{
generateDecimalType(t, 5, 4),
"-5.7159",
types.Decimal(decimal.RequireFromString("-5.7159")),
false,
},
{
generateDecimalType(t, 9, 2),
"4723245.00",
types.Decimal(decimal.RequireFromString("4723245.00")),
false,
},
{
generateDecimalType(t, 13, 2),
"4723245.01",
types.Decimal(decimal.RequireFromString("4723245.01")),
false,
},
{
generateDecimalType(t, 9, 2),
"24723245.01",
types.Decimal{},
true,
},
{
generateDecimalType(t, 5, 4),
"-44.2841",
types.Decimal{},
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) {
output, err := test.typ.ParseValue(&test.input)
if !test.expectedErr {
require.NoError(t, err)
assert.True(t, test.output.Equals(output))
} else {
assert.Error(t, err)
}
})
}
}
func TestDecimalMarshal(t *testing.T) {
tests := []struct {
precision uint8
scale uint8
val interface{}
expectedVal string
expectedErr bool
}{
{1, 0, byte(0), "0", false},
{1, 0, int8(3), "3", false},
{1, 0, "-3.7e0", "-4", false},
{1, 0, uint(4), "4", false},
{1, 0, int16(9), "9", false},
{1, 0, "0.00000000000000000003e20", "3", false},
{1, 0, float64(-9.4), "-9", false},
{1, 0, float32(9.5), "", true},
{1, 0, int32(-10), "", true},
{1, 1, 0, "0.0", false},
{1, 1, .01, "0.0", false},
{1, 1, .1, "0.1", false},
{1, 1, ".22", "0.2", false},
{1, 1, .55, "0.6", false},
{1, 1, "-.7863294659345624", "-0.8", false},
{1, 1, "2634193746329327479.32030573792e-19", "0.3", false},
{1, 1, 1, "", true},
{1, 1, new(big.Rat).SetInt64(2), "", true},
{5, 0, 0, "0", false},
{5, 0, -5, "-5", false},
{5, 0, -99995, "-99995", false},
{5, 0, 5000.2, "5000", false},
{5, 0, "7742", "7742", false},
{5, 0, new(big.Float).SetFloat64(-4723.875), "-4724", false},
{5, 0, 99999, "99999", false},
{5, 0, "0xf8e1", "63713", false},
{5, 0, "0b1001110101100110", "40294", false},
{5, 0, new(big.Rat).SetFrac64(999999, 10), "", true},
{5, 0, 673927, "", true},
{10, 5, 0, "0.00000", false},
{10, 5, "25.1", "25.10000", false},
{10, 5, "-25.1", "-25.10000", false},
{10, 5, "-99205.8572", "-99205.85720", false},
{10, 5, "99999.999994", "99999.99999", false},
{10, 5, "5.5729136e3", "5572.91360", false},
{10, 5, "600e-2", "6.00000", false},
{10, 5, new(big.Rat).SetFrac64(-22, 7), "-3.14286", false},
{10, 5, "-99995.1", "-99995.10000", false},
{10, 5, 100000, "", true},
{10, 5, "-99999.999995", "", true},
{65, 0, "99999999999999999999999999999999999999999999999999999999999999999",
"99999999999999999999999999999999999999999999999999999999999999999", false},
{65, 0, "99999999999999999999999999999999999999999999999999999999999999999.1",
"99999999999999999999999999999999999999999999999999999999999999999", false},
{65, 0, "99999999999999999999999999999999999999999999999999999999999999999.99", "", true},
{65, 12, "16976349273982359874209023948672021737840592720387475.2719128737543572927374503832837350563300243035038234972093785",
"16976349273982359874209023948672021737840592720387475.271912873754", false},
{65, 12, "99999999999999999999999999999999999999999999999999999.9999999999999", "", true},
{20, 10, []byte{32}, "", true},
{20, 10, time.Date(2019, 12, 12, 12, 12, 12, 0, time.UTC), "", true},
}
for _, test := range tests {
t.Run(fmt.Sprintf("%v %v %v", test.precision, test.scale, test.val), func(t *testing.T) {
typ := &decimalType{sql.MustCreateDecimalType(test.precision, test.scale)}
val, err := typ.ConvertValueToNomsValue(test.val)
if test.expectedErr {
assert.Error(t, err)
} else {
require.NoError(t, err)
assert.Equal(t, test.expectedVal, typ.sqlDecimalType.MustConvert(decimal.Decimal(val.(types.Decimal))))
umar, err := typ.ConvertNomsValueToValue(val)
require.NoError(t, err)
testVal := typ.sqlDecimalType.MustConvert(test.val)
cmp, err := typ.sqlDecimalType.Compare(testVal, umar)
require.NoError(t, err)
assert.Equal(t, 0, cmp)
}
})
}
}
func TestDecimalRoundTrip(t *testing.T) {
tests := []struct {
typ *decimalType
input string
output string
expectedErr bool
}{
{
generateDecimalType(t, 1, 0),
"0",
"0",
false,
},
{
generateDecimalType(t, 4, 1),
"0",
"0.0",
false,
},
{
generateDecimalType(t, 9, 4),
"0",
"0.0000",
false,
},
{
generateDecimalType(t, 26, 0),
"0",
"0",
false,
},
{
generateDecimalType(t, 48, 22),
"0",
"0.0000000000000000000000",
false,
},
{
generateDecimalType(t, 65, 30),
"0",
"0.000000000000000000000000000000",
false,
},
{
generateDecimalType(t, 1, 0),
"-1.5",
"-2",
false,
},
{
generateDecimalType(t, 4, 1),
"-1.5",
"-1.5",
false,
},
{
generateDecimalType(t, 9, 4),
"-1.5",
"-1.5000",
false,
},
{
generateDecimalType(t, 26, 0),
"-1.5",
"-2",
false,
},
{
generateDecimalType(t, 48, 22),
"-1.5",
"-1.5000000000000000000000",
false,
},
{
generateDecimalType(t, 65, 30),
"-1.5",
"-1.500000000000000000000000000000",
false,
},
{
generateDecimalType(t, 1, 0),
"9351580",
"",
true,
},
{
generateDecimalType(t, 4, 1),
"9351580",
"",
true,
},
{
generateDecimalType(t, 9, 4),
"9351580",
"",
true,
},
{
generateDecimalType(t, 26, 0),
"9351580",
"9351580",
false,
},
{
generateDecimalType(t, 48, 22),
"9351580",
"9351580.0000000000000000000000",
false,
},
{
generateDecimalType(t, 65, 30),
"9351580",
"9351580.000000000000000000000000000000",
false,
},
{
generateDecimalType(t, 1, 0),
"-1076416.875",
"",
true,
},
{
generateDecimalType(t, 4, 1),
"-1076416.875",
"",
true,
},
{
generateDecimalType(t, 9, 4),
"-1076416.875",
"",
true,
},
{
generateDecimalType(t, 26, 0),
"-1076416.875",
"-1076417",
false,
},
{
generateDecimalType(t, 48, 22),
"-1076416.875",
"-1076416.8750000000000000000000",
false,
},
{
generateDecimalType(t, 65, 30),
"-1076416.875",
"-1076416.875000000000000000000000000000",
false,
},
{
generateDecimalType(t, 1, 0),
"198728394234798423466321.27349757",
"",
true,
},
{
generateDecimalType(t, 4, 1),
"198728394234798423466321.27349757",
"",
true,
},
{
generateDecimalType(t, 9, 4),
"198728394234798423466321.27349757",
"",
true,
},
{
generateDecimalType(t, 26, 0),
"198728394234798423466321.27349757",
"198728394234798423466321",
false,
},
{
generateDecimalType(t, 48, 22),
"198728394234798423466321.27349757",
"198728394234798423466321.2734975700000000000000",
false,
},
{
generateDecimalType(t, 65, 30),
"198728394234798423466321.27349757",
"198728394234798423466321.273497570000000000000000000000",
false,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v %v`, test.typ.String(), test.input, test.output), func(t *testing.T) {
parsed, err := test.typ.ConvertValueToNomsValue(test.input)
if !test.expectedErr {
require.NoError(t, err)
output, err := test.typ.ConvertNomsValueToValue(parsed)
require.NoError(t, err)
assert.Equal(t, test.output, output)
parsed2, err := test.typ.ParseValue(&test.input)
require.NoError(t, err)
assert.Equal(t, parsed, parsed2)
output2, err := test.typ.FormatValue(parsed2)
require.NoError(t, err)
assert.Equal(t, test.output, *output2)
} else {
assert.Error(t, err)
_, err = test.typ.ParseValue(&test.input)
assert.Error(t, err)
}
})
}
}

View File

@@ -67,8 +67,8 @@ func CreateEnumTypeFromParams(params map[string]string) (TypeInfo, error) {
// ConvertNomsValueToValue implements TypeInfo interface.
func (ti *enumType) ConvertNomsValueToValue(v types.Value) (interface{}, error) {
if val, ok := v.(types.String); ok {
res, err := ti.sqlEnumType.Convert(string(val))
if val, ok := v.(types.Uint); ok {
res, err := ti.sqlEnumType.Unmarshal(int64(val))
if err != nil {
return nil, fmt.Errorf(`"%v" cannot convert "%v" to value`, ti.String(), val)
}
@@ -85,15 +85,11 @@ func (ti *enumType) ConvertValueToNomsValue(v interface{}) (types.Value, error)
if v == nil {
return types.NullValue, nil
}
strVal, err := ti.sqlEnumType.Convert(v)
val, err := ti.sqlEnumType.Marshal(v)
if err != nil {
return nil, err
}
val, ok := strVal.(string)
if ok {
return types.String(val), nil
}
return nil, fmt.Errorf(`"%v" has unexpectedly encountered a value of type "%T" from embedded type`, ti.String(), v)
return types.Uint(val), nil
}
// Equals implements TypeInfo interface.
@@ -158,7 +154,7 @@ func (ti *enumType) IsValid(v types.Value) bool {
// NomsKind implements TypeInfo interface.
func (ti *enumType) NomsKind() types.NomsKind {
return types.StringKind
return types.UintKind
}
// ParseValue implements TypeInfo interface.
@@ -166,14 +162,11 @@ func (ti *enumType) ParseValue(str *string) (types.Value, error) {
if str == nil || *str == "" {
return types.NullValue, nil
}
strVal, err := ti.sqlEnumType.Convert(*str)
val, err := ti.sqlEnumType.Marshal(*str)
if err != nil {
return nil, err
}
if val, ok := strVal.(string); ok {
return types.String(val), nil
}
return nil, fmt.Errorf(`"%v" cannot convert the string "%v" to a value`, ti.String(), str)
return types.Uint(val), nil
}
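// exampleEnumStorage is an illustrative sketch, not part of the change: enum
// values now persist as their 1-based member index in a types.Uint, per the
// Marshal call above and the tests.
func exampleEnumStorage() (types.Value, error) {
	ti := &enumType{sql.MustCreateEnumType([]string{"a", "b", "c"}, sql.Collation_Default)}
	return ti.ConvertValueToNomsValue("b") // types.Uint(2)
}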
// String implements TypeInfo interface.

View File

@@ -0,0 +1,258 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package typeinfo
import (
"fmt"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/liquidata-inc/dolt/go/store/types"
)
func TestEnumConvertNomsValueToValue(t *testing.T) {
tests := []struct {
typ *enumType
input types.Uint
output string
expectedErr bool
}{
{
generateEnumType(t, 3),
1,
"aaaa",
false,
},
{
generateEnumType(t, 5),
2,
"aaab",
false,
},
{
generateEnumType(t, 8),
3,
"aaac",
false,
},
{
generateEnumType(t, 7),
7,
"aaag",
false,
},
{
generateEnumType(t, 2),
0,
"",
true,
},
{
generateEnumType(t, 3),
4,
"",
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) {
output, err := test.typ.ConvertNomsValueToValue(test.input)
if test.expectedErr {
assert.Error(t, err)
} else {
require.NoError(t, err)
require.Equal(t, test.output, output)
}
})
}
}
func TestEnumConvertValueToNomsValue(t *testing.T) {
tests := []struct {
typ *enumType
input interface{}
output types.Uint
expectedErr bool
}{
{
generateEnumType(t, 4),
"aaac",
3,
false,
},
{
generateEnumType(t, 7),
uint64(3),
3,
false,
},
{
generateEnumType(t, 4),
"dog",
0,
true,
},
{
generateEnumType(t, 3),
true,
0,
true,
},
{
generateEnumType(t, 10),
time.Unix(137849, 0),
0,
true,
},
{
generateEnumType(t, 5),
complex128(14i),
0,
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) {
output, err := test.typ.ConvertValueToNomsValue(test.input)
if !test.expectedErr {
require.NoError(t, err)
assert.Equal(t, test.output, output)
} else {
assert.Error(t, err)
}
})
}
}
func TestEnumFormatValue(t *testing.T) {
tests := []struct {
typ *enumType
input types.Uint
output string
expectedErr bool
}{
{
generateEnumType(t, 3),
1,
"aaaa",
false,
},
{
generateEnumType(t, 5),
2,
"aaab",
false,
},
{
generateEnumType(t, 8),
3,
"aaac",
false,
},
{
generateEnumType(t, 7),
7,
"aaag",
false,
},
{
generateEnumType(t, 2),
0,
"",
true,
},
{
generateEnumType(t, 3),
4,
"",
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) {
output, err := test.typ.FormatValue(test.input)
if test.expectedErr {
assert.Error(t, err)
} else {
require.NoError(t, err)
require.Equal(t, test.output, *output)
}
})
}
}
func TestEnumParseValue(t *testing.T) {
tests := []struct {
typ *enumType
input string
output types.Uint
expectedErr bool
}{
{
generateEnumType(t, 3),
"aaaa",
1,
false,
},
{
generateEnumType(t, 5),
"aaab",
2,
false,
},
{
generateEnumType(t, 8),
"aaac",
3,
false,
},
{
generateEnumType(t, 7),
"aaag",
7,
false,
},
{
generateEnumType(t, 2),
"dog",
0,
true,
},
{
generateEnumType(t, 3),
"aaad",
4,
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) {
output, err := test.typ.ParseValue(&test.input)
if !test.expectedErr {
require.NoError(t, err)
assert.Equal(t, test.output, output)
} else {
assert.Error(t, err)
}
})
}
}

View File

@@ -67,8 +67,8 @@ func CreateSetTypeFromParams(params map[string]string) (TypeInfo, error) {
// ConvertNomsValueToValue implements TypeInfo interface.
func (ti *setType) ConvertNomsValueToValue(v types.Value) (interface{}, error) {
if val, ok := v.(types.String); ok {
res, err := ti.sqlSetType.Convert(string(val))
if val, ok := v.(types.Uint); ok {
res, err := ti.sqlSetType.Unmarshal(uint64(val))
if err != nil {
return nil, fmt.Errorf(`"%v" cannot convert "%v" to value`, ti.String(), val)
}
@@ -85,15 +85,11 @@ func (ti *setType) ConvertValueToNomsValue(v interface{}) (types.Value, error) {
if v == nil {
return types.NullValue, nil
}
strVal, err := ti.sqlSetType.Convert(v)
val, err := ti.sqlSetType.Marshal(v)
if err != nil {
return nil, err
}
val, ok := strVal.(string)
if ok {
return types.String(val), nil
}
return nil, fmt.Errorf(`"%v" cannot convert value "%v" of type "%T" as it is invalid`, ti.String(), v, v)
return types.Uint(val), nil
}
// Equals implements TypeInfo interface.
@@ -158,22 +154,19 @@ func (ti *setType) IsValid(v types.Value) bool {
// NomsKind implements TypeInfo interface.
func (ti *setType) NomsKind() types.NomsKind {
return types.StringKind
return types.UintKind
}
// ParseValue implements TypeInfo interface.
func (ti *setType) ParseValue(str *string) (types.Value, error) {
if str == nil || *str == "" {
if str == nil {
return types.NullValue, nil
}
strVal, err := ti.sqlSetType.Convert(*str)
val, err := ti.sqlSetType.Marshal(*str)
if err != nil {
return nil, err
}
if val, ok := strVal.(string); ok {
return types.String(val), nil
}
return nil, fmt.Errorf(`"%v" cannot convert the string "%v" to a value`, ti.String(), str)
return types.Uint(val), nil
}
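// exampleSetStorage is an illustrative sketch, not part of the change: set
// values now persist as a bitmask of member indexes (here aa=1, ab=2, ac=4),
// so "aa,ab" marshals to types.Uint(3), per the tests.
func exampleSetStorage() (types.Value, error) {
	ti := &setType{sql.MustCreateSetType([]string{"aa", "ab", "ac"}, sql.Collation_Default)}
	return ti.ConvertValueToNomsValue("aa,ab") // types.Uint(3)
}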
// String implements TypeInfo interface.

View File

@@ -0,0 +1,270 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package typeinfo
import (
"fmt"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/liquidata-inc/dolt/go/store/types"
)
func TestSetConvertNomsValueToValue(t *testing.T) {
tests := []struct {
typ *setType
input types.Uint
output string
expectedErr bool
}{
{
generateSetType(t, 2),
0,
"",
false,
},
{
generateSetType(t, 3),
1,
"aa",
false,
},
{
generateSetType(t, 5),
2,
"ab",
false,
},
{
generateSetType(t, 8),
3,
"aa,ab",
false,
},
{
generateSetType(t, 7),
4,
"ac",
false,
},
{
generateSetType(t, 4),
7,
"aa,ab,ac",
false,
},
{
generateSetType(t, 3),
8,
"",
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) {
output, err := test.typ.ConvertNomsValueToValue(test.input)
if test.expectedErr {
assert.Error(t, err)
} else {
require.NoError(t, err)
require.Equal(t, test.output, output)
}
})
}
}
func TestSetConvertValueToNomsValue(t *testing.T) {
tests := []struct {
typ *setType
input interface{}
output types.Uint
expectedErr bool
}{
{
generateSetType(t, 4),
"aa,ab",
3,
false,
},
{
generateSetType(t, 7),
uint64(3),
3,
false,
},
{
generateSetType(t, 3),
true,
0,
true,
},
{
generateSetType(t, 10),
time.Unix(137849, 0),
0,
true,
},
{
generateSetType(t, 5),
complex128(14i),
0,
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) {
output, err := test.typ.ConvertValueToNomsValue(test.input)
if !test.expectedErr {
require.NoError(t, err)
assert.Equal(t, test.output, output)
} else {
assert.Error(t, err)
}
})
}
}
func TestSetFormatValue(t *testing.T) {
tests := []struct {
typ *setType
input types.Uint
output string
expectedErr bool
}{
{
generateSetType(t, 2),
0,
"",
false,
},
{
generateSetType(t, 3),
1,
"aa",
false,
},
{
generateSetType(t, 5),
2,
"ab",
false,
},
{
generateSetType(t, 8),
3,
"aa,ab",
false,
},
{
generateSetType(t, 7),
4,
"ac",
false,
},
{
generateSetType(t, 4),
7,
"aa,ab,ac",
false,
},
{
generateSetType(t, 3),
8,
"",
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) {
output, err := test.typ.FormatValue(test.input)
if test.expectedErr {
assert.Error(t, err)
} else {
require.NoError(t, err)
require.Equal(t, test.output, *output)
}
})
}
}
func TestSetParseValue(t *testing.T) {
tests := []struct {
typ *setType
input string
output types.Uint
expectedErr bool
}{
{
generateSetType(t, 2),
"",
0,
false,
},
{
generateSetType(t, 3),
"aa",
1,
false,
},
{
generateSetType(t, 5),
"ab",
2,
false,
},
{
generateSetType(t, 8),
"aa,ab",
3,
false,
},
{
generateSetType(t, 7),
"ac",
4,
false,
},
{
generateSetType(t, 4),
"aa,ab,ac",
7,
false,
},
{
generateSetType(t, 3),
"ad",
0,
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v %v`, test.typ.String(), test.input), func(t *testing.T) {
output, err := test.typ.ParseValue(&test.input)
if !test.expectedErr {
require.NoError(t, err)
assert.Equal(t, test.output, output)
} else {
assert.Error(t, err)
}
})
}
}

View File

@@ -34,9 +34,8 @@ var TimeType = &timeType{sql.Time}
// ConvertNomsValueToValue implements TypeInfo interface.
func (ti *timeType) ConvertNomsValueToValue(v types.Value) (interface{}, error) {
//TODO: expose the MySQL type's microsecond implementation and persist that to disk? Enables sorting
if val, ok := v.(types.String); ok {
return string(val), nil
if val, ok := v.(types.Int); ok {
return ti.sqlTimeType.Unmarshal(int64(val)), nil
}
if _, ok := v.(types.Null); ok || v == nil {
return nil, nil
@@ -49,15 +48,11 @@ func (ti *timeType) ConvertValueToNomsValue(v interface{}) (types.Value, error)
if v == nil {
return types.NullValue, nil
}
strVal, err := ti.sqlTimeType.Convert(v)
val, err := ti.sqlTimeType.Marshal(v)
if err != nil {
return nil, err
}
val, ok := strVal.(string)
if ok {
return types.String(val), nil
}
return nil, fmt.Errorf(`"%v" cannot convert value "%v" of type "%T" as it is invalid`, ti.String(), v, v)
return types.Int(val), nil
}
// Equals implements TypeInfo interface.
@@ -71,14 +66,18 @@ func (ti *timeType) Equals(other TypeInfo) bool {
// FormatValue implements TypeInfo interface.
func (ti *timeType) FormatValue(v types.Value) (*string, error) {
if val, ok := v.(types.String); ok {
res := string(val)
return &res, nil
}
if _, ok := v.(types.Null); ok || v == nil {
return nil, nil
}
return nil, fmt.Errorf(`"%v" cannot convert NomsKind "%v" to a string`, ti.String(), v.Kind())
strVal, err := ti.ConvertNomsValueToValue(v)
if err != nil {
return nil, err
}
val, ok := strVal.(string)
if !ok {
return nil, fmt.Errorf(`"%v" has unexpectedly encountered a value of type "%T" from embedded type`, ti.String(), v)
}
return &val, nil
}
// GetTypeIdentifier implements TypeInfo interface.
@@ -99,7 +98,7 @@ func (ti *timeType) IsValid(v types.Value) bool {
// NomsKind implements TypeInfo interface.
func (ti *timeType) NomsKind() types.NomsKind {
return types.StringKind
return types.IntKind
}
// ParseValue implements TypeInfo interface.
@@ -107,14 +106,11 @@ func (ti *timeType) ParseValue(str *string) (types.Value, error) {
if str == nil || *str == "" {
return types.NullValue, nil
}
strVal, err := ti.sqlTimeType.Convert(*str)
val, err := ti.sqlTimeType.Marshal(*str)
if err != nil {
return nil, err
}
if val, ok := strVal.(string); ok {
return types.String(val), nil
}
return nil, fmt.Errorf(`"%v" cannot convert the string "%v" to a value`, ti.String(), str)
return types.Int(val), nil
}
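// exampleTimeStorage is an illustrative sketch, not part of the change: TIME
// values now persist as microseconds in a types.Int, e.g. "00:01:53" is 113
// seconds, stored as types.Int(113000000).
func exampleTimeStorage() (types.Value, error) {
	return TimeType.ConvertValueToNomsValue("00:01:53")
}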
// String implements TypeInfo interface.

View File

@@ -0,0 +1,230 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package typeinfo
import (
"fmt"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/liquidata-inc/dolt/go/store/types"
)
func TestTimeConvertNomsValueToValue(t *testing.T) {
tests := []struct {
input types.Int
output string
expectedErr bool
}{
{
1000000,
"00:00:01",
false,
},
{
113000000,
"00:01:53",
false,
},
{
247019000000,
"68:36:59",
false,
},
{
458830485214,
"127:27:10.485214",
false,
},
{
-3020399000000,
"-838:59:59",
false,
},
{ // no integer input can cause an error; values beyond the max/min are clamped to the max/min
922337203685477580,
"838:59:59",
false,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v`, test.input), func(t *testing.T) {
output, err := TimeType.ConvertNomsValueToValue(test.input)
if test.expectedErr {
assert.Error(t, err)
} else {
require.NoError(t, err)
require.Equal(t, test.output, output)
}
})
}
}
func TestTimeConvertValueToNomsValue(t *testing.T) {
tests := []struct {
input interface{}
output types.Int
expectedErr bool
}{
{
153,
113000000,
false,
},
{
1.576,
1576000,
false,
},
{
"68:36:59",
247019000000,
false,
},
{
"683659",
247019000000,
false,
},
{
"dog",
0,
true,
},
{
true,
0,
true,
},
{
time.Unix(137849, 0),
0,
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v`, test.input), func(t *testing.T) {
output, err := TimeType.ConvertValueToNomsValue(test.input)
if !test.expectedErr {
require.NoError(t, err)
assert.Equal(t, test.output, output)
} else {
assert.Error(t, err)
}
})
}
}
func TestTimeFormatValue(t *testing.T) {
tests := []struct {
input types.Int
output string
expectedErr bool
}{
{
1000000,
"00:00:01",
false,
},
{
113000000,
"00:01:53",
false,
},
{
247019000000,
"68:36:59",
false,
},
{
458830485214,
"127:27:10.485214",
false,
},
{
-3020399000000,
"-838:59:59",
false,
},
{
922337203685477580,
"838:59:59",
false,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v`, test.input), func(t *testing.T) {
output, err := TimeType.FormatValue(test.input)
if test.expectedErr {
assert.Error(t, err)
} else {
require.NoError(t, err)
require.Equal(t, test.output, *output)
}
})
}
}
func TestTimeParseValue(t *testing.T) {
tests := []struct {
input string
output types.Int
expectedErr bool
}{
{
"683659",
247019000000,
false,
},
{
"127:27:10.485214",
458830485214,
false,
},
{
"-838:59:59",
-3020399000000,
false,
},
{
"850:00:00",
3020399000000,
false,
},
{
"dog",
0,
true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf(`%v`, test.input), func(t *testing.T) {
output, err := TimeType.ParseValue(&test.input)
if !test.expectedErr {
require.NoError(t, err)
assert.Equal(t, test.output, output)
} else {
assert.Error(t, err)
}
})
}
}

View File

@@ -139,20 +139,12 @@ func FromSqlType(sqlType sql.Type) (TypeInfo, error) {
case sqltypes.Date:
return DateType, nil
case sqltypes.Time:
//TODO: determine the storage format
if fmt.Sprintf("a") != "" { // always evaluates to true, compiler won't complain about unreachable code
return nil, fmt.Errorf(`"%v" has not yet been implemented`, sqlType.String())
}
return TimeType, nil
case sqltypes.Datetime:
return DatetimeType, nil
case sqltypes.Year:
return YearType, nil
case sqltypes.Decimal:
//TODO: determine the storage format
if fmt.Sprintf("a") != "" { // always evaluates to true, compiler won't complain about unreachable code
return nil, fmt.Errorf(`"%v" has not yet been implemented`, sqlType.String())
}
decimalSQLType, ok := sqlType.(sql.DecimalType)
if !ok {
return nil, fmt.Errorf(`expected "DecimalTypeIdentifier" from SQL basetype "Decimal"`)
@@ -213,20 +205,12 @@ func FromSqlType(sqlType sql.Type) (TypeInfo, error) {
}
return &bitType{bitSQLType}, nil
case sqltypes.Enum:
//TODO: determine the storage format
if fmt.Sprintf("a") != "" { // always evaluates to true, compiler won't complain about unreachable code
return nil, fmt.Errorf(`"%v" has not yet been implemented`, sqlType.String())
}
enumSQLType, ok := sqlType.(sql.EnumType)
if !ok {
return nil, fmt.Errorf(`expected "EnumTypeIdentifier" from SQL basetype "Enum"`)
}
return &enumType{enumSQLType}, nil
case sqltypes.Set:
//TODO: determine the storage format
if fmt.Sprintf("a") != "" { // always evaluates to true, compiler won't complain about unreachable code
return nil, fmt.Errorf(`"%v" has not yet been implemented`, sqlType.String())
}
setSQLType, ok := sqlType.(sql.SetType)
if !ok {
return nil, fmt.Errorf(`expected "SetTypeIdentifier" from SQL basetype "Set"`)

View File

@@ -19,6 +19,7 @@ import (
"testing"
"time"
"github.com/shopspring/decimal"
"github.com/src-d/go-mysql-server/sql"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -68,8 +69,7 @@ func verifyTypeInfoArrays(t *testing.T, tiArrays [][]TypeInfo, vaArrays [][]type
// delete any types that should not be tested
delete(seenTypeInfos, UnknownTypeIdentifier)
delete(seenTypeInfos, TupleTypeIdentifier)
//TODO: determine the storage format for DecimalType and VarBinaryType
delete(seenTypeInfos, DecimalTypeIdentifier)
//TODO: determine the storage format for VarBinaryType
delete(seenTypeInfos, VarBinaryTypeIdentifier)
for _, tiArray := range tiArrays {
// no row should be empty
@@ -120,7 +120,7 @@ func testTypeInfoConvertRoundTrip(t *testing.T, tiArrays [][]TypeInfo, vaArrays
if ti == DateType { // Special case as DateType removes the hh:mm:ss
val = types.Timestamp(time.Time(val.(types.Timestamp)).Truncate(24 * time.Hour))
require.True(t, val.Equals(outVal), "\"%v\"\n\"%v\"", val, outVal)
} else {
} else if ti.GetTypeIdentifier() != DecimalTypeIdentifier { // a Decimal's on-disk representation varies by precision/scale
require.True(t, val.Equals(outVal), "\"%v\"\n\"%v\"", val, outVal)
}
} else {
@@ -220,7 +220,7 @@ func testTypeInfoFormatParseRoundTrip(t *testing.T, tiArrays [][]TypeInfo, vaArr
if ti == DateType { // special case as DateType removes the hh:mm:ss
val = types.Timestamp(time.Time(val.(types.Timestamp)).Truncate(24 * time.Hour))
require.True(t, val.Equals(outVal), "\"%v\"\n\"%v\"", val, outVal)
} else {
} else if ti.GetTypeIdentifier() != DecimalTypeIdentifier { // a Decimal's on-disk representation varies by precision/scale
require.True(t, val.Equals(outVal), "\"%v\"\n\"%v\"", val, outVal)
}
} else {
@@ -330,7 +330,7 @@ func generateTypeInfoArrays(t *testing.T) ([][]TypeInfo, [][]types.Value) {
generateBitTypes(t, 16),
{BoolType},
{DateType, DatetimeType, TimestampType},
//generateDecimalTypes(t, 16),
generateDecimalTypes(t, 16),
generateEnumTypes(t, 16),
{Float32Type, Float64Type},
{InlineBlobType},
@@ -355,16 +355,19 @@ func generateTypeInfoArrays(t *testing.T) ([][]TypeInfo, [][]types.Value) {
types.Timestamp(time.Date(2000, 2, 28, 14, 38, 43, 583395000, time.UTC)),
types.Timestamp(time.Date(2038, 1, 19, 3, 14, 7, 999999000, time.UTC)),
types.Timestamp(time.Date(9999, 12, 31, 23, 59, 59, 999999000, time.UTC))},
//{types.String("1"), types.String("-1.5"), types.String("4723245"), //Decimal
// types.String("8923583.125"), types.String("1198728394234798423466321.27349757")},
{types.String("aaaa"), types.String("aaaa,aaac"), types.String("aaag"), types.String("aaab,aaad,aaaf"), types.String("aaag,aaah")}, //Enum
{types.Float(1.0), types.Float(65513.75), types.Float(4293902592), types.Float(4.58E71), types.Float(7.172E285)}, //Float
{types.InlineBlob{0}, types.InlineBlob{21}, types.InlineBlob{1, 17}, types.InlineBlob{72, 42}, types.InlineBlob{21, 122, 236}}, //InlineBlob
{types.Int(20), types.Int(215), types.Int(237493), types.Int(2035753568), types.Int(2384384576063)}, //Int
{types.String("aa"), types.String("aa,ac"), types.String("ag"), types.String("ab,ad,af"), types.String("ag,ah")}, //Set
{types.String("00:00:00"), types.String("00:00:01"), types.String("00:01:53"), types.String("68:36:59"), types.String("127:27:10.485214")}, //Time
{types.Uint(20), types.Uint(275), types.Uint(328395), types.Uint(630257298), types.Uint(93897259874)}, //Uint
{types.UUID{3}, types.UUID{3, 13}, types.UUID{128, 238, 82, 12}, types.UUID{31, 54, 23, 13, 63, 43}, types.UUID{83, 64, 21, 14, 42, 6, 35, 7, 54, 234, 6, 32, 1, 4, 2, 4}}, //Uuid
{types.Decimal(decimal.RequireFromString("0")), //Decimal
types.Decimal(decimal.RequireFromString("-1.5")),
types.Decimal(decimal.RequireFromString("4723245")),
types.Decimal(decimal.RequireFromString("-1076416.875")),
types.Decimal(decimal.RequireFromString("198728394234798423466321.27349757"))},
{types.Uint(1), types.Uint(3), types.Uint(5), types.Uint(7), types.Uint(8)}, //Enum
{types.Float(1.0), types.Float(65513.75), types.Float(4293902592), types.Float(4.58E71), types.Float(7.172E285)}, //Float
{types.InlineBlob{0}, types.InlineBlob{21}, types.InlineBlob{1, 17}, types.InlineBlob{72, 42}, types.InlineBlob{21, 122, 236}}, //InlineBlob
{types.Int(20), types.Int(215), types.Int(237493), types.Int(2035753568), types.Int(2384384576063)}, //Int
{types.Uint(1), types.Uint(5), types.Uint(64), types.Uint(42), types.Uint(192)}, //Set
{types.Int(0), types.Int(1000000 /*"00:00:01"*/), types.Int(113000000 /*"00:01:53"*/), types.Int(247019000000 /*"68:36:59"*/), types.Int(458830485214 /*"127:27:10.485214"*/)}, //Time
{types.Uint(20), types.Uint(275), types.Uint(328395), types.Uint(630257298), types.Uint(93897259874)}, //Uint
{types.UUID{3}, types.UUID{3, 13}, types.UUID{128, 238, 82, 12}, types.UUID{31, 54, 23, 13, 63, 43}, types.UUID{83, 64, 21, 14, 42, 6, 35, 7, 54, 234, 6, 32, 1, 4, 2, 4}}, //Uuid
//{types.String([]byte{1}), types.String([]byte{42, 52}), types.String([]byte{84, 32, 13, 63, 12, 86}), //VarBinary
// types.String([]byte{1, 32, 235, 64, 32, 23, 45, 76}), types.String([]byte{123, 234, 34, 223, 76, 35, 32, 12, 84, 26, 15, 34, 65, 86, 45, 23, 43, 12, 76, 154, 234, 76, 34})},
{types.String(""), types.String("a"), types.String("abc"), //VarString

View File

@@ -22,7 +22,6 @@ import (
"github.com/src-d/go-mysql-server/sql"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"vitess.io/vitess/go/sqltypes"
"github.com/liquidata-inc/dolt/go/store/types"
)
@@ -264,26 +263,3 @@ func TestVarStringParseValue(t *testing.T) {
})
}
}
func generateVarStringTypes(t *testing.T, numOfTypes uint16) []TypeInfo {
var res []TypeInfo
loop(t, 1, 500, numOfTypes, func(i int64) {
rts := false
if i%2 == 0 {
rts = true
}
res = append(res, generateVarStringType(t, i, rts))
})
return res
}
func generateVarStringType(t *testing.T, length int64, rts bool) *varStringType {
require.True(t, length > 0)
if rts {
t, err := sql.CreateStringWithDefaults(sqltypes.Char, length)
if err == nil {
return &varStringType{t}
}
}
return &varStringType{sql.MustCreateStringWithDefaults(sqltypes.VarChar, length)}
}

View File

@@ -34,7 +34,7 @@ import (
// the targetSchema given is used to prepare all rows.
func executeSelect(ctx context.Context, dEnv *env.DoltEnv, targetSch schema.Schema, root *doltdb.RootValue, query string) ([]row.Row, schema.Schema, error) {
var err error
db := NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState)
db := NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter())
engine, sqlCtx, err := NewTestEngine(ctx, db, root)
if err != nil {
return nil, nil, err
@@ -70,7 +70,7 @@ func executeSelect(ctx context.Context, dEnv *env.DoltEnv, targetSch schema.Sche
// Runs the query given and returns the error (if any).
func executeModify(ctx context.Context, ddb *doltdb.DoltDB, root *doltdb.RootValue, query string) (*doltdb.RootValue, error) {
db := NewDatabase("dolt", root, ddb, nil)
db := NewDatabase("dolt", root, ddb, nil, nil)
engine, sqlCtx, err := NewTestEngine(ctx, db, root)
if err != nil {

View File

@@ -37,15 +37,15 @@ import (
"github.com/liquidata-inc/dolt/go/store/hash"
)
type batchMode bool
type commitBehavior int8
var ErrInvalidTableName = errors.NewKind("Invalid table name %s. Table names must match the regular expression " + doltdb.TableNameRegexStr)
var ErrReservedTableName = errors.NewKind("Invalid table name %s. Table names beginning with `dolt_` are reserved for internal use")
var ErrSystemTableAlter = errors.NewKind("Cannot alter table %s: system tables cannot be dropped or altered")
const (
batched batchMode = true
single batchMode = false
batched commitBehavior = iota
single
)
type tableCache struct {
@@ -106,7 +106,8 @@ type Database struct {
defRoot *doltdb.RootValue
ddb *doltdb.DoltDB
rsr env.RepoStateReader
batchMode batchMode
rsw env.RepoStateWriter
batchMode commitBehavior
tc *tableCache
}
@@ -117,12 +118,13 @@ var _ sql.TableCreator = Database{}
var _ sql.TableRenamer = Database{}
// NewDatabase returns a new dolt database to use in queries.
func NewDatabase(name string, defRoot *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader) Database {
func NewDatabase(name string, defRoot *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader, rsw env.RepoStateWriter) Database {
return Database{
name: name,
defRoot: defRoot,
ddb: ddb,
rsr: rsr,
rsw: rsw,
batchMode: single,
tc: &tableCache{&sync.Mutex{}, make(map[*doltdb.RootValue]map[string]sql.Table)},
}
@@ -130,12 +132,13 @@ func NewDatabase(name string, defRoot *doltdb.RootValue, ddb *doltdb.DoltDB, rsr
// NewBatchedDatabase returns a new dolt database executing in batch insert mode. Integrators must call Flush() to
// commit any outstanding edits.
func NewBatchedDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader) Database {
func NewBatchedDatabase(name string, root *doltdb.RootValue, ddb *doltdb.DoltDB, rsr env.RepoStateReader, rsw env.RepoStateWriter) Database {
return Database{
name: name,
defRoot: root,
ddb: ddb,
rsr: rsr,
rsw: rsw,
batchMode: batched,
tc: &tableCache{&sync.Mutex{}, make(map[*doltdb.RootValue]map[string]sql.Table)},
}
@@ -438,7 +441,7 @@ func (db Database) GetRoot(ctx *sql.Context) (*doltdb.RootValue, error) {
return nil, err
}
dsess.dbRoots[db.name] = dbRoot{hashStr, newRoot}
dsess.dbRoots[db.name] = dbRoot{hashStr, newRoot, db.ddb, db.rsw}
return newRoot, nil
}
}
@@ -446,15 +449,6 @@ func (db Database) GetRoot(ctx *sql.Context) (*doltdb.RootValue, error) {
// Set a new root value for the database. Can be used if the dolt working
// set value changes outside of the basic SQL execution engine.
func (db Database) SetRoot(ctx *sql.Context, newRoot *doltdb.RootValue) error {
// Need to decide on what behavior we want here. Currently all sql-server processing is done
// in memory and is never written to disk. Can leave it like this and commit as part of a
// transaction, or something similar.
/*h, err := db.ddb.WriteRootValue(ctx, newRoot)
if err != nil {
return err
}*/
h, err := newRoot.HashOf()
if err != nil {
@@ -466,11 +460,23 @@ func (db Database) SetRoot(ctx *sql.Context, newRoot *doltdb.RootValue) error {
ctx.Session.Set(key, hashType, hashStr)
dsess := DSessFromSess(ctx.Session)
dsess.dbRoots[db.name] = dbRoot{hashStr, newRoot}
dsess.dbRoots[db.name] = dbRoot{hashStr, newRoot, db.ddb, db.rsw}
return nil
}
// LoadRootFromRepoState loads the root value from the repo state's working hash, then calls SetRoot with the loaded
// root value.
func (db Database) LoadRootFromRepoState(ctx *sql.Context) error {
workingHash := db.rsr.WorkingHash()
root, err := db.ddb.ReadRootValue(ctx, workingHash)
if err != nil {
return err
}
return db.SetRoot(ctx, root)
}
// DropTable drops the table with the name given
func (db Database) DropTable(ctx *sql.Context, tableName string) error {
root, err := db.GetRoot(ctx)
@@ -719,7 +725,7 @@ func RegisterSchemaFragments(ctx *sql.Context, db Database, root *doltdb.RootVal
if err != nil {
parseErrors = append(parseErrors, err)
} else {
ctx.Register(db.Name(), sql.NewView(name, cv.(*plan.CreateView).Definition))
ctx.Register(db.Name(), cv.(*plan.CreateView).Definition.AsView())
}
}
r, err = iter.Next()
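
For orientation, here is a minimal sketch of the new constructor signature and LoadRootFromRepoState used together. dEnv and sqlCtx are assumed to come from the same kind of test setup used elsewhere in this change, so read this as illustrative wiring rather than a canonical integration:

// Sketch only: assumes a populated *env.DoltEnv (dEnv) and a *sql.Context
// (sqlCtx) obtained from the sqle test helpers.
func exampleLoadRoot(ctx context.Context, dEnv *env.DoltEnv, sqlCtx *sql.Context) error {
	root, err := dEnv.WorkingRoot(ctx)
	if err != nil {
		return err
	}
	db := NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter())
	// LoadRootFromRepoState re-reads the working hash recorded in repo state
	// and installs the corresponding root on the session via SetRoot.
	return db.LoadRootFromRepoState(sqlCtx)
}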

View File

@@ -17,12 +17,16 @@ package sqle
import (
"github.com/src-d/go-mysql-server/sql"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
)
type dbRoot struct {
hashStr string
root *doltdb.RootValue
ddb *doltdb.DoltDB
rsw env.RepoStateWriter
}
// DoltSession is the sql.Session implementation used by dolt. It is accessible through a *sql.Context instance
@@ -33,7 +37,9 @@ type DoltSession struct {
// DefaultDoltSession creates a DoltSession object with default values
func DefaultDoltSession() *DoltSession {
return &DoltSession{sql.NewBaseSession(), make(map[string]dbRoot)}
sess := &DoltSession{sql.NewBaseSession(), make(map[string]dbRoot)}
sess.Set(sql.AutoCommitSessionVar, sql.Boolean, true)
return sess
}
// NewSessionWithDefaultRoot creates a DoltSession object from a standard sql.Session and 0 or more Database objects.
@@ -49,13 +55,35 @@ func NewSessionWithDefaultRoots(sqlSess sql.Session, dbs ...Database) (*DoltSess
hashStr := h.String()
dbRoots[db.Name()] = dbRoot{hashStr: hashStr, root: defRoot}
dbRoots[db.Name()] = dbRoot{hashStr: hashStr, root: defRoot, rsw: db.rsw, ddb: db.ddb}
}
return &DoltSession{sqlSess, dbRoots}, nil
sess := &DoltSession{sqlSess, dbRoots}
sess.Set(sql.AutoCommitSessionVar, sql.Boolean, true)
return sess, nil
}
// DSessFromSess retrieves a dolt session from a standard sql.Session
func DSessFromSess(sess sql.Session) *DoltSession {
return sess.(*DoltSession)
}
func (sess *DoltSession) CommitTransaction(ctx *sql.Context) error {
currentDb := sess.GetCurrentDatabase()
if currentDb == "" {
return sql.ErrNoDatabaseSelected.New()
}
dbRoot, ok := sess.dbRoots[currentDb]
if !ok {
return sql.ErrDatabaseNotFound.New(currentDb)
}
root := dbRoot.root
h, err := dbRoot.ddb.WriteRootValue(ctx, root)
if err != nil {
return err
}
return dbRoot.rsw.SetWorkingHash(ctx, h)
}
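
With ddb and rsw now carried on each dbRoot, the session can persist its own working set. A sketch of driving the new commit path, assuming a *sql.Context whose session was built by one of the constructors above:

// Sketch only: ctx.Session must be a *DoltSession with a current database set.
func exampleCommit(ctx *sql.Context) error {
	sess := DSessFromSess(ctx.Session)
	// CommitTransaction writes the session's root value for the current
	// database to its DoltDB, then records the resulting hash as the new
	// working hash via the RepoStateWriter.
	return sess.CommitTransaction(ctx)
}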

View File

@@ -23,7 +23,6 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/dolt/go/store/types"
)
// IndexDriver implementation. Not ready for prime time.
@@ -96,23 +95,18 @@ type doltIndex struct {
}
func (di *doltIndex) Get(key ...interface{}) (sql.IndexLookup, error) {
taggedVals, err := keyColsToTuple(di.sch, key)
if err != nil {
return nil, err
}
return &doltIndexLookup{di, taggedVals}, nil
}
func keyColsToTuple(sch schema.Schema, key []interface{}) (row.TaggedValues, error) {
if sch.GetPKCols().Size() != len(key) {
if di.sch.GetPKCols().Size() != len(key) {
return nil, errors.New("key must specify all columns")
}
var i int
taggedVals := make(row.TaggedValues)
err := sch.GetPKCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
taggedVals[tag] = keyColToValue(key[i], col)
err := di.sch.GetPKCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
val, err := col.TypeInfo.ConvertValueToNomsValue(key[i])
if err != nil {
return true, err
}
taggedVals[tag] = val
i++
return false, nil
})
@@ -121,63 +115,7 @@ func keyColsToTuple(sch schema.Schema, key []interface{}) (row.TaggedValues, err
return nil, err
}
return taggedVals, nil
}
func keyColToValue(v interface{}, column schema.Column) types.Value {
// TODO: type conversion
switch column.Kind {
case types.BoolKind:
return types.Bool(v.(bool))
case types.IntKind:
switch i := v.(type) {
case int:
return types.Int(i)
case int8:
return types.Int(i)
case int16:
return types.Int(i)
case int32:
return types.Int(i)
case int64:
return types.Int(i)
default:
panic(fmt.Sprintf("unhandled type %T", i))
}
case types.FloatKind:
return types.Float(v.(float64))
case types.UintKind:
switch i := v.(type) {
case int:
return types.Uint(i)
case int8:
return types.Uint(i)
case int16:
return types.Uint(i)
case int32:
return types.Uint(i)
case int64:
return types.Uint(i)
case uint:
return types.Uint(i)
case uint8:
return types.Uint(i)
case uint16:
return types.Uint(i)
case uint32:
return types.Uint(i)
case uint64:
return types.Uint(i)
default:
panic(fmt.Sprintf("unhandled type %T", i))
}
case types.UUIDKind:
panic("Implement me")
case types.StringKind:
return types.String(v.(string))
default:
panic(fmt.Sprintf("unhandled type %T", v))
}
return &doltIndexLookup{di, taggedVals}, nil
}
func (*doltIndex) Has(partition sql.Partition, key ...interface{}) (bool, error) {

View File

@@ -335,7 +335,7 @@ func resetEnv(root *doltdb.RootValue) *doltdb.RootValue {
}
func sqlNewEngine(ddb *doltdb.DoltDB, root *doltdb.RootValue) (*sqle.Engine, error) {
db := dsql.NewDatabase("dolt", root, ddb, nil)
db := dsql.NewDatabase("dolt", root, ddb, nil, nil)
engine := sqle.NewDefault()
engine.AddDatabase(db)

View File

@@ -63,7 +63,7 @@ func TestSqlBatchInserts(t *testing.T) {
CreateTestDatabase(dEnv, t)
root, _ := dEnv.WorkingRoot(ctx)
db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState)
db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter())
engine, sqlCtx, err := NewTestEngine(ctx, db, root)
require.NoError(t, err)
@@ -151,7 +151,7 @@ func TestSqlBatchInsertIgnoreReplace(t *testing.T) {
CreateTestDatabase(dEnv, t)
root, _ := dEnv.WorkingRoot(ctx)
db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState)
db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter())
engine, sqlCtx, err := NewTestEngine(ctx, db, root)
require.NoError(t, err)
@@ -189,7 +189,7 @@ func TestSqlBatchInsertErrors(t *testing.T) {
CreateTestDatabase(dEnv, t)
root, _ := dEnv.WorkingRoot(ctx)
db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState)
db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter())
engine, sqlCtx, err := NewTestEngine(ctx, db, root)
require.NoError(t, err)

View File

@@ -157,7 +157,7 @@ func TestTableEditor(t *testing.T) {
ctx := NewTestSQLCtx(context.Background())
root, _ := dEnv.WorkingRoot(context.Background())
db := NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState)
db := NewDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter())
err := db.SetRoot(ctx, root)
require.NoError(t, err)
peopleTable, _, err := db.GetTableInsensitive(ctx, "people")

View File

@@ -32,7 +32,7 @@ import (
// Executes all the SQL non-select statements given in the string against the root value given and returns the updated
// root, or an error. Statements in the input string are split by `;\n`
func ExecuteSql(dEnv *env.DoltEnv, root *doltdb.RootValue, statements string) (*doltdb.RootValue, error) {
db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState)
db := NewBatchedDatabase("dolt", root, dEnv.DoltDB, dEnv.RepoState, dEnv.RepoStateWriter())
engine, ctx, err := NewTestEngine(context.Background(), db, root)
if err != nil {
@@ -130,7 +130,7 @@ func NewTestEngine(ctx context.Context, db Database, root *doltdb.RootValue) (*s
// Executes the select statement given and returns the resulting rows, or an error if one is encountered.
// This uses the index functionality, which is not ready for prime time. Use with caution.
func ExecuteSelect(ddb *doltdb.DoltDB, root *doltdb.RootValue, query string) ([]sql.Row, error) {
db := NewDatabase("dolt", root, ddb, nil)
db := NewDatabase("dolt", root, ddb, nil, nil)
engine, ctx, err := NewTestEngine(context.Background(), db, root)
if err != nil {
return nil, err
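
Since ExecuteSql splits its input on `;\n`, each statement in a multi-statement script must end with a semicolon followed by a newline. A sketch with a hypothetical table name:

// Sketch only: dEnv and root come from the usual test setup, and the
// table "t" is hypothetical.
func exampleExecuteSql(dEnv *env.DoltEnv, root *doltdb.RootValue) (*doltdb.RootValue, error) {
	statements := "insert into t values (1);\n" +
		"insert into t values (2);"
	return ExecuteSql(dEnv, root, statements)
}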

View File

@@ -96,6 +96,11 @@ func (nmu *NomsMapUpdater) GetSchema() schema.Schema {
// WriteRow will write a row to a table
func (nmu *NomsMapUpdater) WriteRow(ctx context.Context, r row.Row) error {
return nmu.WriteEdit(ctx, r.NomsMapKey(nmu.sch), r.NomsMapValue(nmu.sch))
}
// WriteEdit will write an edit to a table's edit accumulator
func (nmu *NomsMapUpdater) WriteEdit(ctx context.Context, pk types.LesserValuable, fieldVals types.Valuable) error {
if nmu.acc == nil {
return errors.New("Attempting to write after closing.")
}
@@ -105,9 +110,6 @@ func (nmu *NomsMapUpdater) WriteRow(ctx context.Context, r row.Row) error {
}
err := func() error {
pk := r.NomsMapKey(nmu.sch)
fieldVals := r.NomsMapValue(nmu.sch)
nmu.acc.AddEdit(pk, fieldVals)
nmu.count++
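
After this refactor, WriteRow is a thin wrapper over the new WriteEdit entry point, so callers that already hold noms key/value pairs can feed the edit accumulator directly. Schematically:

// Sketch only: for a row r with schema nmu.sch, these two calls are
// interchangeable ways to enqueue the same edit.
_ = nmu.WriteRow(ctx, r)
_ = nmu.WriteEdit(ctx, r.NomsMapKey(nmu.sch), r.NomsMapValue(nmu.sch))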

View File

@@ -21,18 +21,25 @@ import (
"github.com/liquidata-inc/dolt/go/store/types"
)
const PRINTED_NULL = "<NULL>"
const PrintedNull = "<NULL>"
const NULL_PRINTING_STAGE = "null printing"
const NullPrintingStage = "null printing"
// NullPrinter is a utility to convert nil values in rows to a string representation.
type NullPrinter struct {
Sch schema.Schema
Sch schema.Schema
nullStr string
}
// NewNullPrinter returns a new null printer for the schema given, which must be string-typed (untyped).
func NewNullPrinter(sch schema.Schema) *NullPrinter {
return &NullPrinter{Sch: sch}
return &NullPrinter{Sch: sch, nullStr: PrintedNull}
}
// NewNullPrinterWithNullString returns a new null printer for the schema given, which must be string-typed, using the
// string given as the value to print for nulls.
func NewNullPrinterWithNullString(sch schema.Schema, nullStr string) *NullPrinter {
return &NullPrinter{Sch: sch, nullStr: nullStr}
}
// Function to convert any nil values for a row with the schema given to a string representation. Used as the transform
@@ -44,7 +51,7 @@ func (np *NullPrinter) ProcessRow(inRow row.Row, props pipeline.ReadableMap) (ro
if !types.IsNull(val) {
taggedVals[tag] = val
} else {
taggedVals[tag] = types.String(PRINTED_NULL)
taggedVals[tag] = types.String(np.nullStr)
}
return false, nil
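
A sketch of the two constructors side by side; sch is assumed to be a string-typed (untyped) schema, as the doc comments require:

// Sketch only: sch must be string-typed (untyped).
defaultPrinter := NewNullPrinter(sch)                 // nulls render as "<NULL>"
emptyPrinter := NewNullPrinterWithNullString(sch, "") // nulls render as empty strings, e.g. for CSV-style output
_, _ = defaultPrinter, emptyPrinter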

View File

@@ -84,32 +84,42 @@ func (ap *ArgParser) SupportOption(opt *Option) {
}
// Adds support for a new flag (argument with no value). See SupportOption for details on params.
func (ap *ArgParser) SupportsFlag(name, abbrev, desc string) {
func (ap *ArgParser) SupportsFlag(name, abbrev, desc string) *ArgParser {
opt := &Option{name, abbrev, "", OptionalFlag, desc, nil}
ap.SupportOption(opt)
return ap
}
// Adds support for a new string argument with the description given. See SupportOption for details on params.
func (ap *ArgParser) SupportsString(name, abbrev, valDesc, desc string) {
func (ap *ArgParser) SupportsString(name, abbrev, valDesc, desc string) *ArgParser {
opt := &Option{name, abbrev, valDesc, OptionalValue, desc, nil}
ap.SupportOption(opt)
return ap
}
func (ap *ArgParser) SupportsValidatedString(name, abbrev, valDesc, desc string, validator ValidationFunc) {
func (ap *ArgParser) SupportsValidatedString(name, abbrev, valDesc, desc string, validator ValidationFunc) *ArgParser {
opt := &Option{name, abbrev, valDesc, OptionalValue, desc, validator}
ap.SupportOption(opt)
return ap
}
// Adds support for a new uint argument with the description given. See SupportOption for details on params.
func (ap *ArgParser) SupportsUint(name, abbrev, valDesc, desc string) {
func (ap *ArgParser) SupportsUint(name, abbrev, valDesc, desc string) *ArgParser {
opt := &Option{name, abbrev, valDesc, OptionalValue, desc, isUintStr}
ap.SupportOption(opt)
return ap
}
// Adds support for a new int argument with the description given. See SupportOption for details on params.
func (ap *ArgParser) SupportsInt(name, abbrev, valDesc, desc string) {
func (ap *ArgParser) SupportsInt(name, abbrev, valDesc, desc string) *ArgParser {
opt := &Option{name, abbrev, valDesc, OptionalValue, desc, isIntStr}
ap.SupportOption(opt)
return ap
}
func splitOption(optStr string) (string, *string) {
@@ -135,7 +145,7 @@ func splitOption(optStr string) (string, *string) {
// methods. Any unrecognized arguments or incorrect types will result in an appropriate error being returned. If the
// universal --help or -h flag is found, an ErrHelp error is returned.
func (ap *ArgParser) Parse(args []string) (*ArgParseResults, error) {
var list []string
list := make([]string, 0, 16)
results := make(map[string]string)
i := 0
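
Returning the receiver makes parser construction chainable. A sketch with hypothetical option names, written as if in package argparser:

// Sketch only: option names here are hypothetical.
ap := NewArgParser().
	SupportsFlag("force", "f", "force the operation").
	SupportsString("message", "m", "msg", "message describing the change").
	SupportsInt("limit", "", "n", "maximum number of results")
apr, err := ap.Parse(args)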

View File

@@ -0,0 +1,92 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package argparser
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestArgParser(t *testing.T) {
tests := []struct {
ap *ArgParser
args []string
expectedErr error
expectedOptions map[string]string
expectedArgs []string
}{
{
NewArgParser(),
[]string{},
nil,
map[string]string{},
[]string{},
},
{
NewArgParser(),
[]string{"arg1", "arg2"},
nil,
map[string]string{},
[]string{"arg1", "arg2"},
},
{
NewArgParser(),
[]string{"--unknown_flag"},
UnknownArgumentParam{"unknown_flag"},
map[string]string{},
[]string{},
},
{
NewArgParser(),
[]string{"--help"},
ErrHelp,
map[string]string{},
[]string{},
},
{
NewArgParser(),
[]string{"-h"},
ErrHelp,
map[string]string{},
[]string{},
},
{
NewArgParser(),
[]string{"help"},
nil,
map[string]string{},
[]string{"help"},
},
{
NewArgParser().SupportsString("param", "p", "", ""),
[]string{"--param", "value", "arg1"},
nil,
map[string]string{"param": "value"},
[]string{"arg1"},
},
}
for _, test := range tests {
apr, err := test.ap.Parse(test.args)
require.Equal(t, test.expectedErr, err)
if err == nil {
assert.Equal(t, test.expectedOptions, apr.options)
assert.Equal(t, test.expectedArgs, apr.args)
}
}
}

View File

@@ -0,0 +1,67 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package set
import "sort"
type Uint64Set struct {
uints map[uint64]interface{}
}
func NewUint64Set(uints []uint64) *Uint64Set {
s := &Uint64Set{make(map[uint64]interface{}, len(uints))}
for _, b := range uints {
s.uints[b] = emptyInstance
}
return s
}
func (us *Uint64Set) Contains(i uint64) bool {
_, present := us.uints[i]
return present
}
func (us *Uint64Set) ContainsAll(uints []uint64) bool {
for _, b := range uints {
if _, present := us.uints[b]; !present {
return false
}
}
return true
}
func (us *Uint64Set) Add(i uint64) {
us.uints[i] = emptyInstance
}
func (us *Uint64Set) Remove(i uint64) {
delete(us.uints, i)
}
func (us *Uint64Set) AsSlice() []uint64 {
sl := make([]uint64, 0, us.Size())
for k := range us.uints {
sl = append(sl, k)
}
sort.Slice(sl, func(i, j int) bool { return sl[i] < sl[j] })
return sl
}
func (us *Uint64Set) Size() int {
return len(us.uints)
}

View File

@@ -0,0 +1,64 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package set
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestNewUint64Set(t *testing.T) {
initData := []uint64{0, 1, 2, 3}
us := NewUint64Set(initData)
// test .Size()
assert.Equal(t, 4, us.Size())
// test .Contains()
for _, id := range initData {
assert.True(t, us.Contains(id))
}
assert.False(t, us.Contains(19))
// test .ContainsAll()
assert.True(t, us.ContainsAll([]uint64{0, 1}))
assert.False(t, us.ContainsAll([]uint64{0, 1, 2, 19}))
// test .Add()
us.Add(6)
assert.True(t, us.Contains(6))
assert.Equal(t, 5, us.Size())
for _, id := range initData {
assert.True(t, us.Contains(id))
}
assert.True(t, us.ContainsAll(append(initData, 6)))
// test .Remove()
us.Remove(0)
assert.False(t, us.Contains(0))
assert.Equal(t, 4, us.Size())
us.Remove(19)
assert.Equal(t, 4, us.Size())
// test .AsSlice()
s := us.AsSlice()
assert.Equal(t, []uint64{1, 2, 3, 6}, s)
us.Add(4)
s = us.AsSlice()
assert.Equal(t, []uint64{1, 2, 3, 4, 6}, s)
}

go/store/types/decimal.go Normal file
View File

@@ -0,0 +1,109 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package types
import (
"context"
"github.com/shopspring/decimal"
"github.com/liquidata-inc/dolt/go/store/hash"
)
type Decimal decimal.Decimal
func (v Decimal) Value(ctx context.Context) (Value, error) {
return v, nil
}
func (v Decimal) Equals(other Value) bool {
v2, ok := other.(Decimal)
if !ok {
return false
}
return decimal.Decimal(v).Equal(decimal.Decimal(v2))
}
func (v Decimal) Less(nbf *NomsBinFormat, other LesserValuable) (bool, error) {
if v2, ok := other.(Decimal); ok {
return decimal.Decimal(v).LessThan(decimal.Decimal(v2)), nil
}
return DecimalKind < other.Kind(), nil
}
func (v Decimal) Hash(nbf *NomsBinFormat) (hash.Hash, error) {
return getHash(v, nbf)
}
func (v Decimal) isPrimitive() bool {
return true
}
func (v Decimal) WalkValues(ctx context.Context, cb ValueCallback) error {
return nil
}
func (v Decimal) WalkRefs(nbf *NomsBinFormat, cb RefCallback) error {
return nil
}
func (v Decimal) typeOf() (*Type, error) {
return PrimitiveTypeMap[DecimalKind], nil
}
func (v Decimal) Kind() NomsKind {
return DecimalKind
}
func (v Decimal) valueReadWriter() ValueReadWriter {
return nil
}
func (v Decimal) writeTo(w nomsWriter, nbf *NomsBinFormat) error {
encodedDecimal, err := decimal.Decimal(v).GobEncode()
if err != nil {
return err
}
err = DecimalKind.writeTo(w, nbf)
if err != nil {
return err
}
w.writeUint16(uint16(len(encodedDecimal)))
w.writeRaw(encodedDecimal)
return nil
}
func (v Decimal) readFrom(nbf *NomsBinFormat, b *binaryNomsReader) (Value, error) {
size := uint32(b.readUint16())
db := b.readBytes(size)
dec := decimal.Decimal{}
err := dec.GobDecode(db)
if err != nil {
return nil, err
}
return Decimal(dec), nil
}
func (v Decimal) skip(nbf *NomsBinFormat, b *binaryNomsReader) {
size := uint32(b.readUint16())
b.skipBytes(size)
}
func (v Decimal) HumanReadableString() string {
return decimal.Decimal(v).String()
}
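
A sketch of the new value type in use; equality delegates to shopspring/decimal, so numerically equal values with different textual forms compare equal:

// Sketch only: assumes imports "fmt" and "github.com/shopspring/decimal".
func exampleDecimal() {
	a := Decimal(decimal.RequireFromString("1.50"))
	b := Decimal(decimal.RequireFromString("1.5"))
	fmt.Println(a.Equals(b))             // true: comparison is numeric via decimal.Equal
	fmt.Println(a.HumanReadableString()) // "1.5": shopspring's String() trims trailing zeros
}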

View File

@@ -0,0 +1,34 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package types
import (
"testing"
"github.com/shopspring/decimal"
"github.com/stretchr/testify/require"
)
func TestDecimalLibraryEncoding(t *testing.T) {
expectedBytes := []byte{255, 255, 255, 250, 3, 25, 222, 110, 95, 84, 132}
dec := decimal.RequireFromString("-28443125.175428")
bytes, err := dec.GobEncode()
require.NoError(t, err)
require.Equal(t, expectedBytes, bytes)
expectedDec := decimal.Decimal{}
err = expectedDec.GobDecode(expectedBytes)
require.NoError(t, err)
require.True(t, expectedDec.Equal(dec))
}

View File

@@ -31,6 +31,7 @@ import (
"time"
"github.com/google/uuid"
"github.com/shopspring/decimal"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
@@ -1668,6 +1669,30 @@ func TestMapOrdering(t *testing.T) {
Timestamp(time.Unix(9000, 0).UTC()),
},
)
testMapOrder(assert, vrw,
PrimitiveTypeMap[DecimalKind], PrimitiveTypeMap[StringKind],
[]Value{
Decimal(decimal.RequireFromString("-99.125434")), String("unused"),
Decimal(decimal.RequireFromString("482.124")), String("unused"),
Decimal(decimal.RequireFromString("858093.12654")), String("unused"),
Decimal(decimal.RequireFromString("1")), String("unused"),
Decimal(decimal.RequireFromString("-99.125432")), String("unused"),
Decimal(decimal.RequireFromString("0")), String("unused"),
Decimal(decimal.RequireFromString("-123845")), String("unused"),
Decimal(decimal.RequireFromString("-99.125433")), String("unused"),
},
[]Value{
Decimal(decimal.RequireFromString("-123845")),
Decimal(decimal.RequireFromString("-99.125434")),
Decimal(decimal.RequireFromString("-99.125433")),
Decimal(decimal.RequireFromString("-99.125432")),
Decimal(decimal.RequireFromString("0")),
Decimal(decimal.RequireFromString("1")),
Decimal(decimal.RequireFromString("482.124")),
Decimal(decimal.RequireFromString("858093.12654")),
},
)
}
func TestMapEmpty(t *testing.T) {

View File

@@ -54,6 +54,7 @@ const (
TupleKind
InlineBlobKind
TimestampKind
DecimalKind
UnknownKind NomsKind = 255
)
@@ -79,6 +80,7 @@ var KindToType = map[NomsKind]Value{
TupleKind: EmptyTuple(Format_7_18),
InlineBlobKind: InlineBlob{},
TimestampKind: Timestamp{},
DecimalKind: Decimal{},
}
var KindToTypeSlice []Value
@@ -105,6 +107,7 @@ var KindToString = map[NomsKind]string{
TupleKind: "Tuple",
InlineBlobKind: "InlineBlob",
TimestampKind: "Timestamp",
DecimalKind: "Decimal",
}
// String returns the name of the kind.

View File

@@ -73,6 +73,7 @@ func TestTypeRefDescribe(t *testing.T) {
assert.Equal("Int", mustString(PrimitiveTypeMap[IntKind].Describe(context.Background())))
assert.Equal("Uint", mustString(PrimitiveTypeMap[UintKind].Describe(context.Background())))
assert.Equal("InlineBlob", mustString(PrimitiveTypeMap[InlineBlobKind].Describe(context.Background())))
assert.Equal("Decimal", mustString(PrimitiveTypeMap[DecimalKind].Describe(context.Background())))
assert.Equal("Map<String, Float>", mustString(mapType.Describe(context.Background())))
assert.Equal("Set<String>", mustString(setType.Describe(context.Background())))
@@ -93,6 +94,7 @@ func TestTypeOrdered(t *testing.T) {
assert.True(isKindOrderedByValue(PrimitiveTypeMap[IntKind].TargetKind()))
assert.True(isKindOrderedByValue(PrimitiveTypeMap[UintKind].TargetKind()))
assert.True(isKindOrderedByValue(PrimitiveTypeMap[InlineBlobKind].TargetKind()))
assert.True(isKindOrderedByValue(PrimitiveTypeMap[DecimalKind].TargetKind()))
assert.True(isKindOrderedByValue(TupleKind))
assert.False(isKindOrderedByValue(PrimitiveTypeMap[BlobKind].TargetKind()))

View File

@@ -6,6 +6,11 @@ set -o pipefail
script_dir=$(dirname "$0")
cd $script_dir/../..
docker run --rm -v `pwd`:/src golang:1.14.2-buster /bin/bash -c '
set -e
set -o pipefail
apt-get update && apt-get install -y zip
cd /src
BINS="dolt git-dolt git-dolt-smudge"
OSES="windows linux darwin"
ARCHS="386 amd64"
@@ -34,8 +39,9 @@ done
render_install_sh() {
local parsed=(`grep "Version = " ./cmd/dolt/dolt.go`)
local DOLT_VERSION=`eval echo ${parsed[2]}`
sed 's|__DOLT_VERSION__|'"$DOLT_VERSION"'|' utils/publishrelease/install.sh
sed '\''s|__DOLT_VERSION__|'\''"$DOLT_VERSION"'\''|'\'' utils/publishrelease/install.sh
}
render_install_sh > out/install.sh
chmod 755 out/install.sh
'

View File

@@ -76,6 +76,7 @@ enum ClientEventType {
BLAME = 45;
CREDS_CHECK = 46;
CREDS_USE = 47;
CREDS_IMPORT = 48;
}
enum MetricID {