Mirror of https://github.com/dolthub/dolt.git, synced 2026-01-06 00:39:40 -06:00
Moved most commands to use SQL, added CREATE TABLE LIKE, and removed tags from comments.
Committed by: Daylon Wilkins
Parent: a291c6736c
Commit: 39979b8f5b
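Note: the hunks below only exercise the schema-output changes (lowercase type names, no 'tag:N' comments); the CREATE TABLE LIKE support named in the commit message is not shown in these tests. As a minimal, hedged sketch of what that feature covers, assuming standard MySQL LIKE semantics and the dolt sql heredoc form used throughout these tests:

dolt sql <<SQL
CREATE TABLE test (
  pk BIGINT NOT NULL,
  c1 BIGINT,
  PRIMARY KEY (pk)
);
-- copy the schema (columns, keys) of test into a new, empty table
CREATE TABLE test_copy LIKE test;
SQL

After this commit, dolt schema show test_copy would be expected to print the copied schema with lowercase type names and without tag comments, matching the assertions updated below.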
@@ -591,23 +591,23 @@ DELIM
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "test @ working" ]] || false
|
||||
[[ "$output" =~ "CREATE TABLE \`test\`" ]] || false
|
||||
[[ "$output" =~ "\`pk\` BIGINT NOT NULL COMMENT 'tag:0'" ]] || false
|
||||
[[ "$output" =~ "\`c1\` BIGINT COMMENT 'tag:1'" ]] || false
|
||||
[[ "$output" =~ "\`c2\` BIGINT COMMENT 'tag:2'" ]] || false
|
||||
[[ "$output" =~ "\`c3\` BIGINT COMMENT 'tag:3'" ]] || false
|
||||
[[ "$output" =~ "\`c4\` BIGINT COMMENT 'tag:4'" ]] || false
|
||||
[[ "$output" =~ "\`c5\` BIGINT COMMENT 'tag:5'" ]] || false
|
||||
[[ "$output" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c2\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c3\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c4\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c5\` bigint" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
run dolt schema show test
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "test @ working" ]] || false
|
||||
[[ "$output" =~ "CREATE TABLE \`test\`" ]] || false
|
||||
[[ "$output" =~ "\`pk\` BIGINT NOT NULL COMMENT 'tag:0'" ]] || false
|
||||
[[ "$output" =~ "\`c1\` BIGINT COMMENT 'tag:1'" ]] || false
|
||||
[[ "$output" =~ "\`c2\` BIGINT COMMENT 'tag:2'" ]] || false
|
||||
[[ "$output" =~ "\`c3\` BIGINT COMMENT 'tag:3'" ]] || false
|
||||
[[ "$output" =~ "\`c4\` BIGINT COMMENT 'tag:4'" ]] || false
|
||||
[[ "$output" =~ "\`c5\` BIGINT COMMENT 'tag:5'" ]] || false
|
||||
[[ "$output" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c2\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c3\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c4\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c5\` bigint" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
}
|
||||
|
||||
|
||||
@@ -24,9 +24,9 @@ teardown() {
|
||||
[[ "$output" =~ "newcolumn" ]] || false
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`pk\` BIGINT NOT NULL COMMENT " ]] || false
|
||||
[[ "$output" =~ "\`a\` LONGTEXT COMMENT " ]] || false
|
||||
[[ "$output" =~ "\`b\` DATETIME COMMENT " ]] || false
|
||||
[[ "$output" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`a\` longtext" ]] || false
|
||||
[[ "$output" =~ "\`b\` datetime" ]] || false
|
||||
run dolt sql -q "select * from abc order by pk asc"
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "${lines[3]}" =~ " 1 " ]] || false
|
||||
@@ -35,9 +35,9 @@ teardown() {
|
||||
dolt checkout conflict
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`pk\` BIGINT NOT NULL COMMENT " ]] || false
|
||||
[[ "$output" =~ "\`a\` LONGTEXT COMMENT " ]] || false
|
||||
[[ "$output" =~ "\`b\` DATETIME COMMENT " ]] || false
|
||||
[[ "$output" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`a\` longtext" ]] || false
|
||||
[[ "$output" =~ "\`b\` datetime" ]] || false
|
||||
run dolt sql -q "select * from abc order by pk asc"
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "${lines[3]}" =~ " 1 " ]] || false
|
||||
@@ -49,10 +49,10 @@ teardown() {
|
||||
dolt checkout newcolumn
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`pk\` BIGINT NOT NULL COMMENT " ]] || false
|
||||
[[ "$output" =~ "\`a\` LONGTEXT COMMENT " ]] || false
|
||||
[[ "$output" =~ "\`b\` DATETIME COMMENT " ]] || false
|
||||
[[ "$output" =~ "\`c\` BIGINT UNSIGNED COMMENT " ]] || false
|
||||
[[ "$output" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`a\` longtext" ]] || false
|
||||
[[ "$output" =~ "\`b\` datetime" ]] || false
|
||||
[[ "$output" =~ "\`c\` bigint unsigned" ]] || false
|
||||
run dolt sql -q "select * from abc order by pk asc"
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "${lines[3]}" =~ " 1 " ]] || false
|
||||
|
||||
@@ -9,228 +9,45 @@ teardown() {
|
||||
teardown_common
|
||||
}
|
||||
|
||||
@test "Manually specifying tag numbers" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
c1 BIGINT COMMENT 'tag:5678',
|
||||
PRIMARY KEY (pk)
|
||||
);
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ $status -eq 0 ]
|
||||
[[ "$output" =~ "tag:1234" ]] || false
|
||||
[[ "$output" =~ "tag:5678" ]] || false
|
||||
}
|
||||
|
||||
@test "Users cannot partially specify tag numbers" {
|
||||
run dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
c1 BIGINT,
|
||||
PRIMARY KEY (pk)
|
||||
);
|
||||
SQL
|
||||
[ $status -ne 0 ]
|
||||
[[ "$output" =~ "must define tags for all or none of the schema columns" ]] || false
|
||||
}
|
||||
|
||||
@test "Renaming a column should preserve the tag number" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
c1 BIGINT COMMENT 'tag:5678',
|
||||
pk BIGINT NOT NULL,
|
||||
c1 BIGINT,
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
dolt sql -q "alter table test rename column c1 to c0"
|
||||
run dolt schema show
|
||||
run dolt schema tags -r=csv
|
||||
[ $status -eq 0 ]
|
||||
[[ "$output" =~ "tag:1234" ]] || false
|
||||
[[ "$output" =~ "tag:5678" ]] || false
|
||||
[[ "$output" =~ "test,c1,8201" ]] || false
|
||||
dolt sql -q "alter table test rename column c1 to c0"
|
||||
run dolt schema tags -r=csv
|
||||
[ $status -eq 0 ]
|
||||
[[ "$output" =~ "test,c0,8201" ]] || false
|
||||
}
|
||||
|
||||
@test "Renaming a table should preserve the tag number" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
c1 BIGINT COMMENT 'tag:5678',
|
||||
pk BIGINT NOT NULL,
|
||||
c1 BIGINT,
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
run dolt schema tags -r=csv
|
||||
[ $status -eq 0 ]
|
||||
[[ "$output" =~ "test,pk,3228" ]] || false
|
||||
[[ "$output" =~ "test,c1,8201" ]] || false
|
||||
dolt sql -q "alter table test rename to new_name"
|
||||
run dolt schema show
|
||||
run dolt schema tags -r=csv
|
||||
[ $status -eq 0 ]
|
||||
[[ "$output" =~ "new_name" ]] || false
|
||||
[[ "$output" =~ "tag:1234" ]] || false
|
||||
[[ "$output" =~ "tag:5678" ]] || false
|
||||
}
|
||||
|
||||
@test "Reusing a tag number should fail in create table" {
|
||||
run dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
c1 BIGINT COMMENT 'tag:1234',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
[ $status -ne 0 ]
|
||||
[[ "$output" =~ "two different columns with the same tag" ]] || false
|
||||
}
|
||||
|
||||
@test "Alter table should not allow duplicate tags" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
c1 BIGINT COMMENT 'tag:5678',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
run dolt sql -q "alter table test add column c0 bigint comment 'tag:8910'"
|
||||
run dolt schema show
|
||||
[ $status -eq 0 ]
|
||||
[[ "$output" =~ "tag:1234" ]] || false
|
||||
[[ "$output" =~ "tag:5678" ]] || false
|
||||
[[ "$output" =~ "tag:8910" ]] || false
|
||||
run dolt sql -q "alter table test add column c2 bigint comment 'tag:8910'"
|
||||
[ $status -ne 0 ]
|
||||
[[ "$output" =~ "Cannot create column c2, the tag 8910 was already used in table test" ]] || false
|
||||
}
|
||||
|
||||
@test "Cannot reuse tag number of deleted column" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
c1 BIGINT COMMENT 'tag:5678',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
dolt add .
|
||||
dolt commit -m 'create table test'
|
||||
dolt sql -q 'alter table test drop column c1'
|
||||
dolt add .
|
||||
dolt commit -m 'dropped column c1'
|
||||
run dolt sql -q "alter table test add column c2 int comment 'tag:5678'"
|
||||
[ $status -ne 0 ]
|
||||
[[ "$output" =~ "Cannot create column c2, the tag 5678 was already used in table test" ]] || false
|
||||
}
|
||||
|
||||
@test "Cannot reuse tag number of deleted column after table rename" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
c1 BIGINT COMMENT 'tag:5678',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
dolt add .
|
||||
dolt commit -m 'create table test'
|
||||
dolt sql -q 'alter table test drop column c1'
|
||||
dolt add .
|
||||
dolt commit -m 'dropped column c1'
|
||||
dolt sql -q 'alter table test rename to new_name'
|
||||
run dolt sql -q "alter table new_name add column c2 int comment 'tag:5678'"
|
||||
[ $status -ne 0 ]
|
||||
[[ "$output" =~ "Cannot create column c2, the tag 5678 was already used in table new_name" ]] || false
|
||||
}
|
||||
|
||||
@test "Cannot reuse tag number of deleted table" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE aaa (
|
||||
pk INT NOT NULL COMMENT 'tag:1234',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE bbb (
|
||||
pk INT NOT NULL COMMENT 'tag:5678',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
dolt add .
|
||||
dolt commit -m 'created tables aaa and bbb'
|
||||
dolt sql -q 'drop table aaa'
|
||||
dolt add .
|
||||
dolt commit -m 'dropped table aaa'
|
||||
run dolt sql <<SQL
|
||||
CREATE TABLE new_table (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
[ $status -ne 0 ]
|
||||
[[ "$output" =~ "Cannot create column pk, the tag 1234 was already used in table aaa" ]] || false
|
||||
run dolt sql -q "alter table bbb add column c1 int comment 'tag:1234'"
|
||||
[ $status -ne 0 ]
|
||||
[[ "$output" =~ "Cannot create column c1, the tag 1234 was already used in table aaa" ]] || false
|
||||
}
|
||||
|
||||
@test "Should not be able to reuse a committed tag number on a column with a different type" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
c1 BIGINT COMMENT 'tag:5678',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
dolt add test
|
||||
dolt commit -m "Committed test table"
|
||||
dolt sql -q "alter table test drop column c1"
|
||||
dolt add test
|
||||
dolt commit -m "Committed test table with c1 dropped"
|
||||
|
||||
# Adding the tag back with the same name and type should not be allowed
|
||||
run dolt sql -q "alter table test add column c1 bigint comment 'tag:5678'"
|
||||
[ $status -eq 1 ]
|
||||
|
||||
# Adding the tag back with a different name but same type should not be allowed
|
||||
run dolt sql -q "alter table test add column c2 bigint comment 'tag:5678'"
|
||||
[ $status -eq 1 ]
|
||||
|
||||
# Adding the tag back with a different type should error
|
||||
run dolt sql -q "alter table test add column c1 text comment 'tag:5678'"
|
||||
[ $status -ne 0 ]
|
||||
run dolt sql -q "alter table test add column c2 text comment 'tag:5678'"
|
||||
[ $status -ne 0 ]
|
||||
}
|
||||
|
||||
@test "Can drop and readd table before committing" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
c1 BIGINT COMMENT 'tag:5678',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
run dolt sql -q 'drop table test'
|
||||
[ $status -eq 0 ]
|
||||
run dolt sql <<SQL
|
||||
CREATE TABLE new_name (
|
||||
pk INT NOT NULL COMMENT 'tag:1234',
|
||||
c1 INT COMMENT 'tag:5678',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
[ $status -eq 0 ]
|
||||
run dolt schema show
|
||||
[ $status -eq 0 ]
|
||||
[[ "$output" =~ "new_name" ]] || false
|
||||
[[ "$output" =~ "tag:1234" ]] || false
|
||||
[[ "$output" =~ "tag:5678" ]] || false
|
||||
}
|
||||
|
||||
@test "Drop and create table should enforce tag reuse rules across versions" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
c1 BIGINT COMMENT 'tag:5678',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
dolt add test
|
||||
dolt commit -m "Committed test table"
|
||||
dolt sql -q "drop table test"
|
||||
run dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk LONGTEXT NOT NULL COMMENT 'tag:1234',
|
||||
c1 LONGTEXT COMMENT 'tag:5678',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
[ $status -ne 0 ]
|
||||
[[ "$output" =~ "new_name,pk,3228" ]] || false
|
||||
[[ "$output" =~ "new_name,c1,8201" ]] || false
|
||||
}
|
||||
|
||||
@test "Merging two branches that added same tag, name, type, and constraints" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:1234',
|
||||
c1 BIGINT COMMENT 'tag:5678',
|
||||
pk BIGINT NOT NULL,
|
||||
c1 BIGINT,
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
dolt add test
|
||||
@@ -238,13 +55,13 @@ SQL
|
||||
dolt branch branch1
|
||||
dolt branch branch2
|
||||
dolt checkout branch1
|
||||
dolt sql -q "alter table test add column c2 bigint comment 'tag:8910'"
|
||||
dolt sql -q "alter table test add column c2 bigint"
|
||||
dolt add test
|
||||
dolt commit -m "Added column c2 bigint tag:8910"
|
||||
dolt commit -m "Added column c2 bigint"
|
||||
dolt checkout branch2
|
||||
dolt sql -q "alter table test add column c2 bigint comment 'tag:8910'"
|
||||
dolt sql -q "alter table test add column c2 bigint"
|
||||
dolt add test
|
||||
dolt commit -m "Added column c2 bigint tag:8910"
|
||||
dolt commit -m "Added column c2 bigint"
|
||||
dolt checkout master
|
||||
dolt merge branch1
|
||||
dolt merge branch2
|
||||
@@ -262,13 +79,13 @@ SQL
|
||||
dolt branch branch1
|
||||
dolt branch branch2
|
||||
dolt checkout branch1
|
||||
dolt sql -q "alter table test add column c2 bigint comment 'tag:8910'"
|
||||
dolt sql -q "alter table test add column c2 bigint"
|
||||
dolt add test
|
||||
dolt commit -m "Added column c2 bigint tag:8910"
|
||||
dolt commit -m "Added column c2 bigint"
|
||||
dolt checkout branch2
|
||||
dolt sql -q "alter table test add column c2 longtext comment 'tag:8910'"
|
||||
dolt sql -q "alter table test add column c2 longtext"
|
||||
dolt add test
|
||||
dolt commit -m "Added column c2 longtext tag:8910"
|
||||
dolt commit -m "Added column c2 longtext"
|
||||
dolt checkout master
|
||||
dolt merge branch1
|
||||
run dolt merge branch2
|
||||
@@ -287,13 +104,14 @@ SQL
|
||||
dolt branch branch1
|
||||
dolt branch branch2
|
||||
dolt checkout branch1
|
||||
dolt sql -q "alter table test add column c2 bigint comment 'tag:8910'"
|
||||
dolt sql -q "alter table test add column c2 bigint"
|
||||
dolt add test
|
||||
dolt commit -m "Added column c2 bigint tag:8910"
|
||||
dolt commit -m "Added column c2 bigint"
|
||||
dolt checkout branch2
|
||||
dolt sql -q "alter table test add column c0 bigint comment 'tag:8910'"
|
||||
dolt sql -q "alter table test add column c2 bigint"
|
||||
dolt sql -q "alter table test rename column c2 to c0"
|
||||
dolt add test
|
||||
dolt commit -m "Added column c0 bigint tag:8910"
|
||||
dolt commit -m "Added column c0 bigint"
|
||||
dolt checkout master
|
||||
dolt merge branch1
|
||||
run dolt merge branch2
|
||||
@@ -312,25 +130,25 @@ SQL
|
||||
dolt branch branch1
|
||||
dolt branch branch2
|
||||
dolt checkout branch1
|
||||
dolt sql -q "alter table test add column c2 bigint comment 'tag:2'"
|
||||
dolt sql -q "alter table test add column c2 bigint"
|
||||
dolt sql -q "alter table test add column c3 double"
|
||||
dolt add test
|
||||
dolt commit -m "Added columns c2 bigint tag:8910 and c3 double to branch1"
|
||||
dolt commit -m "Added columns c2 bigint and c3 double to branch1"
|
||||
dolt checkout branch2
|
||||
dolt sql -q "alter table test add column c2 bigint comment 'tag:2'"
|
||||
dolt sql -q "alter table test add column c2 bigint"
|
||||
# column c3 will have the same tag on both branches due to deterministic tag generation
|
||||
dolt sql -q "alter table test add column c3 double"
|
||||
dolt add test
|
||||
dolt commit -m "Added columns c2 bigint tag:8910 and c3 double to branch2"
|
||||
dolt commit -m "Added columns c2 bigint and c3 double to branch2"
|
||||
dolt checkout master
|
||||
dolt merge branch1
|
||||
run dolt merge branch2
|
||||
[ $status -eq 0 ]
|
||||
run dolt schema show
|
||||
[[ "${lines[2]}" =~ "\`pk\` BIGINT NOT NULL COMMENT 'tag:0'" ]] || false
|
||||
[[ "${lines[3]}" =~ "\`c1\` BIGINT COMMENT 'tag:1'" ]] || false
|
||||
[[ "${lines[4]}" =~ "\`c2\` BIGINT COMMENT 'tag:2'" ]] || false
|
||||
[[ "${lines[5]}" =~ "\`c3\` DOUBLE COMMENT " ]] || false
|
||||
[[ "${lines[2]}" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "${lines[3]}" =~ "\`c1\` bigint" ]] || false
|
||||
[[ "${lines[4]}" =~ "\`c2\` bigint" ]] || false
|
||||
[[ "${lines[5]}" =~ "\`c3\` double" ]] || false
|
||||
}
|
||||
|
||||
@test "Merging branches that both created the same table succeeds" {
|
||||
@@ -361,34 +179,8 @@ SQL
|
||||
run dolt merge branch2
|
||||
[ $status -eq 0 ]
|
||||
run dolt schema show
|
||||
[[ "${lines[2]}" =~ "\`pk\` BIGINT NOT NULL COMMENT " ]] || false
|
||||
[[ "${lines[3]}" =~ "\`c1\` BIGINT COMMENT " ]] || false
|
||||
}
|
||||
|
||||
@test "Tags must be unique across tables" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE aaa (
|
||||
pk INT NOT NULL COMMENT 'tag:1234',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE bbb (
|
||||
pk INT NOT NULL COMMENT 'tag:5678',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
run dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk INT NOT NULL COMMENT 'tag:1234',
|
||||
c1 INT COMMENT 'tag:5678',
|
||||
PRIMARY KEY (pk));
|
||||
SQL
|
||||
[ $status -ne 0 ]
|
||||
[[ "$output" =~ "Cannot create column pk, the tag 1234 was already used in table aaa" ]] || false
|
||||
[[ "$output" =~ "Cannot create column c1, the tag 5678 was already used in table bbb" ]] || false
|
||||
|
||||
run dolt sql -q "ALTER TABLE aaa ADD COLUMN c1 INT COMMENT 'tag:5678';"
|
||||
[ $status -ne 0 ]
|
||||
[[ "$output" =~ "Cannot create column c1, the tag 5678 was already used in table bbb" ]] || false
|
||||
[[ "${lines[2]}" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "${lines[3]}" =~ "\`c1\` bigint" ]] || false
|
||||
}
|
||||
|
||||
@test "Deterministic tag generation produces consistent results" {
|
||||
@@ -405,11 +197,12 @@ SQL
|
||||
dolt commit -m "Committed test table"
|
||||
|
||||
# If anything changes to deterministic tag generation, this will break
|
||||
run dolt schema show
|
||||
[[ "${lines[2]}" =~ "COMMENT 'tag:10458'" ]] || false
|
||||
[[ "${lines[3]}" =~ "COMMENT 'tag:5951'" ]] || false
|
||||
[[ "${lines[4]}" =~ "COMMENT 'tag:10358'" ]] || false
|
||||
[[ "${lines[5]}" =~ "COMMENT 'tag:11314'" ]] || false
|
||||
run dolt schema tags -r=csv
|
||||
[ $status -eq 0 ]
|
||||
[[ "$output" =~ "test1,pk1,10458" ]] || false
|
||||
[[ "$output" =~ "test1,c1,5951" ]] || false
|
||||
[[ "$output" =~ "test1,c2,10358" ]] || false
|
||||
[[ "$output" =~ "test1,c3,11314" ]] || false
|
||||
}
|
||||
|
||||
@test "dolt table import -c uses deterministic tag generation" {
|
||||
@@ -420,13 +213,12 @@ a,b,c,d,e,f
|
||||
DELIM
|
||||
run dolt table import -c ints_table data.csv
|
||||
[ $status -eq 0 ]
|
||||
dolt schema show
|
||||
run dolt schema show
|
||||
run dolt schema tags -r=csv
|
||||
[ $status -eq 0 ]
|
||||
[[ "${lines[2]}" =~ "COMMENT 'tag:6302'" ]] || false
|
||||
[[ "${lines[3]}" =~ "COMMENT 'tag:12880'" ]] || false
|
||||
[[ "${lines[4]}" =~ "COMMENT 'tag:15463'" ]] || false
|
||||
[[ "${lines[5]}" =~ "COMMENT 'tag:14526'" ]] || false
|
||||
[[ "${lines[6]}" =~ "COMMENT 'tag:5634'" ]] || false
|
||||
[[ "${lines[7]}" =~ "COMMENT 'tag:12796'" ]] || false
|
||||
[[ "$output" =~ "ints_table,pk,6302" ]] || false
|
||||
[[ "$output" =~ "ints_table,c1,12880" ]] || false
|
||||
[[ "$output" =~ "ints_table,c2,15463" ]] || false
|
||||
[[ "$output" =~ "ints_table,c3,14526" ]] || false
|
||||
[[ "$output" =~ "ints_table,c4,5634" ]] || false
|
||||
[[ "$output" =~ "ints_table,c5,12796" ]] || false
|
||||
}
|
||||
|
||||
@@ -61,15 +61,14 @@ teardown() {
|
||||
dolt checkout init
|
||||
run dolt schema show abc
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "${lines[0]}" =~ "abc @ working" ]] || false
|
||||
[[ "${lines[1]}" =~ "CREATE TABLE \`abc\` (" ]] || false
|
||||
[[ "${lines[2]}" =~ " \`pk\` BIGINT NOT NULL COMMENT " ]] || false
|
||||
[[ "${lines[3]}" =~ " \`a\` LONGTEXT COMMENT " ]] || false
|
||||
[[ "${lines[4]}" =~ " \`b\` DOUBLE COMMENT " ]] || false
|
||||
[[ "${lines[5]}" =~ " \`w\` BIGINT COMMENT " ]] || false
|
||||
[[ "${lines[6]}" =~ " \`x\` BIGINT COMMENT " ]] || false
|
||||
[[ "${lines[7]}" =~ " PRIMARY KEY (\`pk\`)" ]] || false
|
||||
[[ "${lines[8]}" =~ ");" ]] || false
|
||||
[[ "${output,,}" =~ "abc @ working" ]] || false
|
||||
[[ "${output,,}" =~ "create table \`abc\` (" ]] || false
|
||||
[[ "${output,,}" =~ "\`pk\` bigint not null" ]] || false
|
||||
[[ "${output,,}" =~ "\`a\` longtext" ]] || false
|
||||
[[ "${output,,}" =~ "\`b\` double" ]] || false
|
||||
[[ "${output,,}" =~ "\`w\` bigint" ]] || false
|
||||
[[ "${output,,}" =~ "\`x\` bigint" ]] || false
|
||||
[[ "${output,,}" =~ "primary key (\`pk\`)" ]] || false
|
||||
}
|
||||
|
||||
@test "dolt sql 'select * from abc' on branch init" {
|
||||
@@ -94,15 +93,14 @@ teardown() {
|
||||
|
||||
run dolt schema show abc
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "${lines[0]}" =~ "abc @ working" ]] || false
|
||||
[[ "${lines[1]}" =~ "CREATE TABLE \`abc\` (" ]] || false
|
||||
[[ "${lines[2]}" =~ "\`pk\` BIGINT NOT NULL COMMENT " ]] || false
|
||||
[[ "${lines[3]}" =~ "\`a\` LONGTEXT COMMENT " ]] || false
|
||||
[[ "${lines[4]}" =~ "\`b\` DOUBLE COMMENT " ]] || false
|
||||
[[ "${lines[5]}" =~ "\`x\` BIGINT COMMENT " ]] || false
|
||||
[[ "${lines[6]}" =~ "\`y\` BIGINT COMMENT " ]] || false
|
||||
[[ "${lines[7]}" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
[[ "${lines[8]}" =~ ");" ]] || false
|
||||
[[ "${output,,}" =~ "abc @ working" ]] || false
|
||||
[[ "${output,,}" =~ "create table \`abc\` (" ]] || false
|
||||
[[ "${output,,}" =~ "\`pk\` bigint not null" ]] || false
|
||||
[[ "${output,,}" =~ "\`a\` longtext" ]] || false
|
||||
[[ "${output,,}" =~ "\`b\` double" ]] || false
|
||||
[[ "${output,,}" =~ "\`x\` bigint" ]] || false
|
||||
[[ "${output,,}" =~ "\`y\` bigint" ]] || false
|
||||
[[ "${output,,}" =~ "primary key (\`pk\`)" ]] || false
|
||||
}
|
||||
|
||||
|
||||
@@ -126,15 +124,14 @@ teardown() {
|
||||
dolt checkout other
|
||||
run dolt schema show abc
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "${lines[0]}" =~ "abc @ working" ]] || false
|
||||
[[ "${lines[1]}" =~ "CREATE TABLE \`abc\` (" ]] || false
|
||||
[[ "${lines[2]}" =~ "\`pk\` BIGINT NOT NULL COMMENT " ]] || false
|
||||
[[ "${lines[3]}" =~ "\`a\` LONGTEXT COMMENT " ]] || false
|
||||
[[ "${lines[4]}" =~ "\`b\` DOUBLE COMMENT " ]] || false
|
||||
[[ "${lines[5]}" =~ "\`w\` BIGINT COMMENT " ]] || false
|
||||
[[ "${lines[6]}" =~ "\`z\` BIGINT COMMENT " ]] || false
|
||||
[[ "${lines[7]}" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
[[ "${lines[8]}" =~ ");" ]] || false
|
||||
[[ "${output,,}" =~ "abc @ working" ]] || false
|
||||
[[ "${output,,}" =~ "create table \`abc\` (" ]] || false
|
||||
[[ "${output,,}" =~ "\`pk\` bigint not null" ]] || false
|
||||
[[ "${output,,}" =~ "\`a\` longtext" ]] || false
|
||||
[[ "${output,,}" =~ "\`b\` double" ]] || false
|
||||
[[ "${output,,}" =~ "\`w\` bigint" ]] || false
|
||||
[[ "${output,,}" =~ "\`z\` bigint" ]] || false
|
||||
[[ "${output,,}" =~ "primary key (\`pk\`)" ]] || false
|
||||
}
|
||||
|
||||
@test "dolt sql 'select * from abc' on branch other" {
|
||||
|
||||
@@ -11,11 +11,11 @@ dolt branch no-data
|
||||
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE abc (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:0',
|
||||
a LONGTEXT COMMENT 'tag:100',
|
||||
b DOUBLE COMMENT 'tag:101',
|
||||
w BIGINT COMMENT 'tag:102',
|
||||
x BIGINT COMMENT 'tag:103',
|
||||
pk BIGINT NOT NULL,
|
||||
a LONGTEXT,
|
||||
b DOUBLE,
|
||||
w BIGINT,
|
||||
x BIGINT,
|
||||
PRIMARY KEY (pk)
|
||||
);
|
||||
INSERT INTO abc VALUES (0, 'asdf', 1.1, 0, 0);
|
||||
@@ -32,7 +32,7 @@ dolt sql <<SQL
|
||||
DELETE FROM abc WHERE pk=1;
|
||||
INSERT INTO abc VALUES (3, 'data', 1.1, 0, 0);
|
||||
ALTER TABLE abc DROP COLUMN w;
|
||||
ALTER TABLE abc ADD COLUMN y BIGINT COMMENT 'tag:104';
|
||||
ALTER TABLE abc ADD COLUMN y BIGINT;
|
||||
SQL
|
||||
dolt add .
|
||||
dolt commit -m "made changes to master"
|
||||
@@ -42,7 +42,7 @@ dolt sql <<SQL
|
||||
DELETE FROM abc WHERE pk=2;
|
||||
INSERT INTO abc VALUES (4, 'data', 1.1, 0, 0);
|
||||
ALTER TABLE abc DROP COLUMN x;
|
||||
ALTER TABLE abc ADD COLUMN z BIGINT COMMENT 'tag:105';
|
||||
ALTER TABLE abc ADD COLUMN z BIGINT;
|
||||
SQL
|
||||
dolt add .
|
||||
dolt commit -m "made changes to other"
|
||||
|
||||
@@ -78,35 +78,58 @@ teardown() {
|
||||
@test "cp table with invalid name" {
|
||||
run dolt table cp test1 123
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "not a valid table name" ]] || false
|
||||
[[ "$output" =~ "Invalid table name" ]] || false
|
||||
run dolt table cp test1 dolt_docs
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "not a valid table name" ]] || false
|
||||
[[ "$output" =~ "Invalid table name" ]] || false
|
||||
[[ "$output" =~ "reserved" ]] || false
|
||||
run dolt table cp test1 dolt_query_catalog
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "not a valid table name" ]] || false
|
||||
[[ "$output" =~ "Invalid table name" ]] || false
|
||||
[[ "$output" =~ "reserved" ]] || false
|
||||
run dolt table cp test1 dolt_reserved
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "not a valid table name" ]] || false
|
||||
[[ "$output" =~ "Invalid table name" ]] || false
|
||||
[[ "$output" =~ "reserved" ]] || false
|
||||
}
|
||||
|
||||
@test "mv table with invalid name" {
|
||||
run dolt table mv test1 123
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "not a valid table name" ]] || false
|
||||
[[ "$output" =~ "Invalid table name" ]] || false
|
||||
run dolt table mv test1 dolt_docs
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "not a valid table name" ]] || false
|
||||
[[ "$output" =~ "Invalid table name" ]] || false
|
||||
[[ "$output" =~ "reserved" ]] || false
|
||||
run dolt table mv test1 dolt_query_catalog
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "not a valid table name" ]] || false
|
||||
[[ "$output" =~ "Invalid table name" ]] || false
|
||||
[[ "$output" =~ "reserved" ]] || false
|
||||
run dolt table mv test1 dolt_reserved
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "not a valid table name" ]] || false
|
||||
[[ "$output" =~ "Invalid table name" ]] || false
|
||||
[[ "$output" =~ "reserved" ]] || false
|
||||
}
|
||||
|
||||
@test "rm table" {
|
||||
run dolt table rm test1
|
||||
[ "$status" -eq 0 ]
|
||||
run dolt sql -q 'show tables';
|
||||
[ "$status" -eq 0 ]
|
||||
! [[ "$output" =~ "test1" ]] || false
|
||||
[[ "$output" =~ "test2" ]] || false
|
||||
}
|
||||
|
||||
@test "rm tables" {
|
||||
run dolt table rm test1 test2
|
||||
[ "$status" -eq 0 ]
|
||||
run dolt ls;
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "No tables" ]] || false
|
||||
}
|
||||
|
||||
@test "rm nonexistent table" {
|
||||
run dolt table rm abcdefz
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "not found" ]] || false
|
||||
}
|
||||
|
||||
@@ -320,7 +320,7 @@ teardown() {
|
||||
[[ "${#lines[@]}" = "3" ]] || false
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v1\` BIGINT DEFAULT (pk)" ]] || false
|
||||
[[ "$output" =~ "\`v1\` bigint DEFAULT (pk)" ]] || false
|
||||
}
|
||||
|
||||
@test "default-values: Column referenced with name change" {
|
||||
@@ -339,7 +339,7 @@ teardown() {
|
||||
[[ "${#lines[@]}" = "4" ]] || false
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v2\` BIGINT DEFAULT (v1y + 1)" ]] || false
|
||||
[[ "$output" =~ "\`v2\` bigint DEFAULT (v1y + 1)" ]] || false
|
||||
}
|
||||
|
||||
@test "default-values: Invalid literal for column type" {
|
||||
|
||||
@@ -671,18 +671,17 @@ SQL
|
||||
[[ "$output" =~ "system table" ]] || false
|
||||
}
|
||||
|
||||
@test "dolt schema command only does read operations for dolt_docs" {
|
||||
@test "dolt schema command does not show dolt_docs" {
|
||||
echo "a readme" > README.md
|
||||
echo "a license" > LICENSE.md
|
||||
dolt add .
|
||||
dolt commit -m "First commit of docs"
|
||||
run dolt schema export dolt_docs export.schema
|
||||
[ "$status" -eq 0 ]
|
||||
run dolt schema import -c --pks=pk dolt_docs `batshelper 1pk5col-ints.csv`
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "reserved" ]] || false
|
||||
run dolt schema show dolt_docs
|
||||
[ "$status" -eq 0 ]
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "not found" ]] || false
|
||||
run dolt schema show
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "No tables in working set" ]] || false
|
||||
|
||||
@@ -5,21 +5,21 @@ setup() {
|
||||
setup_common
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test_int (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:0',
|
||||
c1 BIGINT COMMENT 'tag:1',
|
||||
c2 BIGINT COMMENT 'tag:2',
|
||||
c3 BIGINT COMMENT 'tag:3',
|
||||
c4 BIGINT COMMENT 'tag:4',
|
||||
c5 BIGINT COMMENT 'tag:5',
|
||||
pk BIGINT NOT NULL,
|
||||
c1 BIGINT,
|
||||
c2 BIGINT,
|
||||
c3 BIGINT,
|
||||
c4 BIGINT,
|
||||
c5 BIGINT,
|
||||
PRIMARY KEY (pk)
|
||||
);
|
||||
CREATE TABLE test_string (
|
||||
pk LONGTEXT NOT NULL COMMENT 'tag:6',
|
||||
c1 LONGTEXT COMMENT 'tag:7',
|
||||
c2 LONGTEXT COMMENT 'tag:8',
|
||||
c3 LONGTEXT COMMENT 'tag:9',
|
||||
c4 LONGTEXT COMMENT 'tag:10',
|
||||
c5 LONGTEXT COMMENT 'tag:11',
|
||||
pk LONGTEXT NOT NULL,
|
||||
c1 LONGTEXT,
|
||||
c2 LONGTEXT,
|
||||
c3 LONGTEXT,
|
||||
c4 LONGTEXT,
|
||||
c5 LONGTEXT,
|
||||
PRIMARY KEY (pk)
|
||||
);
|
||||
SQL
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
DROP TABLE IF EXISTS `test_int`;
|
||||
CREATE TABLE `test_int` (
|
||||
`pk` BIGINT NOT NULL COMMENT 'tag:0',
|
||||
`c1` BIGINT COMMENT 'tag:1',
|
||||
`c2` BIGINT COMMENT 'tag:2',
|
||||
`c3` BIGINT COMMENT 'tag:3',
|
||||
`c4` BIGINT COMMENT 'tag:4',
|
||||
`c5` BIGINT COMMENT 'tag:5',
|
||||
`pk` bigint NOT NULL,
|
||||
`c1` bigint,
|
||||
`c2` bigint,
|
||||
`c3` bigint,
|
||||
`c4` bigint,
|
||||
`c5` bigint,
|
||||
PRIMARY KEY (`pk`)
|
||||
);
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
||||
INSERT INTO `test_int` (`pk`,`c1`,`c2`,`c3`,`c4`,`c5`) VALUES (0,1,2,3,4,5);
|
||||
|
||||
@@ -245,9 +245,9 @@ DELIM
|
||||
[ "${lines[1]}" = "0,1,2,3" ]
|
||||
run dolt schema export test
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "\`c1\` FLOAT" ]] || false
|
||||
[[ "$output" =~ "\`c2\` FLOAT" ]] || false
|
||||
[[ "$output" =~ "\`c3\` FLOAT" ]] || false
|
||||
[[ "$output" =~ "\`c1\` float" ]] || false
|
||||
[[ "$output" =~ "\`c2\` float" ]] || false
|
||||
[[ "$output" =~ "\`c3\` float" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
}
|
||||
|
||||
@@ -260,9 +260,9 @@ DELIM
|
||||
[ "${lines[1]}" = "0,1,2,3" ]
|
||||
run dolt schema export test
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "\`c1\` FLOAT" ]] || false
|
||||
[[ "$output" =~ "\`c2\` FLOAT" ]] || false
|
||||
[[ "$output" =~ "\`c3\` FLOAT" ]] || false
|
||||
[[ "$output" =~ "\`c1\` float" ]] || false
|
||||
[[ "$output" =~ "\`c2\` float" ]] || false
|
||||
[[ "$output" =~ "\`c3\` float" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
}
|
||||
|
||||
@@ -461,12 +461,12 @@ DELIM
|
||||
run dolt schema show test
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CREATE TABLE \`test\`" ]]
|
||||
[[ "$output" =~ "\`pk\` INT" ]]
|
||||
[[ "$output" =~ "\`str\` LONGTEXT" ]]
|
||||
[[ "$output" =~ "\`int\` INT UNSIGNED" ]]
|
||||
[[ "$output" =~ "\`bool\` BIT(1)" ]]
|
||||
[[ "$output" =~ "\`float\` FLOAT" ]]
|
||||
[[ "$output" =~ "\`date\` DATE" ]]
|
||||
[[ "$output" =~ "\`time\` TIME" ]]
|
||||
[[ "$output" =~ "\`datetime\` DATETIME" ]]
|
||||
[[ "$output" =~ "\`pk\` int" ]]
|
||||
[[ "$output" =~ "\`str\` longtext" ]]
|
||||
[[ "$output" =~ "\`int\` int unsigned" ]]
|
||||
[[ "$output" =~ "\`bool\` bit(1)" ]]
|
||||
[[ "$output" =~ "\`float\` float" ]]
|
||||
[[ "$output" =~ "\`date\` date" ]]
|
||||
[[ "$output" =~ "\`time\` time" ]]
|
||||
[[ "$output" =~ "\`datetime\` datetime" ]]
|
||||
}
|
||||
|
||||
bats/index.bats (126 changed lines)
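For orientation, a hedged sketch of the dolt schema show output these index tests now expect (KEY clauses instead of INDEX, lowercase type names, no tag comments), assuming MySQL-style SHOW CREATE TABLE rendering:

CREATE TABLE `test` (
  `pk` bigint NOT NULL,
  `v1` bigint,
  PRIMARY KEY (`pk`),
  KEY `v1` (`v1`)
);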
@@ -37,16 +37,16 @@ SQL
|
||||
[[ "$output" =~ "v1(v1)" ]] || false
|
||||
run dolt schema show test
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `v1` (`v1`)' ]] || false
|
||||
}
|
||||
|
||||
@test "index: CREATE TABLE UNIQUE INDEX" {
|
||||
@test "index: CREATE TABLE UNIQUE KEY" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test(
|
||||
pk BIGINT PRIMARY KEY,
|
||||
v1 BIGINT,
|
||||
v2 BIGINT,
|
||||
UNIQUE INDEX (v1)
|
||||
UNIQUE KEY (v1)
|
||||
);
|
||||
CREATE TABLE test2(
|
||||
pk BIGINT PRIMARY KEY,
|
||||
@@ -60,14 +60,14 @@ SQL
|
||||
[[ "$output" =~ "v1(v1)" ]] || false
|
||||
run dolt schema show test
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'UNIQUE INDEX `v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'UNIQUE KEY `v1` (`v1`)' ]] || false
|
||||
|
||||
run dolt index ls test2
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "v1(v1)" ]] || false
|
||||
run dolt schema show test2
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'UNIQUE INDEX `v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'UNIQUE KEY `v1` (`v1`)' ]] || false
|
||||
}
|
||||
|
||||
@test "index: CREATE TABLE INDEX named with comment" {
|
||||
@@ -84,7 +84,7 @@ SQL
|
||||
[[ "$output" =~ "idx_v1(v1, v2)" ]] || false
|
||||
run dolt schema show test
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`,`v2`)'" COMMENT 'hello there'" ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`,`v2`)'" COMMENT 'hello there'" ]] || false
|
||||
}
|
||||
|
||||
@test "index: CREATE TABLE INDEX multiple" {
|
||||
@@ -103,8 +103,8 @@ SQL
|
||||
[[ "$output" =~ "v1v2(v1, v2)" ]] || false
|
||||
run dolt schema show test
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'INDEX `v1v2` (`v1`,`v2`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `v1v2` (`v1`,`v2`)' ]] || false
|
||||
}
|
||||
|
||||
@test "index: CREATE INDEX then INSERT" {
|
||||
@@ -126,7 +126,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1 FROM onepk WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1" ]] || false
|
||||
@@ -151,7 +151,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show twopk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v` (`v2`,`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v` (`v2`,`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1, pk2 FROM twopk WHERE v2 = 61 AND v1 = 53" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1,pk2" ]] || false
|
||||
@@ -178,7 +178,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1 FROM onepk WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1" ]] || false
|
||||
@@ -203,7 +203,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show twopk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v` (`v2`,`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v` (`v2`,`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1, pk2 FROM twopk WHERE v2 = 61 AND v1 = 53" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1,pk2" ]] || false
|
||||
@@ -230,7 +230,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1 FROM onepk WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1" ]] || false
|
||||
@@ -255,7 +255,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show twopk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v` (`v2`,`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v` (`v2`,`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1, pk2 FROM twopk WHERE v2 = 61 AND v1 = 53" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1,pk2" ]] || false
|
||||
@@ -272,7 +272,7 @@ SQL
|
||||
[[ "$output" =~ "v1(v1)" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `v1` (`v1`)' ]] || false
|
||||
}
|
||||
|
||||
@test "index: INSERT then REPLACE" {
|
||||
@@ -503,7 +503,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
|
||||
dolt sql <<SQL
|
||||
INSERT INTO twopk VALUES (1, 99, 51, 63), (2, 11, 55, 64), (3, 88, 52, 61), (4, 22, 54, 65), (5, 77, 53, 61);
|
||||
@@ -523,7 +523,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show twopk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v` (`v2`,`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v` (`v2`,`v1`)' ]] || false
|
||||
}
|
||||
|
||||
@test "index: CREATE INDEX with same columns" {
|
||||
@@ -538,7 +538,7 @@ SQL
|
||||
! [[ "$output" =~ "idx_bad(v1)" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
! [[ "$output" =~ 'INDEX `idx_bad` (`v1`)' ]] || false
|
||||
! [[ "$output" =~ 'KEY `idx_bad` (`v1`)' ]] || false
|
||||
|
||||
dolt sql <<SQL
|
||||
INSERT INTO twopk VALUES (1, 99, 51, 63), (2, 11, 55, 64), (3, 88, 52, 61), (4, 22, 54, 65), (5, 77, 53, 61);
|
||||
@@ -551,7 +551,7 @@ SQL
|
||||
! [[ "$output" =~ "idx_bud(v2, v1)" ]] || false
|
||||
run dolt schema show twopk
|
||||
[ "$status" -eq "0" ]
|
||||
! [[ "$output" =~ 'INDEX `idx_bud` (`v2`,`v1`)' ]] || false
|
||||
! [[ "$output" =~ 'KEY `idx_bud` (`v2`,`v1`)' ]] || false
|
||||
}
|
||||
|
||||
@test "index: Disallow 'dolt_' name prefix" {
|
||||
@@ -574,16 +574,16 @@ SQL
|
||||
[[ "$output" =~ "idx_v1pk1(v1, pk1)" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'INDEX `idx_v1pk1` (`v1`,`pk1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1pk1` (`v1`,`pk1`)' ]] || false
|
||||
run dolt index ls twopk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "idx_v2v1(v2, v1)" ]] || false
|
||||
[[ "$output" =~ "idx_v1v2(v1, v2)" ]] || false
|
||||
run dolt schema show twopk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v2v1` (`v2`,`v1`)' ]] || false
|
||||
[[ "$output" =~ 'INDEX `idx_v1v2` (`v1`,`v2`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v2v1` (`v2`,`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1v2` (`v1`,`v2`)' ]] || false
|
||||
|
||||
dolt sql <<SQL
|
||||
DROP INDEX idx_v1 ON onepk;
|
||||
@@ -595,16 +595,16 @@ SQL
|
||||
! [[ "$output" =~ "idx_v1(v1)" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1pk1` (`v1`,`pk1`)' ]] || false
|
||||
! [[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1pk1` (`v1`,`pk1`)' ]] || false
|
||||
! [[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt index ls twopk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "idx_v1v2(v1, v2)" ]] || false
|
||||
! [[ "$output" =~ "idx_v2v1(v2, v1)" ]] || false
|
||||
run dolt schema show twopk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1v2` (`v1`,`v2`)' ]] || false
|
||||
! [[ "$output" =~ 'INDEX `idx_v2v1` (`v2`,`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1v2` (`v1`,`v2`)' ]] || false
|
||||
! [[ "$output" =~ 'KEY `idx_v2v1` (`v2`,`v1`)' ]] || false
|
||||
}
|
||||
|
||||
@test "index: ALTER TABLE DROP INDEX" {
|
||||
@@ -622,16 +622,16 @@ SQL
|
||||
! [[ "$output" =~ "idx_v1(v1)" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1pk1` (`v1`,`pk1`)' ]] || false
|
||||
! [[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1pk1` (`v1`,`pk1`)' ]] || false
|
||||
! [[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt index ls twopk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "idx_v1v2(v1, v2)" ]] || false
|
||||
! [[ "$output" =~ "idx_v2v1(v2, v1)" ]] || false
|
||||
run dolt schema show twopk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1v2` (`v1`,`v2`)' ]] || false
|
||||
! [[ "$output" =~ 'INDEX `idx_v2v1` (`v2`,`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1v2` (`v1`,`v2`)' ]] || false
|
||||
! [[ "$output" =~ 'KEY `idx_v2v1` (`v2`,`v1`)' ]] || false
|
||||
}
|
||||
|
||||
@test "index: ALTER TABLE RENAME INDEX" {
|
||||
@@ -649,9 +649,9 @@ SQL
|
||||
! [[ "$output" =~ "idx_v1(v1)" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1pk1` (`v1`,`pk1`)' ]] || false
|
||||
[[ "$output" =~ 'INDEX `idx_vfirst` (`v1`)' ]] || false
|
||||
! [[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1pk1` (`v1`,`pk1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_vfirst` (`v1`)' ]] || false
|
||||
! [[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
}
|
||||
|
||||
@test "index: RENAME TABLE" {
|
||||
@@ -670,8 +670,8 @@ SQL
|
||||
[[ "$output" =~ "idx_v1v2(v1, v2)" ]] || false
|
||||
run dolt schema show newpk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'INDEX `idx_v1v2` (`v1`,`v2`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1v2` (`v1`,`v2`)' ]] || false
|
||||
run dolt index cat newpk idx_v1 -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "v1,pk1" ]] || false
|
||||
@@ -712,8 +712,8 @@ SQL
|
||||
[[ "$output" =~ "idx_v1v2(v1, v2)" ]] || false
|
||||
run dolt schema show newpk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'INDEX `idx_v1v2` (`v1`,`v2`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1v2` (`v1`,`v2`)' ]] || false
|
||||
run dolt index cat newpk idx_v1 -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "v1,pk1" ]] || false
|
||||
@@ -785,7 +785,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk_new
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1 FROM onepk_new WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1" ]] || false
|
||||
@@ -809,7 +809,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "1" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1 FROM onepk WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1" ]] || false
|
||||
@@ -1686,7 +1686,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT * FROM onepk WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1,v1,v2,v3" ]] || false
|
||||
@@ -1704,7 +1704,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show twopk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v` (`v2`,`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v` (`v2`,`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT * FROM twopk WHERE v2 = 61 AND v1 = 53" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1,pk2,v1,v2,v3" ]] || false
|
||||
@@ -1738,8 +1738,8 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk_new
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`vnew`)' ]] || false
|
||||
[[ "$output" =~ 'INDEX `idx_v2v1` (`v2`,`vnew`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`vnew`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v2v1` (`v2`,`vnew`)' ]] || false
|
||||
run dolt sql -q "SELECT * FROM onepk_new WHERE vnew = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1,vnew,v2" ]] || false
|
||||
@@ -1773,8 +1773,8 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk_new
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'INDEX `idx_v2v1` (`v2`,`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v2v1` (`v2`,`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT * FROM onepk_new WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1,v1,v2" ]] || false
|
||||
@@ -1803,8 +1803,8 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`vnew`)' ]] || false
|
||||
[[ "$output" =~ 'INDEX `idx_v2v1` (`v2`,`vnew`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`vnew`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v2v1` (`v2`,`vnew`)' ]] || false
|
||||
run dolt sql -q "SELECT * FROM onepk WHERE vnew = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1,vnew,v2" ]] || false
|
||||
@@ -1833,8 +1833,8 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'INDEX `idx_v2v1` (`v2`,`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v2v1` (`v2`,`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT * FROM onepk WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1,v1,v2" ]] || false
|
||||
@@ -1863,8 +1863,8 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`vnew`)' ]] || false
|
||||
[[ "$output" =~ 'INDEX `idx_v2v1` (`v2`,`vnew`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`vnew`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v2v1` (`v2`,`vnew`)' ]] || false
|
||||
run dolt sql -q "SELECT * FROM onepk WHERE vnew = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1,vnew,v2" ]] || false
|
||||
@@ -1889,10 +1889,10 @@ SQL
|
||||
! [[ "$output" =~ "idx_v2v1(v2, v1)" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v2` (`v2`)' ]] || false
|
||||
! [[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
! [[ "$output" =~ 'INDEX `idx_v2v1` (`v2`,`v1`)' ]] || false
|
||||
! [[ "$output" =~ 'INDEX `idx_v1v2` (`v1`,`v2`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v2` (`v2`)' ]] || false
|
||||
! [[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
! [[ "$output" =~ 'KEY `idx_v2v1` (`v2`,`v1`)' ]] || false
|
||||
! [[ "$output" =~ 'KEY `idx_v1v2` (`v1`,`v2`)' ]] || false
|
||||
run dolt index cat onepk idx_v2 -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "v2,pk1" ]] || false
|
||||
@@ -1942,7 +1942,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1 FROM onepk WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1" ]] || false
|
||||
@@ -1970,7 +1970,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'UNIQUE INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'UNIQUE KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1 FROM onepk WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1" ]] || false
|
||||
@@ -2010,7 +2010,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1 FROM onepk WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1" ]] || false
|
||||
@@ -2038,7 +2038,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1 FROM onepk WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1" ]] || false
|
||||
@@ -2063,7 +2063,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'UNIQUE INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'UNIQUE KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1 FROM onepk WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1" ]] || false
|
||||
@@ -2098,7 +2098,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "6" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'UNIQUE INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'UNIQUE KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT pk1 FROM onepk WHERE v1 = 77" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1" ]] || false
|
||||
@@ -2142,7 +2142,7 @@ SQL
|
||||
[[ "${#lines[@]}" = "9" ]] || false
|
||||
run dolt schema show onepk
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ 'INDEX `idx_v1` (`v1`)' ]] || false
|
||||
[[ "$output" =~ 'KEY `idx_v1` (`v1`)' ]] || false
|
||||
run dolt sql -q "SELECT * FROM onepk WHERE v1 = 55" -r=csv
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "pk1,v1,v2" ]] || false
|
||||
|
||||
@@ -42,14 +42,14 @@ teardown() {
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "test @ working" ]] || false
|
||||
[[ "$output" =~ "CREATE TABLE \`test\`" ]] || false
|
||||
[[ "$output" =~ "\`pk\` BIGINT NOT NULL COMMENT 'tag:0'" ]] || false
|
||||
[[ "$output" =~ "\`c2\` BIGINT COMMENT 'tag:2'" ]] || false
|
||||
[[ "$output" =~ "\`c3\` BIGINT COMMENT 'tag:3'" ]] || false
|
||||
[[ "$output" =~ "\`c4\` BIGINT COMMENT 'tag:4'" ]] || false
|
||||
[[ "$output" =~ "\`c5\` BIGINT COMMENT 'tag:5'" ]] || false
|
||||
[[ "$output" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c2\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c3\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c4\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c5\` bigint" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
[[ "$output" =~ "\`c0\` BIGINT COMMENT 'tag:1'" ]] || false
|
||||
[[ ! "$output" =~ "\`c1\` BIGINT COMMENT 'tag:1'" ]] || false
|
||||
[[ "$output" =~ "\`c0\` bigint" ]] || false
|
||||
[[ ! "$output" =~ "\`c1\` bigint" ]] || false
|
||||
dolt sql -q "select * from test"
|
||||
}
|
||||
|
||||
@@ -61,13 +61,13 @@ teardown() {
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "test @ working" ]] || false
|
||||
[[ "$output" =~ "CREATE TABLE \`test\`" ]] || false
|
||||
[[ "$output" =~ "\`pk\` BIGINT NOT NULL COMMENT 'tag:0'" ]] || false
|
||||
[[ "$output" =~ "\`c2\` BIGINT COMMENT 'tag:2'" ]] || false
|
||||
[[ "$output" =~ "\`c3\` BIGINT COMMENT 'tag:3'" ]] || false
|
||||
[[ "$output" =~ "\`c4\` BIGINT COMMENT 'tag:4'" ]] || false
|
||||
[[ "$output" =~ "\`c5\` BIGINT COMMENT 'tag:5'" ]] || false
|
||||
[[ "$output" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c2\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c3\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c4\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c5\` bigint" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
[[ ! "$output" =~ "\`c1\` BIGINT COMMENT 'tag:1'" ]] || false
|
||||
[[ ! "$output" =~ "\`c1\` bigint" ]] || false
|
||||
dolt sql -q "select * from test"
|
||||
}
|
||||
|
||||
|
||||
@@ -6,24 +6,24 @@ setup() {
|
||||
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test1 (
|
||||
pk BIGINT NOT NULL COMMENT 'tag:0',
|
||||
c1 BIGINT COMMENT 'tag:1',
|
||||
c2 BIGINT COMMENT 'tag:2',
|
||||
c3 BIGINT COMMENT 'tag:3',
|
||||
c4 BIGINT COMMENT 'tag:4',
|
||||
c5 BIGINT COMMENT 'tag:5',
|
||||
pk BIGINT NOT NULL,
|
||||
c1 BIGINT,
|
||||
c2 BIGINT,
|
||||
c3 BIGINT,
|
||||
c4 BIGINT,
|
||||
c5 BIGINT,
|
||||
PRIMARY KEY (pk)
|
||||
);
|
||||
SQL
|
||||
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test2 (
|
||||
\`pk\` BIGINT NOT NULL COMMENT 'tag:6',
|
||||
\`int\` INT COMMENT 'tag:7',
|
||||
\`string\` TEXT COMMENT 'tag:8',
|
||||
\`boolean\` BOOLEAN COMMENT 'tag:9',
|
||||
\`float\` DOUBLE COMMENT 'tag:10',
|
||||
\`uint\` BIGINT UNSIGNED COMMENT 'tag:11',
|
||||
\`pk\` BIGINT NOT NULL,
|
||||
\`int\` INT,
|
||||
\`string\` TEXT,
|
||||
\`boolean\` BOOLEAN,
|
||||
\`float\` DOUBLE,
|
||||
\`uint\` BIGINT UNSIGNED,
|
||||
PRIMARY KEY (pk)
|
||||
);
|
||||
SQL
|
||||
@@ -82,12 +82,3 @@ teardown() {
|
||||
[[ ! "$output" =~ "working" ]] || false
|
||||
[[ ! "$output" =~ "dolt_" ]] || false
|
||||
}
|
||||
|
||||
@test "dolt schema export --with-tags" {
|
||||
run dolt schema export
|
||||
[ "$status" -eq 0 ]
|
||||
[[ ! "$output" =~ "COMMENT 'tag:" ]] || false
|
||||
run dolt schema export --with-tags
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "COMMENT 'tag:" ]] || false
|
||||
}
|
||||
|
||||
@@ -45,12 +45,12 @@ teardown() {
|
||||
[ "$status" -eq 0 ]
|
||||
[ "${#lines[@]}" -eq 10 ]
|
||||
[[ "${lines[0]}" =~ "test" ]] || false
|
||||
[[ "$output" =~ "\`pk\` INT" ]] || false
|
||||
[[ "$output" =~ "\`c1\` INT" ]] || false
|
||||
[[ "$output" =~ "\`c2\` INT" ]] || false
|
||||
[[ "$output" =~ "\`c3\` INT" ]] || false
|
||||
[[ "$output" =~ "\`c4\` INT" ]] || false
|
||||
[[ "$output" =~ "\`c5\` INT" ]] || false
|
||||
[[ "$output" =~ "\`pk\` int" ]] || false
|
||||
[[ "$output" =~ "\`c1\` int" ]] || false
|
||||
[[ "$output" =~ "\`c2\` int" ]] || false
|
||||
[[ "$output" =~ "\`c3\` int" ]] || false
|
||||
[[ "$output" =~ "\`c4\` int" ]] || false
|
||||
[[ "$output" =~ "\`c5\` int" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
}
|
||||
|
||||
@@ -101,13 +101,13 @@ DELIM
|
||||
[ "$status" -eq 0 ]
|
||||
[ "${#lines[@]}" -eq 11 ]
|
||||
[[ "${lines[0]}" =~ "test" ]] || false
|
||||
[[ "$output" =~ "\`pk\` INT" ]] || false
|
||||
[[ "$output" =~ "\`int\` INT" ]] || false
|
||||
[[ "$output" =~ "\`string\` LONGTEXT" ]] || false
|
||||
[[ "$output" =~ "\`boolean\` BIT(1)" ]] || false
|
||||
[[ "$output" =~ "\`float\` FLOAT" ]] || false
|
||||
[[ "$output" =~ "\`uint\` INT" ]] || false
|
||||
[[ "$output" =~ "\`uuid\` CHAR(36) CHARACTER SET ascii COLLATE ascii_bin" ]] || false
|
||||
[[ "$output" =~ "\`pk\` int" ]] || false
|
||||
[[ "$output" =~ "\`int\` int" ]] || false
|
||||
[[ "$output" =~ "\`string\` longtext" ]] || false
|
||||
[[ "$output" =~ "\`boolean\` bit(1)" ]] || false
|
||||
[[ "$output" =~ "\`float\` float" ]] || false
|
||||
[[ "$output" =~ "\`uint\` int" ]] || false
|
||||
[[ "$output" =~ "\`uuid\` char(36) character set ascii collate ascii_bin" ]] || false
|
||||
}
|
||||
|
||||
@test "schema import with invalid names" {
|
||||
@@ -141,13 +141,13 @@ DELIM
run dolt schema show
[ "${#lines[@]}" -eq 11 ]
[[ "${lines[0]}" =~ "test" ]] || false
[[ "$output" =~ "\`pk1\` INT" ]] || false
[[ "$output" =~ "\`pk2\` INT" ]] || false
[[ "$output" =~ "\`c1\` INT" ]] || false
[[ "$output" =~ "\`c2\` INT" ]] || false
[[ "$output" =~ "\`c3\` INT" ]] || false
[[ "$output" =~ "\`c4\` INT" ]] || false
[[ "$output" =~ "\`c5\` INT" ]] || false
[[ "$output" =~ "\`pk1\` int" ]] || false
[[ "$output" =~ "\`pk2\` int" ]] || false
[[ "$output" =~ "\`c1\` int" ]] || false
[[ "$output" =~ "\`c2\` int" ]] || false
[[ "$output" =~ "\`c3\` int" ]] || false
[[ "$output" =~ "\`c4\` int" ]] || false
[[ "$output" =~ "\`c5\` int" ]] || false
[[ "$output" =~ "PRIMARY KEY (\`pk1\`,\`pk2\`)" ]] || false
}
@@ -9,7 +9,7 @@ teardown() {
|
||||
teardown_common
|
||||
}
|
||||
|
||||
@test "create a single primary key table" {
|
||||
@test "sql-create-tables: create a single primary key table" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL,
|
||||
@@ -26,7 +26,7 @@ SQL
|
||||
[[ "$output" =~ "test" ]] || false
|
||||
}
|
||||
|
||||
@test "create a two primary key table" {
|
||||
@test "sql-create-tables: create a two primary key table" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk1 BIGINT NOT NULL,
|
||||
@@ -44,7 +44,7 @@ SQL
|
||||
[[ "$output" =~ "test" ]] || false
|
||||
}
|
||||
|
||||
@test "create a table that uses all supported types" {
|
||||
@test "sql-create-tables: create a table that uses all supported types" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
\`pk\` BIGINT NOT NULL,
|
||||
@@ -62,7 +62,7 @@ SQL
|
||||
[[ "$output" =~ "test" ]] || false
|
||||
}
|
||||
|
||||
@test "create a table that uses unsupported poop type" {
|
||||
@test "sql-create-tables: create a table that uses unsupported poop type" {
|
||||
run dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
\`pk\` BIGINT NOT NULL,
|
||||
@@ -80,7 +80,7 @@ SQL
|
||||
[ "$status" -eq 1 ]
|
||||
}
|
||||
|
||||
@test "create a repo with two tables" {
|
||||
@test "sql-create-tables: create a repo with two tables" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test1 (
|
||||
pk BIGINT NOT NULL,
|
||||
@@ -115,7 +115,7 @@ SQL
|
||||
[[ "$output" =~ "test2" ]] || false
|
||||
}
|
||||
|
||||
@test "create a basic table (int types) using sql" {
|
||||
@test "sql-create-tables: create a basic table (int types) using sql" {
|
||||
run dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT,
|
||||
@@ -134,16 +134,16 @@ SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CREATE TABLE \`test\`" ]] || false
|
||||
[[ "$output" =~ "\`pk\` BIGINT NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`c2\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`c3\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`c4\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`c5\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c2\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c3\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c4\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c5\` bigint" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
}
|
||||
|
||||
@test "create a table with sql with multiple primary keys" {
|
||||
@test "sql-create-tables: create a table with sql with multiple primary keys" {
|
||||
run dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk1 BIGINT,
|
||||
@@ -161,17 +161,17 @@ SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CREATE TABLE \`test\`" ]] || false
|
||||
[[ "$output" =~ "\`pk1\` BIGINT NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`pk2\` BIGINT NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`c2\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`c3\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`c4\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`c5\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`pk1\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`pk2\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c2\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c3\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c4\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c5\` bigint" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk1\`,\`pk2\`)" ]] || false
|
||||
}
|
||||
|
||||
@test "create a table using sql with not null constraint" {
|
||||
@test "sql-create-tables: create a table using sql with not null constraint" {
|
||||
run dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL,
|
||||
@@ -187,16 +187,16 @@ SQL
|
||||
run dolt schema show test
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CREATE TABLE \`test\`" ]] || false
|
||||
[[ "$output" =~ "\`pk\` BIGINT NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`c2\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`c3\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`c4\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`c5\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c2\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c3\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c4\` bigint" ]] || false
|
||||
[[ "$output" =~ "\`c5\` bigint" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
}
|
||||
|
||||
@test "create a table using sql with a float" {
|
||||
@test "sql-create-tables: create a table using sql with a float" {
|
||||
run dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL,
|
||||
@@ -208,13 +208,13 @@ SQL
|
||||
run dolt schema show test
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CREATE TABLE \`test\` " ]] || false
|
||||
[[ "$output" =~ "\`pk\` BIGINT NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` DOUBLE" ]] || false
|
||||
[[ "$output" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` double" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
}
|
||||
|
||||
|
||||
@test "create a table using sql with a string" {
|
||||
@test "sql-create-tables: create a table using sql with a string" {
|
||||
run dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL,
|
||||
@@ -226,33 +226,33 @@ SQL
|
||||
run dolt schema show test
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CREATE TABLE \`test\`" ]] || false
|
||||
[[ "$output" =~ "\`pk\` BIGINT NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` LONGTEXT" ]] || false
|
||||
[[ "$output" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` longtext" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
}
|
||||
|
||||
|
||||
@test "create a table using sql with an unsigned int" {
|
||||
@test "sql-create-tables: create a table using sql with an unsigned int" {
|
||||
run dolt sql -q "CREATE TABLE test (pk BIGINT NOT NULL, c1 BIGINT UNSIGNED, PRIMARY KEY (pk))"
|
||||
[ "$status" -eq 0 ]
|
||||
[ -z "$output" ]
|
||||
run dolt schema show test
|
||||
[[ "$output" =~ "BIGINT UNSIGNED" ]] || false
|
||||
[[ "$output" =~ "bigint unsigned" ]] || false
|
||||
}
|
||||
|
||||
@test "create a table using sql with a boolean" {
|
||||
@test "sql-create-tables: create a table using sql with a boolean" {
|
||||
run dolt sql -q "CREATE TABLE test (pk BIGINT NOT NULL, c1 BOOLEAN, PRIMARY KEY (pk))"
|
||||
[ "$status" -eq 0 ]
|
||||
[ -z "$output" ]
|
||||
}
|
||||
|
||||
@test "create a table with a mispelled primary key" {
|
||||
@test "sql-create-tables: create a table with a mispelled primary key" {
|
||||
run dolt sql -q "CREATE TABLE test (pk BIGINT, c1 BIGINT, c2 BIGINT, PRIMARY KEY
|
||||
(pk,noexist))"
|
||||
[ "$status" -eq 1 ]
|
||||
}
|
||||
|
||||
@test "create a table with a SQL reserved word" {
|
||||
@test "sql-create-tables: create a table with a SQL reserved word" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk INT NOT NULL,
|
||||
@@ -276,7 +276,7 @@ SQL
|
||||
[ "$status" -ne 0 ]
|
||||
}
|
||||
|
||||
@test "create a table with a SQL keyword that is not reserved" {
|
||||
@test "sql-create-tables: create a table with a SQL keyword that is not reserved" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk INT NOT NULL,
|
||||
@@ -306,7 +306,7 @@ SQL
|
||||
}
|
||||
|
||||
|
||||
@test "create two table with the same name" {
|
||||
@test "sql-create-tables: create two table with the same name" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test (
|
||||
pk BIGINT NOT NULL,
|
||||
@@ -331,4 +331,20 @@ CREATE TABLE test (
|
||||
SQL
|
||||
[ "$status" -ne 0 ]
|
||||
[[ "$output" =~ "already exists" ]] || false
|
||||
}
|
||||
|
||||
@test "sql-create-tables: create like" {
|
||||
dolt sql <<SQL
|
||||
CREATE TABLE test1 (
|
||||
pk bigint primary key,
|
||||
c1 bigint default 5 comment 'hi'
|
||||
);
|
||||
CREATE TABLE test2 LIKE test1;
|
||||
SQL
|
||||
run dolt schema show test2
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CREATE TABLE \`test2\`" ]] || false
|
||||
[[ "$output" =~ "\`pk\` bigint NOT NULL" ]] || false
|
||||
[[ "$output" =~ "\`c1\` bigint DEFAULT 5 COMMENT 'hi'" ]] || false
|
||||
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
|
||||
}
|
||||
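CREATE TABLE LIKE, added in this commit, copies only the table definition (column types, defaults, comments, primary key), not the rows. A minimal sketch against an initialized dolt repo, using the hypothetical table names src and dst:

    dolt sql <<SQL
    CREATE TABLE src (pk BIGINT PRIMARY KEY, c1 BIGINT DEFAULT 5 COMMENT 'hi');
    INSERT INTO src VALUES (1, 10);
    CREATE TABLE dst LIKE src;
    SQL
    dolt schema show dst                     # same column definitions as src
    dolt sql -q "SELECT COUNT(*) FROM dst"   # 0 rows; only the schema is copied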
@@ -313,6 +313,9 @@ SQL
|
||||
[ ! "$output" = "" ]
|
||||
|
||||
dolt diff -r sql firstbranch newbranch > query
|
||||
echo "----------------------"
|
||||
cat query
|
||||
echo "----------------------"
|
||||
dolt checkout firstbranch
|
||||
dolt sql < query
|
||||
dolt add test
|
||||
|
||||
@@ -497,12 +497,12 @@ SQL
|
||||
dolt sql -q "alter table one_pk modify column c5 bigint"
|
||||
run dolt schema show one_pk
|
||||
[ $status -eq 0 ]
|
||||
[[ "$output" =~ '`pk` BIGINT NOT NULL COMMENT' ]] || false
|
||||
[[ "$output" =~ '`c1` BIGINT COMMENT' ]] || false
|
||||
[[ "$output" =~ '`c2` BIGINT COMMENT' ]] || false
|
||||
[[ "$output" =~ '`c3` BIGINT COMMENT' ]] || false
|
||||
[[ "$output" =~ '`c4` BIGINT COMMENT' ]] || false
|
||||
[[ "$output" =~ '`c5` BIGINT COMMENT' ]] || false
|
||||
[[ "$output" =~ '`pk` bigint NOT NULL' ]] || false
|
||||
[[ "$output" =~ '`c1` bigint' ]] || false
|
||||
[[ "$output" =~ '`c2` bigint' ]] || false
|
||||
[[ "$output" =~ '`c3` bigint' ]] || false
|
||||
[[ "$output" =~ '`c4` bigint' ]] || false
|
||||
[[ "$output" =~ '`c5` bigint' ]] || false
|
||||
[[ "$output" =~ 'PRIMARY KEY (`pk`)' ]] || false
|
||||
}
|
||||
|
||||
@@ -511,27 +511,15 @@ SQL
|
||||
dolt sql -q "alter table one_pk change column c5 c5 bigint"
|
||||
run dolt schema show one_pk
|
||||
[ $status -eq 0 ]
|
||||
[[ "$output" =~ '`pk` BIGINT NOT NULL COMMENT' ]] || false
|
||||
[[ "$output" =~ '`c1` BIGINT COMMENT' ]] || false
|
||||
[[ "$output" =~ '`c2` BIGINT COMMENT' ]] || false
|
||||
[[ "$output" =~ '`c3` BIGINT COMMENT' ]] || false
|
||||
[[ "$output" =~ '`c4` BIGINT COMMENT' ]] || false
|
||||
[[ "$output" =~ '`c5` BIGINT COMMENT' ]] || false
|
||||
[[ "$output" =~ '`pk` bigint NOT NULL' ]] || false
|
||||
[[ "$output" =~ '`c1` bigint' ]] || false
|
||||
[[ "$output" =~ '`c2` bigint' ]] || false
|
||||
[[ "$output" =~ '`c3` bigint' ]] || false
|
||||
[[ "$output" =~ '`c4` bigint' ]] || false
|
||||
[[ "$output" =~ '`c5` bigint' ]] || false
|
||||
[[ "$output" =~ 'PRIMARY KEY (`pk`)' ]] || false
|
||||
}
|
||||
|
||||
@test "sql alter table modify column with tag change" {
|
||||
run dolt sql -q "alter table one_pk modify column c5 bigint comment 'tag:9999'"
|
||||
[ $status -eq 1 ]
|
||||
[[ "$output" =~ "cannot change the tag of an existing column" ]] || false
|
||||
}
|
||||
|
||||
@test "sql alter table change column with tag change" {
|
||||
run dolt sql -q "alter table one_pk change column c5 c5 bigint comment 'tag:9999'"
|
||||
[ $status -eq 1 ]
|
||||
[[ "$output" =~ "cannot change the tag of an existing column" ]] || false
|
||||
}
|
||||
|
||||
@test "sql drop table" {
|
||||
dolt sql -q "drop table one_pk"
|
||||
run dolt ls
|
||||
|
||||
122 bats/types.bats
@@ -42,7 +42,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` BIGINT" ]] || false
|
||||
[[ "$output" =~ "\`v\` bigint" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 4611686018427387903);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -67,7 +67,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` BIGINT UNSIGNED" ]] || false
|
||||
[[ "$output" =~ "\`v\` bigint unsigned" ]] || false
|
||||
cat <<DELIM > uint64-max.csv
|
||||
pk,v
|
||||
0, 18446744073709551615
|
||||
@@ -100,7 +100,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` BINARY(10)" ]] || false
|
||||
[[ "$output" =~ "\`v\` binary(10)" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -123,7 +123,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` BIT(10)" ]] || false
|
||||
[[ "$output" =~ "\`v\` bit(10)" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 511);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -149,7 +149,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` BLOB" ]] || false
|
||||
[[ "$output" =~ "\`v\` blob" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -170,7 +170,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` TINYINT" ]] || false
|
||||
[[ "$output" =~ "\`v\` tinyint" ]] || false
|
||||
}
|
||||
|
||||
@test "types: BOOLEAN" {
|
||||
@@ -183,7 +183,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` TINYINT" ]] || false
|
||||
[[ "$output" =~ "\`v\` tinyint" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, true);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -204,7 +204,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` CHAR(10)" ]] || false
|
||||
[[ "$output" =~ "\`v\` char(10)" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -227,7 +227,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` CHAR(10)" ]] || false
|
||||
[[ "$output" =~ "\`v\` char(10)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: CHARACTER VARYING(10)" {
|
||||
@@ -240,7 +240,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` VARCHAR(10)" ]] || false
|
||||
[[ "$output" =~ "\`v\` varchar(10)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: DATE" {
|
||||
@@ -253,7 +253,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DATE" ]] || false
|
||||
[[ "$output" =~ "\`v\` date" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, '2020-02-10 11:12:13.456789');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -282,7 +282,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DATETIME" ]] || false
|
||||
[[ "$output" =~ "\`v\` datetime" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, '2020-02-10 11:12:13.456789');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -311,7 +311,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DECIMAL(10,0)" ]] || false
|
||||
[[ "$output" =~ "\`v\` decimal(10,0)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: DEC(9)" {
|
||||
@@ -324,7 +324,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DECIMAL(9,0)" ]] || false
|
||||
[[ "$output" =~ "\`v\` decimal(9,0)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: DEC(9,5)" {
|
||||
@@ -337,7 +337,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DECIMAL(9,5)" ]] || false
|
||||
[[ "$output" =~ "\`v\` decimal(9,5)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: DECIMAL" {
|
||||
@@ -350,7 +350,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DECIMAL(10,0)" ]] || false
|
||||
[[ "$output" =~ "\`v\` decimal(10,0)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: DECIMAL(9)" {
|
||||
@@ -363,7 +363,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DECIMAL(9,0)" ]] || false
|
||||
[[ "$output" =~ "\`v\` decimal(9,0)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: DECIMAL(9,5)" {
|
||||
@@ -376,7 +376,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DECIMAL(9,5)" ]] || false
|
||||
[[ "$output" =~ "\`v\` decimal(9,5)" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 1234.56789);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -399,7 +399,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DOUBLE" ]] || false
|
||||
[[ "$output" =~ "\`v\` double" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (0, 1.25);"
|
||||
run dolt sql -r csv -q "SELECT * FROM test WHERE pk=0"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -433,7 +433,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DOUBLE" ]] || false
|
||||
[[ "$output" =~ "\`v\` double" ]] || false
|
||||
}
|
||||
|
||||
@test "types: ENUM('a','b','c')" {
|
||||
@@ -446,7 +446,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` ENUM('a','b','c')" ]] || false
|
||||
[[ "$output" =~ "\`v\` enum('a','b','c')" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'a');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -471,7 +471,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DECIMAL(10,0)" ]] || false
|
||||
[[ "$output" =~ "\`v\` decimal(10,0)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: FIXED(9)" {
|
||||
@@ -484,7 +484,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DECIMAL(9,0)" ]] || false
|
||||
[[ "$output" =~ "\`v\` decimal(9,0)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: FIXED(9,5)" {
|
||||
@@ -497,7 +497,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DECIMAL(9,5)" ]] || false
|
||||
[[ "$output" =~ "\`v\` decimal(9,5)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: FLOAT" {
|
||||
@@ -510,7 +510,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` FLOAT" ]] || false
|
||||
[[ "$output" =~ "\`v\` float" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (0, 1.25);"
|
||||
run dolt sql -r csv -q "SELECT * FROM test WHERE pk=0"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -553,7 +553,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` INT" ]] || false
|
||||
[[ "$output" =~ "\`v\` int" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 1073741823);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -578,7 +578,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` INT UNSIGNED" ]] || false
|
||||
[[ "$output" =~ "\`v\` int unsigned" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 2147483647);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -603,7 +603,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` INT" ]] || false
|
||||
[[ "$output" =~ "\`v\` int" ]] || false
|
||||
}
|
||||
|
||||
@test "types: INTEGER UNSIGNED" {
|
||||
@@ -616,7 +616,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` INT UNSIGNED" ]] || false
|
||||
[[ "$output" =~ "\`v\` int unsigned" ]] || false
|
||||
}
|
||||
|
||||
@test "types: LONG" {
|
||||
@@ -629,7 +629,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` MEDIUMTEXT" ]] || false
|
||||
[[ "$output" =~ "\`v\` mediumtext" ]] || false
|
||||
}
|
||||
|
||||
@test "types: LONG VARCHAR" {
|
||||
@@ -642,7 +642,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` MEDIUMTEXT" ]] || false
|
||||
[[ "$output" =~ "\`v\` mediumtext" ]] || false
|
||||
}
|
||||
|
||||
@test "types: LONGBLOB" {
|
||||
@@ -656,7 +656,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` LONGBLOB" ]] || false
|
||||
[[ "$output" =~ "\`v\` longblob" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -677,7 +677,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` LONGTEXT" ]] || false
|
||||
[[ "$output" =~ "\`v\` longtext" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -699,7 +699,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` MEDIUMBLOB" ]] || false
|
||||
[[ "$output" =~ "\`v\` mediumblob" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -720,7 +720,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` MEDIUMINT" ]] || false
|
||||
[[ "$output" =~ "\`v\` mediumint" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 4194303);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -745,7 +745,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` MEDIUMINT UNSIGNED" ]] || false
|
||||
[[ "$output" =~ "\`v\` mediumint unsigned" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 8388607);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -770,7 +770,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` MEDIUMTEXT" ]] || false
|
||||
[[ "$output" =~ "\`v\` mediumtext" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -791,7 +791,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` CHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci" ]] || false
|
||||
[[ "$output" =~ "\`v\` char(10) character set utf8mb3 collate utf8mb3_general_ci" ]] || false
|
||||
}
|
||||
|
||||
@test "types: NATIONAL CHARACTER(10)" {
|
||||
@@ -804,7 +804,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` CHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci" ]] || false
|
||||
[[ "$output" =~ "\`v\` char(10) character set utf8mb3 collate utf8mb3_general_ci" ]] || false
|
||||
}
|
||||
|
||||
@test "types: NATIONAL CHARACTER VARYING(10)" {
|
||||
@@ -817,7 +817,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` VARCHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci" ]] || false
|
||||
[[ "$output" =~ "\`v\` varchar(10) character set utf8mb3 collate utf8mb3_general_ci" ]] || false
|
||||
}
|
||||
|
||||
@test "types: NATIONAL VARCHAR(10)" {
|
||||
@@ -830,7 +830,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` VARCHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci" ]] || false
|
||||
[[ "$output" =~ "\`v\` varchar(10) character set utf8mb3 collate utf8mb3_general_ci" ]] || false
|
||||
}
|
||||
|
||||
@test "types: NCHAR(10)" {
|
||||
@@ -843,7 +843,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` CHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci" ]] || false
|
||||
[[ "$output" =~ "\`v\` char(10) character set utf8mb3 collate utf8mb3_general_ci" ]] || false
|
||||
}
|
||||
|
||||
@test "types: NVARCHAR(10)" {
|
||||
@@ -856,7 +856,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` VARCHAR(10) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci" ]] || false
|
||||
[[ "$output" =~ "\`v\` varchar(10) character set utf8mb3 collate utf8mb3_general_ci" ]] || false
|
||||
}
|
||||
|
||||
@test "types: NUMERIC" {
|
||||
@@ -869,7 +869,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DECIMAL(10,0)" ]] || false
|
||||
[[ "$output" =~ "\`v\` decimal(10,0)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: NUMERIC(9)" {
|
||||
@@ -882,7 +882,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DECIMAL(9,0)" ]] || false
|
||||
[[ "$output" =~ "\`v\` decimal(9,0)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: NUMERIC(9,5)" {
|
||||
@@ -895,7 +895,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DECIMAL(9,5)" ]] || false
|
||||
[[ "$output" =~ "\`v\` decimal(9,5)" ]] || false
|
||||
}
|
||||
|
||||
@test "types: REAL" {
|
||||
@@ -908,7 +908,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` DOUBLE" ]] || false
|
||||
[[ "$output" =~ "\`v\` double" ]] || false
|
||||
}
|
||||
|
||||
@test "types: SET('a','b','c')" {
|
||||
@@ -921,7 +921,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` SET('a','b','c')" ]] || false
|
||||
[[ "$output" =~ "\`v\` set('a','b','c')" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'b,a');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -947,7 +947,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` SMALLINT" ]] || false
|
||||
[[ "$output" =~ "\`v\` smallint" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 16383);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -972,7 +972,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` SMALLINT UNSIGNED" ]] || false
|
||||
[[ "$output" =~ "\`v\` smallint unsigned" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 32767);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -997,7 +997,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` TEXT" ]] || false
|
||||
[[ "$output" =~ "\`v\` text" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -1018,7 +1018,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` TIME" ]] || false
|
||||
[[ "$output" =~ "\`v\` time" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, '11:22:33.444444');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -1047,7 +1047,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` TIMESTAMP" ]] || false
|
||||
[[ "$output" =~ "\`v\` timestamp" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, '2020-02-10 11:12:13.456789');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -1077,7 +1077,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` TINYBLOB" ]] || false
|
||||
[[ "$output" =~ "\`v\` tinyblob" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -1098,7 +1098,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` TINYINT" ]] || false
|
||||
[[ "$output" =~ "\`v\` tinyint" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 63);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -1123,7 +1123,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` TINYINT UNSIGNED" ]] || false
|
||||
[[ "$output" =~ "\`v\` tinyint unsigned" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 127);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -1148,7 +1148,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` TINYTEXT" ]] || false
|
||||
[[ "$output" =~ "\`v\` tinytext" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -1170,7 +1170,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` VARBINARY(10)" ]] || false
|
||||
[[ "$output" =~ "\`v\` varbinary(10)" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -1193,7 +1193,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` VARCHAR(10)" ]] || false
|
||||
[[ "$output" =~ "\`v\` varchar(10)" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -1216,7 +1216,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` VARCHAR(10) CHARACTER SET utf32 COLLATE utf32_general_ci" ]] || false
|
||||
[[ "$output" =~ "\`v\` varchar(10) character set utf32 collate utf32_general_ci" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 'abcdefg');"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
@@ -1239,7 +1239,7 @@ CREATE TABLE test (
|
||||
SQL
|
||||
run dolt schema show
|
||||
[ "$status" -eq "0" ]
|
||||
[[ "$output" =~ "\`v\` YEAR" ]] || false
|
||||
[[ "$output" =~ "\`v\` year" ]] || false
|
||||
dolt sql -q "INSERT INTO test VALUES (1, 1901);"
|
||||
run dolt sql -q "SELECT * FROM test"
|
||||
[ "$status" -eq "0" ]
|
||||
|
||||
@@ -138,10 +138,10 @@ func (cmd DiffCmd) createArgParser() *argparser.ArgParser {
ap.SupportsFlag(DataFlag, "d", "Show only the data changes, do not show the schema changes (Both shown by default).")
ap.SupportsFlag(SchemaFlag, "s", "Show only the schema changes, do not show the data changes (Both shown by default).")
ap.SupportsFlag(SummaryFlag, "", "Show summary of data changes")
ap.SupportsString(formatFlag, "r", "result output format", "How to format diff output. Valid values are tabular & sql. Defaults to tabular. ")
ap.SupportsString(FormatFlag, "r", "result output format", "How to format diff output. Valid values are tabular & sql. Defaults to tabular. ")
ap.SupportsString(whereParam, "", "column", "filters columns based on values in the diff. See {{.EmphasisLeft}}dolt diff --help{{.EmphasisRight}} for details.")
ap.SupportsInt(limitParam, "", "record_count", "limits to the first N diffs.")
ap.SupportsString(queryFlag, "q", "query", "diffs the results of a query at two commits")
ap.SupportsString(QueryFlag, "q", "query", "diffs the results of a query at two commits")
return ap
}
@@ -180,22 +180,22 @@ func (cmd DiffCmd) Exec(ctx context.Context, commandStr string, args []string, d
|
||||
func parseDiffArgs(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.ArgParseResults) (from, to *doltdb.RootValue, dArgs *diffArgs, err error) {
|
||||
dArgs = &diffArgs{}
|
||||
|
||||
if q, ok := apr.GetValue(queryFlag); ok {
|
||||
if q, ok := apr.GetValue(QueryFlag); ok {
|
||||
_, okWhere := apr.GetValue(whereParam)
|
||||
_, okLimit := apr.GetInt(limitParam)
|
||||
switch {
|
||||
case okWhere:
|
||||
return nil, nil, nil, fmt.Errorf("arg %s cannot be combined with arg %s", queryFlag, whereParam)
|
||||
return nil, nil, nil, fmt.Errorf("arg %s cannot be combined with arg %s", QueryFlag, whereParam)
|
||||
case okLimit:
|
||||
return nil, nil, nil, fmt.Errorf("arg %s cannot be combined with arg %s", queryFlag, limitParam)
|
||||
return nil, nil, nil, fmt.Errorf("arg %s cannot be combined with arg %s", QueryFlag, limitParam)
|
||||
case apr.Contains(DataFlag):
|
||||
return nil, nil, nil, fmt.Errorf("arg %s cannot be combined with arg %s", queryFlag, DataFlag)
|
||||
return nil, nil, nil, fmt.Errorf("arg %s cannot be combined with arg %s", QueryFlag, DataFlag)
|
||||
case apr.Contains(SchemaFlag):
|
||||
return nil, nil, nil, fmt.Errorf("arg %s cannot be combined with arg %s", queryFlag, SchemaFlag)
|
||||
return nil, nil, nil, fmt.Errorf("arg %s cannot be combined with arg %s", QueryFlag, SchemaFlag)
|
||||
case apr.Contains(SummaryFlag):
|
||||
return nil, nil, nil, fmt.Errorf("arg %s cannot be combined with arg %s", queryFlag, SummaryFlag)
|
||||
return nil, nil, nil, fmt.Errorf("arg %s cannot be combined with arg %s", QueryFlag, SummaryFlag)
|
||||
case apr.Contains(SQLFlag):
|
||||
return nil, nil, nil, fmt.Errorf("arg %s cannot be combined with arg %s", queryFlag, SQLFlag)
|
||||
return nil, nil, nil, fmt.Errorf("arg %s cannot be combined with arg %s", QueryFlag, SQLFlag)
|
||||
}
|
||||
dArgs.query = q
|
||||
}
|
||||
@@ -207,9 +207,9 @@ func parseDiffArgs(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.ArgPar
dArgs.diffParts = SchemaOnlyDiff
}

f, _ := apr.GetValue(formatFlag)
f, _ := apr.GetValue(FormatFlag)
switch strings.ToLower(f) {
case "tablular":
case "tabular":
dArgs.diffOutput = TabularDiffOutput
case "sql":
dArgs.diffOutput = SQLDiffOutput
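Besides exporting the flag name, this hunk fixes the misspelled "tablular" case so the tabular output format is actually matched. The sql output format is what the earlier bats change in this diff relies on; a hedged sketch of that round trip, using the firstbranch/newbranch names from that test:

    dolt diff -r sql firstbranch newbranch > query   # emit the diff as SQL patch statements
    dolt checkout firstbranch
    dolt sql < query                                 # replay the diff onto the other branch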
@@ -455,12 +455,12 @@ func tabularSchemaDiff(ctx context.Context, td diff.TableDelta, fromSchemas, toS
|
||||
dff := colDiffs[tag]
|
||||
switch dff.DiffType {
|
||||
case diff.SchDiffNone:
|
||||
cli.Println(sqlfmt.FmtColWithTag(4, 0, 0, *dff.New))
|
||||
cli.Println(sqlfmt.FmtCol(4, 0, 0, *dff.New))
|
||||
case diff.SchDiffAdded:
|
||||
cli.Println(color.GreenString("+ " + sqlfmt.FmtColWithTag(2, 0, 0, *dff.New)))
|
||||
cli.Println(color.GreenString("+ " + sqlfmt.FmtCol(2, 0, 0, *dff.New)))
|
||||
case diff.SchDiffRemoved:
|
||||
// removed from sch2
|
||||
cli.Println(color.RedString("- " + sqlfmt.FmtColWithTag(2, 0, 0, *dff.Old)))
|
||||
cli.Println(color.RedString("- " + sqlfmt.FmtCol(2, 0, 0, *dff.Old)))
|
||||
case diff.SchDiffModified:
|
||||
// changed in sch2
|
||||
n0, t0 := dff.Old.Name, dff.Old.TypeInfo.ToSqlType().String()
|
||||
@@ -538,7 +538,7 @@ func sqlSchemaDiff(ctx context.Context, td diff.TableDelta, toSchemas map[string
if td.IsDrop() {
cli.Println(sqlfmt.DropTableStmt(td.FromName))
} else if td.IsAdd() {
cli.Println(sqlfmt.CreateTableStmtWithTags(td.ToName, toSch, td.ToFks, nil))
cli.Println(sqlfmt.CreateTableStmt(td.ToName, toSch, td.ToFks, nil))
} else {
if td.FromName != td.ToName {
cli.Println(sqlfmt.RenameTableStmt(td.FromName, td.ToName))
@@ -20,13 +20,14 @@ import (
|
||||
"io"
|
||||
"os"
|
||||
|
||||
dsqle "github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle"
|
||||
|
||||
"github.com/liquidata-inc/dolt/go/cmd/dolt/cli"
|
||||
"github.com/liquidata-inc/dolt/go/cmd/dolt/commands"
|
||||
"github.com/liquidata-inc/dolt/go/cmd/dolt/errhand"
|
||||
eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle/sqlfmt"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
|
||||
)
|
||||
@@ -39,10 +40,6 @@ var schExportDocs = cli.CommandDocumentationContent{
|
||||
},
|
||||
}
|
||||
|
||||
const (
|
||||
withTagsFlag = "with-tags"
|
||||
)
|
||||
|
||||
type ExportCmd struct{}
|
||||
|
||||
// Name is returns the name of the Dolt cli command. This is what is used on the command line to invoke the command
|
||||
@@ -52,7 +49,7 @@ func (cmd ExportCmd) Name() string {
|
||||
|
||||
// Description returns a description of the command
|
||||
func (cmd ExportCmd) Description() string {
|
||||
return "Exports a table's schema."
|
||||
return "Exports a table's schema in SQL form."
|
||||
}
|
||||
|
||||
// CreateMarkdown creates a markdown file containing the helptext for the command at the given path
|
||||
@@ -64,8 +61,7 @@ func (cmd ExportCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string)
|
||||
func (cmd ExportCmd) createArgParser() *argparser.ArgParser {
|
||||
ap := argparser.NewArgParser()
|
||||
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"table", "table whose schema is being exported."})
|
||||
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"commit", "commit at which point the schema will be displayed."})
|
||||
ap.SupportsFlag(withTagsFlag, "", "Include column tags in exported schema")
|
||||
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"file", "the file that the schema will be written to."})
|
||||
return ap
|
||||
}
|
||||
|
||||
@@ -122,6 +118,9 @@ func exportSchemas(ctx context.Context, apr *argparser.ArgParseResults, root *do
|
||||
var tablesToExport []string
|
||||
var err error
|
||||
if tblName != "" {
|
||||
if doltdb.HasDoltPrefix(tblName) {
|
||||
return errhand.BuildDError("%s not found", tblName).Build()
|
||||
}
|
||||
tablesToExport = []string{tblName}
|
||||
} else {
|
||||
tablesToExport, err = doltdb.GetNonSystemTableNames(ctx, root)
|
||||
@@ -131,7 +130,7 @@ func exportSchemas(ctx context.Context, apr *argparser.ArgParseResults, root *do
|
||||
}
|
||||
|
||||
for _, tn := range tablesToExport {
|
||||
verr := exportTblSchema(ctx, tn, root, wr, apr.Contains(withTagsFlag))
|
||||
verr := exportTblSchema(ctx, tn, root, wr)
|
||||
if verr != nil {
|
||||
return verr
|
||||
}
|
||||
@@ -140,42 +139,11 @@ func exportSchemas(ctx context.Context, apr *argparser.ArgParseResults, root *do
|
||||
return nil
|
||||
}
|
||||
|
||||
func exportTblSchema(ctx context.Context, tblName string, root *doltdb.RootValue, wr io.Writer, withTags bool) errhand.VerboseError {
|
||||
if has, err := root.HasTable(ctx, tblName); err != nil {
|
||||
return errhand.BuildDError("unable to read from database").AddCause(err).Build()
|
||||
} else if !has {
|
||||
return errhand.BuildDError("table %s not found", tblName).Build()
|
||||
}
|
||||
|
||||
tbl, _, err := root.GetTable(ctx, tblName)
|
||||
|
||||
func exportTblSchema(ctx context.Context, tblName string, root *doltdb.RootValue, wr io.Writer) errhand.VerboseError {
|
||||
sqlCtx, engine, _ := dsqle.PrepareCreateTableStmt(ctx, root)
|
||||
stmt, err := dsqle.GetCreateTableStmt(sqlCtx, engine, tblName)
|
||||
if err != nil {
|
||||
return errhand.BuildDError("unable to get table").AddCause(err).Build()
|
||||
}
|
||||
|
||||
sch, err := tbl.GetSchema(ctx)
|
||||
|
||||
if err != nil {
|
||||
return errhand.BuildDError("error: failed to get schema for table %s", tblName).AddCause(err).Build()
|
||||
}
|
||||
|
||||
fkc, err := root.GetForeignKeyCollection(ctx)
|
||||
|
||||
if err != nil {
|
||||
return errhand.BuildDError("error: failed to read foreign key struct").AddCause(err).Build()
|
||||
}
|
||||
|
||||
declaresFk, _ := fkc.KeysForTable(tblName)
|
||||
parentSchs, err := root.GetAllSchemas(ctx)
|
||||
if err != nil {
|
||||
return errhand.BuildDError("could not read schemas").AddCause(err).Build()
|
||||
}
|
||||
|
||||
var stmt string
|
||||
if withTags {
|
||||
stmt = sqlfmt.CreateTableStmtWithTags(tblName, sch, declaresFk, parentSchs)
|
||||
} else {
|
||||
stmt = sqlfmt.CreateTableStmt(tblName, sch, declaresFk, parentSchs)
|
||||
return errhand.VerboseErrorFromError(err)
|
||||
}
|
||||
|
||||
_, err = fmt.Fprintln(wr, stmt)
|
||||
|
||||
@@ -286,7 +286,7 @@ func importSchema(ctx context.Context, dEnv *env.DoltEnv, apr *argparser.ArgPars
|
||||
|
||||
tblName := impArgs.tableName
|
||||
// inferred schemas have no foreign keys
|
||||
cli.Println(sqlfmt.CreateTableStmtWithTags(tblName, sch, nil, nil))
|
||||
cli.Println(sqlfmt.CreateTableStmt(tblName, sch, nil, nil))
|
||||
|
||||
if !apr.Contains(dryRunFlag) {
|
||||
tbl, tblExists, err := root.GetTable(ctx, tblName)
|
||||
|
||||
@@ -24,6 +24,7 @@ var Commands = cli.NewSubCommandHandler("schema", "Commands for showing and impo
|
||||
ExportCmd{},
|
||||
ImportCmd{},
|
||||
ShowCmd{},
|
||||
TagsCmd{},
|
||||
})
|
||||
|
||||
// ValidateTableNameForCreate validates the given table name for creation as a user table, returning an error if the
|
||||
|
||||
@@ -25,14 +25,14 @@ import (
|
||||
eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle/sqlfmt"
|
||||
dsqle "github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
|
||||
)
|
||||
|
||||
var tblSchemaDocs = cli.CommandDocumentationContent{
|
||||
ShortDesc: "Shows the schema of one or more tables.",
|
||||
LongDesc: `{{.EmphasisLeft}}dolt table schema{{.EmphasisRight}} displays the schema of tables at a given commit. If no commit is provided the working set will be used. +
|
||||
LongDesc: `{{.EmphasisLeft}}dolt schema show{{.EmphasisRight}} displays the schema of tables at a given commit. If no commit is provided the working set will be used.
|
||||
|
||||
A list of tables can optionally be provided. If it is omitted all table schemas will be shown.`,
|
||||
Synopsis: []string{
|
||||
@@ -133,15 +133,7 @@ func printSchemas(ctx context.Context, apr *argparser.ArgParseResults, dEnv *env
|
||||
}
|
||||
}
|
||||
|
||||
fkc, err := root.GetForeignKeyCollection(ctx)
|
||||
if err != nil {
|
||||
return errhand.BuildDError("error: failed to read foreign key struct").AddCause(err).Build()
|
||||
}
|
||||
|
||||
allSchemas, err := root.GetAllSchemas(ctx)
|
||||
if err != nil {
|
||||
return errhand.BuildDError("unable to get schema").AddCause(err).Build()
|
||||
}
|
||||
sqlCtx, engine, _ := dsqle.PrepareCreateTableStmt(ctx, root)
|
||||
|
||||
var notFound []string
|
||||
for _, tblName := range tables {
|
||||
@@ -153,11 +145,12 @@ func printSchemas(ctx context.Context, apr *argparser.ArgParseResults, dEnv *env
|
||||
if !ok {
|
||||
notFound = append(notFound, tblName)
|
||||
} else {
|
||||
fks, _ := fkc.KeysForTable(tblName)
|
||||
sch := allSchemas[tblName]
|
||||
|
||||
cli.Println(bold.Sprint(tblName), "@", cmStr)
|
||||
cli.Println(sqlfmt.CreateTableStmtWithTags(tblName, sch, fks, allSchemas))
|
||||
stmt, err := dsqle.GetCreateTableStmt(sqlCtx, engine, tblName)
|
||||
if err != nil {
|
||||
return errhand.VerboseErrorFromError(err)
|
||||
}
|
||||
cli.Println(stmt)
|
||||
cli.Println()
|
||||
}
|
||||
}
|
||||
|
||||
110 go/cmd/dolt/commands/schcmds/tags.go Normal file
@@ -0,0 +1,110 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package schcmds

import (
"context"
"fmt"
"strings"

"github.com/liquidata-inc/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/dolt/go/cmd/dolt/commands"
"github.com/liquidata-inc/dolt/go/cmd/dolt/errhand"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
)

//SELECT table_name AS 'table', column_name AS 'column', SUBSTR(extra, 5) AS tag FROM information_schema.columns WHERE table_name = 'XXX';

var tblTagsDocs = cli.CommandDocumentationContent{
ShortDesc: "Shows the column tags of one or more tables.",
LongDesc: `{{.EmphasisLeft}}dolt schema tags{{.EmphasisRight}} displays the column tags of tables on the working set.

A list of tables can optionally be provided. If it is omitted then all tables will be shown. If a given table does not exist, then it is ignored.`,
Synopsis: []string{
"[-r {{.LessThan}}result format{{.GreaterThan}}] [{{.LessThan}}table{{.GreaterThan}}...]",
},
}

type TagsCmd struct{}

var _ cli.Command = TagsCmd{}

func (cmd TagsCmd) Name() string {
return "tags"
}

func (cmd TagsCmd) Description() string {
return "Shows the column tags of one or more tables."
}

func (cmd TagsCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
ap := cmd.createArgParser()
return commands.CreateMarkdown(fs, path, cli.GetCommandDocumentation(commandStr, tblTagsDocs, ap))
}

func (cmd TagsCmd) createArgParser() *argparser.ArgParser {
ap := argparser.NewArgParser()
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"table", "table(s) whose tags will be displayed."})
ap.SupportsString(commands.FormatFlag, "r", "result output format", "How to format result output. Valid values are tabular, csv, json. Defaults to tabular.")
return ap
}

func (cmd TagsCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
ap := cmd.createArgParser()
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, tblTagsDocs, ap))
apr := cli.ParseArgs(ap, args, help)

tables := apr.Args()
if len(tables) == 0 {
root, verr := commands.GetWorkingWithVErr(dEnv)
if verr != nil {
return commands.HandleVErrAndExitCode(verr, usage)
}
var err error
tables, err = root.GetTableNames(ctx)

if err != nil {
return commands.HandleVErrAndExitCode(errhand.BuildDError("unable to get table names.").AddCause(err).Build(), usage)
}

tables = commands.RemoveDocsTbl(tables)
if len(tables) == 0 {
cli.Println("No tables in working set")
return 0
}
}
for i := 0; i < len(tables); i++ {
tables[i] = fmt.Sprintf("'%s'", tables[i])
}

//TODO: implement REGEXP_SUBSTR in go-mysql-server and use it here instead of SUBSTR, as this will eventually break
queryStr := fmt.Sprintf("SELECT table_name AS 'table', column_name AS 'column', "+
"SUBSTR(extra, 5) AS tag FROM information_schema.columns WHERE table_name IN (%s)", strings.Join(tables, ","))

if formatStr, ok := apr.GetValue(commands.FormatFlag); ok {
return commands.SqlCmd{}.Exec(ctx, "", []string{
fmt.Sprintf(`--%s=%s`, commands.FormatFlag, formatStr),
fmt.Sprintf(`--%s`, commands.QueryFlag),
queryStr + ";",
}, dEnv)
} else {
return commands.SqlCmd{}.Exec(ctx, "", []string{
fmt.Sprintf(`--%s`, commands.QueryFlag),
queryStr + ";",
}, dEnv)
}
}
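The new tags command is a thin wrapper around the SQL engine: it quotes the requested table names and shells out to dolt sql with the information_schema query shown above. A hedged usage sketch, where the table name test is hypothetical:

    dolt schema tags test
    # roughly equivalent to:
    dolt sql -q "SELECT table_name AS 'table', column_name AS 'column', SUBSTR(extra, 5) AS tag FROM information_schema.columns WHERE table_name IN ('test');"
    dolt schema tags -r csv test   # same result set, formatted as csv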
@@ -84,13 +84,13 @@ By default this command uses the dolt data repository in the current working dir
}

const (
queryFlag = "query"
formatFlag = "result-format"
QueryFlag = "query"
FormatFlag = "result-format"
saveFlag = "save"
executeFlag = "execute"
listSavedFlag = "list-saved"
messageFlag = "message"
batchFlag = "batch"
BatchFlag = "batch"
multiDBDirFlag = "multi-db-dir"
welcomeMsg = `# Welcome to the DoltSQL shell.
# Statements must be terminated with ';'.
@@ -120,13 +120,13 @@ func (cmd SqlCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) er
|
||||
func (cmd SqlCmd) createArgParser() *argparser.ArgParser {
|
||||
ap := argparser.NewArgParser()
|
||||
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"commit", "Commit to run read only queries against."})
|
||||
ap.SupportsString(queryFlag, "q", "SQL query to run", "Runs a single query and exits")
|
||||
ap.SupportsString(formatFlag, "r", "result output format", "How to format result output. Valid values are tabular, csv, json. Defaults to tabular. ")
|
||||
ap.SupportsString(QueryFlag, "q", "SQL query to run", "Runs a single query and exits")
|
||||
ap.SupportsString(FormatFlag, "r", "result output format", "How to format result output. Valid values are tabular, csv, json. Defaults to tabular. ")
|
||||
ap.SupportsString(saveFlag, "s", "saved query name", "Used with --query, save the query to the query catalog with the name provided. Saved queries can be examined in the dolt_query_catalog system table.")
|
||||
ap.SupportsString(executeFlag, "x", "saved query name", "Executes a saved query with the given name")
|
||||
ap.SupportsFlag(listSavedFlag, "l", "Lists all saved queries")
|
||||
ap.SupportsString(messageFlag, "m", "saved query description", "Used with --query and --save, saves the query with the descriptive message given. See also --name")
|
||||
ap.SupportsFlag(batchFlag, "b", "batch mode, to run more than one query with --query, separated by ';'. Piping input to sql with no arguments also uses batch mode")
|
||||
ap.SupportsFlag(BatchFlag, "b", "batch mode, to run more than one query with --query, separated by ';'. Piping input to sql with no arguments also uses batch mode")
|
||||
ap.SupportsString(multiDBDirFlag, "", "directory", "Defines a directory whose subdirectories should all be dolt data repositories accessible as independent databases within ")
|
||||
return ap
|
||||
}
|
||||
@@ -159,7 +159,7 @@ func (cmd SqlCmd) Exec(ctx context.Context, commandStr string, args []string, dE
|
||||
|
||||
var verr errhand.VerboseError
|
||||
format := formatTabular
|
||||
if formatSr, ok := apr.GetValue(formatFlag); ok {
|
||||
if formatSr, ok := apr.GetValue(FormatFlag); ok {
|
||||
format, verr = getFormat(formatSr)
|
||||
if verr != nil {
|
||||
return HandleVErrAndExitCode(errhand.VerboseErrorFromError(verr), usage)
|
||||
@@ -254,8 +254,8 @@ func (cmd SqlCmd) Exec(ctx context.Context, commandStr string, args []string, dE
|
||||
return HandleVErrAndExitCode(err.(errhand.VerboseError), usage)
|
||||
}
|
||||
|
||||
if query, queryOK := apr.GetValue(queryFlag); queryOK {
|
||||
batchMode := apr.Contains(batchFlag)
|
||||
if query, queryOK := apr.GetValue(QueryFlag); queryOK {
|
||||
batchMode := apr.Contains(BatchFlag)
|
||||
|
||||
if batchMode {
|
||||
batchInput := strings.NewReader(query)
|
||||
@@ -503,10 +503,10 @@ func getFormat(format string) (resultFormat, errhand.VerboseError) {
|
||||
}
|
||||
|
||||
func validateSqlArgs(apr *argparser.ArgParseResults) error {
|
||||
_, query := apr.GetValue(queryFlag)
|
||||
_, query := apr.GetValue(QueryFlag)
|
||||
_, save := apr.GetValue(saveFlag)
|
||||
_, msg := apr.GetValue(messageFlag)
|
||||
_, batch := apr.GetValue(batchFlag)
|
||||
_, batch := apr.GetValue(BatchFlag)
|
||||
_, list := apr.GetValue(listSavedFlag)
|
||||
_, execute := apr.GetValue(executeFlag)
|
||||
_, multiDB := apr.GetValue(multiDBDirFlag)
|
||||
|
||||
@@ -16,21 +16,13 @@ package tblcmds
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/fatih/color"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
|
||||
"github.com/liquidata-inc/dolt/go/cmd/dolt/cli"
|
||||
"github.com/liquidata-inc/dolt/go/cmd/dolt/commands"
|
||||
"github.com/liquidata-inc/dolt/go/cmd/dolt/commands/schcmds"
|
||||
"github.com/liquidata-inc/dolt/go/cmd/dolt/errhand"
|
||||
eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/mvdata"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/rowconv"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/table"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed/noms"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
|
||||
)
|
||||
@@ -44,38 +36,10 @@ If a table exists at the target location this command will fail unless the {{.Em
|
||||
All changes will be applied to the working tables and will need to be staged using {{.EmphasisLeft}}dolt add{{.EmphasisRight}} and committed using {{.EmphasisLeft}}dolt commit{{.EmphasisRight}}.
|
||||
`,
|
||||
Synopsis: []string{
|
||||
"[-f] [{{.LessThan}}commit{{.GreaterThan}}] {{.LessThan}}oldtable{{.GreaterThan}} {{.LessThan}}newtable{{.GreaterThan}}",
|
||||
"[-f] {{.LessThan}}oldtable{{.GreaterThan}} {{.LessThan}}newtable{{.GreaterThan}}",
|
||||
},
|
||||
}
|
||||
|
||||
type copyOptions struct {
|
||||
oldTblName string
|
||||
newTblName string
|
||||
contOnErr bool
|
||||
force bool
|
||||
Src mvdata.DataLocation
|
||||
Dest mvdata.DataLocation
|
||||
}
|
||||
|
||||
func (co copyOptions) checkOverwrite(ctx context.Context, root *doltdb.RootValue, fs filesys.ReadableFS) (bool, error) {
|
||||
if co.force {
|
||||
return false, nil
|
||||
}
|
||||
return co.Dest.Exists(ctx, root, fs)
|
||||
}
|
||||
|
||||
func (co copyOptions) WritesToTable() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (co copyOptions) SrcName() string {
|
||||
return co.oldTblName
|
||||
}
|
||||
|
||||
func (co copyOptions) DestName() string {
|
||||
return co.newTblName
|
||||
}
|
||||
|
||||
type CpCmd struct{}
|
||||
|
||||
// Name is returns the name of the Dolt cli command. This is what is used on the command line to invoke the command
|
||||
@@ -96,7 +60,6 @@ func (cmd CpCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) err

func (cmd CpCmd) createArgParser() *argparser.ArgParser {
ap := argparser.NewArgParser()
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"commit", "The state at which point the table will be copied."})
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"oldtable", "The table being copied."})
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"newtable", "The destination where the table is being copied to."})
ap.SupportsFlag(forceParam, "f", "If data already exists in the destination, the force flag will allow the target to be overwritten.")
@@ -114,140 +77,24 @@ func (cmd CpCmd) Exec(ctx context.Context, commandStr string, args []string, dEn
help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, tblCpDocs, ap))
apr := cli.ParseArgs(ap, args, help)

if apr.NArg() < 2 || apr.NArg() > 3 {
if apr.NArg() != 2 {
usage()
return 1
}

force := apr.Contains(forceParam)
working, verr := commands.GetWorkingWithVErr(dEnv)
if verr != nil {
return commands.HandleVErrAndExitCode(verr, usage)
oldTbl, newTbl := apr.Arg(0), apr.Arg(1)

queryStr := ""
if force := apr.Contains(forceParam); force {
queryStr = fmt.Sprintf("DROP TABLE IF EXISTS `%s`;", newTbl)
}
queryStr = fmt.Sprintf("%sCREATE TABLE `%s` LIKE `%s`;", queryStr, newTbl, oldTbl)
queryStr = fmt.Sprintf("%sINSERT INTO `%s` SELECT * FROM `%s`;", queryStr, newTbl, oldTbl)

root := working

var oldTbl, newTbl string
if apr.NArg() == 3 {
var cm *doltdb.Commit
cm, verr = commands.ResolveCommitWithVErr(dEnv, apr.Arg(0))
if verr != nil {
return commands.HandleVErrAndExitCode(verr, usage)
}
var err error
root, err = cm.GetRootValue()

if err != nil {
verr = errhand.BuildDError("error: failed to get root value").AddCause(err).Build()
return commands.HandleVErrAndExitCode(verr, usage)
}

oldTbl, newTbl = apr.Arg(1), apr.Arg(2)
} else {
oldTbl, newTbl = apr.Arg(0), apr.Arg(1)
}

if err := schcmds.ValidateTableNameForCreate(newTbl); err != nil {
return commands.HandleVErrAndExitCode(err, usage)
}

_, ok, err := root.GetTable(ctx, oldTbl)

if err != nil {
verr = errhand.BuildDError("error: failed to get table").AddCause(err).Build()
return commands.HandleVErrAndExitCode(verr, usage)
}
if !ok {
verr = errhand.BuildDError("Table '%s' not found in root", oldTbl).Build()
return commands.HandleVErrAndExitCode(verr, usage)
}

has, err := working.HasTable(ctx, newTbl)

if err != nil {
verr = errhand.BuildDError("error: failed to get tables").AddCause(err).Build()
return commands.HandleVErrAndExitCode(verr, usage)
} else if !force && has {
verr = errhand.BuildDError("Data already exists in '%s'. Use -f to overwrite.", newTbl).Build()
return commands.HandleVErrAndExitCode(verr, usage)
}

cpOpts := copyOptions{
oldTblName: oldTbl,
newTblName: newTbl,
contOnErr: true,
force: force,
Src: mvdata.TableDataLocation{Name: oldTbl},
Dest: mvdata.TableDataLocation{Name: newTbl},
}

mover, verr := newTableCopyDataMover(ctx, root, dEnv.FS, cpOpts, nil)

if verr != nil {
return commands.HandleVErrAndExitCode(verr, usage)
}

skipped, verr := mvdata.MoveData(ctx, dEnv, mover, cpOpts)

if skipped > 0 {
cli.PrintErrln(color.YellowString("Lines skipped: %d", skipped))
}
if verr != nil {
return commands.HandleVErrAndExitCode(verr, usage)
}

return commands.HandleVErrAndExitCode(verr, usage)
}

func newTableCopyDataMover(ctx context.Context, root *doltdb.RootValue, fs filesys.Filesys, co copyOptions, statsCB noms.StatsCB) (*mvdata.DataMover, errhand.VerboseError) {
var rd table.TableReadCloser
var err error

ow, err := co.checkOverwrite(ctx, root, fs)
if err != nil {
return nil, errhand.VerboseErrorFromError(err)
}
if ow {
return nil, errhand.BuildDError("%s already exists. Use -f to overwrite.", co.DestName()).Build()
}

rd, srcIsSorted, err := co.Src.NewReader(ctx, root, fs, nil)

if err != nil {
return nil, errhand.BuildDError("Error creating reader for %s.", co.newTblName).AddCause(err).Build()
}

defer func() {
if rd != nil {
rd.Close(ctx)
}
}()

oldTblSch := rd.GetSchema()
cc, err := root.GenerateTagsForNewColColl(ctx, co.newTblName, oldTblSch.GetAllCols())

if err != nil {
return nil, errhand.BuildDError("Error create schema for new table %s", co.newTblName).AddCause(err).Build()
}

newTblSch := schema.SchemaFromCols(cc)
newTblSch.Indexes().Merge(oldTblSch.Indexes().AllIndexes()...)

transforms, err := mvdata.NameMapTransform(oldTblSch, newTblSch, make(rowconv.NameMapper))

if err != nil {
return nil, errhand.BuildDError("Error determining the mapping from input fields to output fields.").AddDetails(
"When attempting to move data from %s to %s, determine the mapping from input fields to output fields.", co.SrcName(), co.DestName()).AddCause(err).Build()
}

wr, err := co.Dest.NewCreatingWriter(ctx, co, root, fs, srcIsSorted, newTblSch, statsCB)

if err != nil {
return nil, errhand.BuildDError("Could not create table writer for %s", co.newTblName).AddCause(err).Build()
}

imp := &mvdata.DataMover{Rd: rd, Transforms: transforms, Wr: wr, ContOnErr: co.contOnErr}
rd = nil

return imp, nil
cli.CliOut = ioutil.Discard // display nothing on success
return commands.SqlCmd{}.Exec(ctx, "", []string{
fmt.Sprintf("--%s", commands.BatchFlag),
fmt.Sprintf(`--%s`, commands.QueryFlag),
queryStr,
}, dEnv)
}

@@ -16,14 +16,11 @@ package tblcmds

import (
"context"

"github.com/liquidata-inc/dolt/go/cmd/dolt/commands/schcmds"
"fmt"

"github.com/liquidata-inc/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/dolt/go/cmd/dolt/commands"
"github.com/liquidata-inc/dolt/go/cmd/dolt/errhand"
eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/env"
"github.com/liquidata-inc/dolt/go/libraries/utils/argparser"
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
@@ -87,51 +84,17 @@ func (cmd MvCmd) Exec(ctx context.Context, commandStr string, args []string, dEn
return 1
}

force := apr.Contains(forceParam)

working, verr := commands.GetWorkingWithVErr(dEnv)
if verr != nil {
return commands.HandleVErrAndExitCode(verr, usage)
}

oldName := apr.Arg(0)
newName := apr.Arg(1)

if doltdb.HasDoltPrefix(oldName) {
return commands.HandleVErrAndExitCode(
errhand.BuildDError("error renaming table %s", oldName).AddCause(doltdb.ErrSystemTableCannotBeModified).Build(), usage)
queryStr := ""
if force := apr.Contains(forceParam); force {
queryStr = fmt.Sprintf("DROP TABLE IF EXISTS `%s`;", newName)
}
queryStr = fmt.Sprintf("%sRENAME TABLE `%s` TO `%s`;", queryStr, oldName, newName)

if verr = schcmds.ValidateTableNameForCreate(newName); verr != nil {
return commands.HandleVErrAndExitCode(verr, usage)
}

_, ok, err := working.GetTable(ctx, oldName)

if err != nil {
verr = errhand.BuildDError("error: failed to read tables from working set").Build()
return commands.HandleVErrAndExitCode(verr, usage)
}
if !ok {
verr = errhand.BuildDError("Table '%s' not found.", oldName).Build()
return commands.HandleVErrAndExitCode(verr, usage)
}

has, err := working.HasTable(ctx, newName)

if err != nil {
verr = errhand.BuildDError("error: failed to read tables from working set").AddCause(err).Build()
return commands.HandleVErrAndExitCode(verr, usage)
} else if !force && has {
verr = errhand.BuildDError("Data already exists in '%s'. Use -f to overwrite.", newName).Build()
return commands.HandleVErrAndExitCode(verr, usage)
}

working, err = working.RenameTable(ctx, oldName, newName)
if err != nil {
verr = errhand.BuildDError("Unable to remove '%s'", oldName).Build()
return commands.HandleVErrAndExitCode(verr, usage)
}

return commands.HandleVErrAndExitCode(commands.UpdateWorkingWithVErr(dEnv, working), usage)
return commands.SqlCmd{}.Exec(ctx, "", []string{
fmt.Sprintf("--%s", commands.BatchFlag),
fmt.Sprintf(`--%s`, commands.QueryFlag),
queryStr,
}, dEnv)
}

@@ -16,6 +16,8 @@ package tblcmds

import (
"context"
"fmt"
"io/ioutil"

eventsapi "github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
@@ -83,33 +85,15 @@ func (cmd RmCmd) Exec(ctx context.Context, commandStr string, args []string, dEn
}
}

working, verr := commands.GetWorkingWithVErr(dEnv)
if verr != nil {
return exitWithVerr(verr)
queryStr := ""
for _, tableName := range apr.Args() {
queryStr = fmt.Sprintf("%sDROP TABLE `%s`;", queryStr, tableName)
}

if verr := commands.ValidateTablesWithVErr(apr.Args(), working); verr != nil {
return exitWithVerr(verr)
}

if verr := removeTables(ctx, dEnv, apr.Args(), working); verr != nil {
return exitWithVerr(verr)
}

return 0
}

func exitWithVerr(verr errhand.VerboseError) int {
cli.PrintErrln(verr.Verbose())
return 1
}

func removeTables(ctx context.Context, dEnv *env.DoltEnv, tables []string, working *doltdb.RootValue) errhand.VerboseError {
working, err := working.RemoveTables(ctx, tables...)

if err != nil {
return errhand.BuildDError("Unable to remove table(s)").AddCause(err).Build()
}

return commands.UpdateWorkingWithVErr(dEnv, working)
cli.CliOut = ioutil.Discard // display nothing on success
return commands.SqlCmd{}.Exec(ctx, "", []string{
fmt.Sprintf("--%s", commands.BatchFlag),
fmt.Sprintf(`--%s`, commands.QueryFlag),
queryStr,
}, dEnv)
}

@@ -46,7 +46,7 @@ require (
github.com/kch42/buzhash v0.0.0-20160816060738-9bdec3dec7c6
github.com/lestrrat-go/strftime v1.0.3 // indirect
github.com/liquidata-inc/dolt/go/gen/proto/dolt/services/eventsapi v0.0.0-20200730200742-c031ec8cba06
github.com/liquidata-inc/go-mysql-server v0.6.1-0.20200911214625-e81d5c813cb5
github.com/liquidata-inc/go-mysql-server v0.6.1-0.20200921093816-a2f606c0d13d
github.com/liquidata-inc/ishell v0.0.0-20190514193646-693241f1f2a0
github.com/liquidata-inc/mmap-go v1.0.3
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15

62
go/go.sum
@@ -43,6 +43,7 @@ github.com/CAFxX/gcnotifier v0.0.0-20190112062741-224a280d589d/go.mod h1:Rn2zM2M
|
||||
github.com/DATA-DOG/go-sqlmock v1.4.1 h1:ThlnYciV1iM/V0OSF/dtkqWb6xo5qITT1TJBG1MRDJM=
|
||||
github.com/DATA-DOG/go-sqlmock v1.4.1/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
|
||||
github.com/DataDog/datadog-go v0.0.0-20180822151419-281ae9f2d895/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
|
||||
github.com/DataDog/datadog-go v2.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
|
||||
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
|
||||
github.com/DataDog/datadog-go v3.4.0+incompatible h1:LZ0OTmlvhCBT0VYUvhGu8Lrc7WqNCj6Zw9HnMi0V6mA=
|
||||
github.com/DataDog/datadog-go v3.4.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
|
||||
@@ -77,6 +78,7 @@ github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb
|
||||
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
|
||||
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
|
||||
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
|
||||
github.com/armon/go-metrics v0.0.0-20190430140413-ec5e00d3c878/go.mod h1:3AMJUQhVx52RsWOnlkpikZr01T/yAVN2gn0861vByNg=
|
||||
github.com/armon/go-metrics v0.3.2 h1:EyUnxyP2yaGpLgMiuyyz8sHnByqeTJUfGs72pdH0i4A=
|
||||
github.com/armon/go-metrics v0.3.2/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc=
|
||||
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
|
||||
@@ -87,6 +89,7 @@ github.com/asaskevich/govalidator v0.0.0-20200428143746-21a406dcc535/go.mod h1:o
|
||||
github.com/attic-labs/kingpin v2.2.7-0.20180312050558-442efcfac769+incompatible h1:wd5mq8xSfwCYd1JpQ309s+3tTlP/gifcG2awOA3x5Vk=
|
||||
github.com/attic-labs/kingpin v2.2.7-0.20180312050558-442efcfac769+incompatible/go.mod h1:Cp18FeDCvsK+cD2QAGkqerGjrgSXLiJWnjHeY2mneBc=
|
||||
github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU=
|
||||
github.com/aws/aws-sdk-go v0.0.0-20180223184012-ebef4262e06a/go.mod h1:ZRmQr0FajVIyZ4ZzBYKG5P3ZqPz9IHG41ZoMu1ADI3k=
|
||||
github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
|
||||
github.com/aws/aws-sdk-go v1.32.6 h1:HoswAabUWgnrUF7X/9dr4WRgrr8DyscxXvTDm7Qw/5c=
|
||||
github.com/aws/aws-sdk-go v1.32.6/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
|
||||
@@ -118,17 +121,21 @@ github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp
|
||||
github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
|
||||
github.com/cockroachdb/cmux v0.0.0-20170110192607-30d10be49292/go.mod h1:qRiX68mZX1lGBkTWyp3CLcenw9I94W2dLeRvMzcn9N4=
|
||||
github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8=
|
||||
github.com/codahale/blake2 v0.0.0-20150924215134-8d10d0420cbf h1:5ZeQB3mThuz5C2MSER6T5GdtXTF9CMMk42F9BOyRsEQ=
|
||||
github.com/codahale/blake2 v0.0.0-20150924215134-8d10d0420cbf/go.mod h1:BO2rLUAZMrpgh6GBVKi0Gjdqw2MgCtJrtmUdDeZRKjY=
|
||||
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd h1:qMd81Ts1T2OTKmB4acZcyKaMtRnY5Y44NuXGX2GFJ1w=
|
||||
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
|
||||
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
|
||||
github.com/coreos/etcd v0.0.0-20170626015032-703663d1f6ed/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
||||
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
||||
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
|
||||
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
|
||||
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
|
||||
github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
|
||||
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
|
||||
github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
|
||||
github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
|
||||
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||
@@ -156,6 +163,7 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF
|
||||
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
|
||||
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
|
||||
github.com/evanphx/json-patch v4.5.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
|
||||
github.com/fastly/go-utils v0.0.0-20180712184237-d95a45783239 h1:Ghm4eQYC0nEPnSJdVkTrXpu9KtoVCSo1hg7mtI7G9KU=
|
||||
github.com/fastly/go-utils v0.0.0-20180712184237-d95a45783239/go.mod h1:Gdwt2ce0yfBxPvZrHkprdPPTTS3N5rwmLE8T22KBXlw=
|
||||
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
|
||||
@@ -166,10 +174,12 @@ github.com/flynn-archive/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:r
|
||||
github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4=
|
||||
github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20=
|
||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||
github.com/ghodss/yaml v0.0.0-20161207003320-04f313413ffd/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-ini/ini v1.12.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
|
||||
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
|
||||
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
|
||||
github.com/go-kit/kit v0.10.0 h1:dXFJfIHVvUcpSgDOV+Ne6t7jXri8Tfv2uOLHUZ2XNuo=
|
||||
@@ -250,6 +260,7 @@ github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvq
|
||||
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
|
||||
github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0=
|
||||
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
|
||||
github.com/golang/snappy v0.0.0-20170215233205-553a64147049/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=
|
||||
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
@@ -281,6 +292,7 @@ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+
|
||||
github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=
|
||||
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
|
||||
github.com/gorilla/handlers v1.3.0/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ=
|
||||
github.com/gorilla/handlers v1.4.2 h1:0QniY0USkHQ1RGCLfKxeNHK9bkDHGRYGNDFBCS+YARg=
|
||||
@@ -290,13 +302,17 @@ github.com/gorilla/mux v1.7.0/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2z
|
||||
github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
|
||||
github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc=
|
||||
github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
|
||||
github.com/gorilla/websocket v0.0.0-20160912153041-2d1e4548da23/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
|
||||
github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
|
||||
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.1.0/go.mod h1:f5nM7jw/oeRSadq3xCzHAvxcr8HZnzsqU6ILg/0NiiE=
|
||||
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
|
||||
github.com/grpc-ecosystem/grpc-gateway v0.0.0-20161128002007-199c40a060d1/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
|
||||
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
|
||||
github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
|
||||
github.com/hashicorp/consul v1.4.0/go.mod h1:mFrjN1mfidgJfYP1xrJCF+AfRhr6Eaqhb2+sfyn/OOI=
|
||||
github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE=
|
||||
github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
|
||||
github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
|
||||
@@ -308,9 +324,11 @@ github.com/hashicorp/go-immutable-radix v1.1.0 h1:vN9wG1D6KG6YHRTWr8512cxGOVgTME
|
||||
github.com/hashicorp/go-immutable-radix v1.1.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
|
||||
github.com/hashicorp/go-msgpack v0.5.3 h1:zKjpN5BK/P5lMYrLmBHdBULWbJ0XpYR+7NGzqkZzoD4=
|
||||
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
|
||||
github.com/hashicorp/go-msgpack v0.5.5/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
|
||||
github.com/hashicorp/go-multierror v1.0.0 h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o=
|
||||
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
|
||||
github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
|
||||
github.com/hashicorp/go-rootcerts v0.0.0-20160503143440-6bb64b370b90/go.mod h1:o4zcYY1e0GEZI6eSEr+43QDYmuGglw1qSO6qdHUHCgg=
|
||||
github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
|
||||
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
|
||||
github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc=
|
||||
@@ -331,8 +349,10 @@ github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T
|
||||
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
|
||||
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
|
||||
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
|
||||
github.com/hashicorp/memberlist v0.1.4/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
|
||||
github.com/hashicorp/memberlist v0.1.6 h1:ouPxvwKYaNZe+eTcHxYP0EblPduVLvIPycul+vv8his=
|
||||
github.com/hashicorp/memberlist v0.1.6/go.mod h1:5VDNHjqFMgEcclnwmkCnC99IPwxBmIsxwY8qn+Nl0H4=
|
||||
github.com/hashicorp/serf v0.0.0-20161207011743-d3a67ab21bc8/go.mod h1:h/Ru6tmZazX7WO/GDmwdpS975F019L4t5ng5IgwbNrE=
|
||||
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
|
||||
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
||||
github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg=
|
||||
@@ -344,6 +364,7 @@ github.com/jedib0t/go-pretty v4.3.1-0.20191104025401-85fe5d6a7c4d+incompatible h
|
||||
github.com/jedib0t/go-pretty v4.3.1-0.20191104025401-85fe5d6a7c4d+incompatible/go.mod h1:XemHduiw8R651AF9Pt4FwCTKeG3oo7hrHJAoznj9nag=
|
||||
github.com/jehiah/go-strftime v0.0.0-20171201141054-1d33003b3869 h1:IPJ3dvxmJ4uczJe5YQdrYB16oTJlGSC/OyZDqUk9xX4=
|
||||
github.com/jehiah/go-strftime v0.0.0-20171201141054-1d33003b3869/go.mod h1:cJ6Cj7dQo+O6GJNiMx+Pa94qKj+TG8ONdKHgMNIyyag=
|
||||
github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
|
||||
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
|
||||
github.com/jmespath/go-jmespath v0.3.0 h1:OS12ieG61fsCg5+qLJ+SsW9NicxNkg3b25OyT2yCeUc=
|
||||
github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik=
|
||||
@@ -360,6 +381,7 @@ github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/u
|
||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||
github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=
|
||||
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
|
||||
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/juju/fslock v0.0.0-20160525022230-4d5c94c67b4b h1:FQ7+9fxhyp82ks9vAuyPzG0/vVbWwMwLJ+P6yJI5FN8=
|
||||
github.com/juju/fslock v0.0.0-20160525022230-4d5c94c67b4b/go.mod h1:HMcgvsgd0Fjj4XXDkbjdmlbI505rUPBs6WBMYg2pXks=
|
||||
@@ -373,7 +395,11 @@ github.com/kch42/buzhash v0.0.0-20160816060738-9bdec3dec7c6/go.mod h1:UtDV9qK925
|
||||
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
|
||||
github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/compress v0.0.0-20180801095237-b50017755d44/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
|
||||
github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
|
||||
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
|
||||
github.com/klauspost/crc32 v1.2.0/go.mod h1:+ZoRqAPRLkC4NPOvfYeR5KNOrY6TD+/sAC3HXPZgDYg=
|
||||
github.com/klauspost/pgzip v1.2.0/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=
|
||||
@@ -384,6 +410,7 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/krishicks/yaml-patch v0.0.10/go.mod h1:Sm5TchwZS6sm7RJoyg87tzxm2ZcKzdRE4Q7TjNhPrME=
|
||||
github.com/lestrrat-go/envload v0.0.0-20180220234015-a3eb8ddeffcc h1:RKf14vYWi2ttpEmkA4aQ3j4u9dStX2t4M8UM6qqNsG8=
|
||||
github.com/lestrrat-go/envload v0.0.0-20180220234015-a3eb8ddeffcc/go.mod h1:kopuH9ugFRkIXf3YoqHKyrJ9YfUFsckUU9S7B+XP+is=
|
||||
github.com/lestrrat-go/strftime v1.0.1 h1:o7qz5pmLzPDLyGW4lG6JvTKPUfTFXwe+vOamIYWtnVU=
|
||||
@@ -397,12 +424,19 @@ github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-b
|
||||
github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4=
|
||||
github.com/liquidata-inc/go-mysql-server v0.6.1-0.20200911214625-e81d5c813cb5 h1:iqHHM9uv2NjAoSVc5KQTcFzsV4wb0uwbCIL6ULVM13c=
|
||||
github.com/liquidata-inc/go-mysql-server v0.6.1-0.20200911214625-e81d5c813cb5/go.mod h1:T6gBgEkMsA2WUGPD/J25wuVyX+4Gd2k9mPBta9YDoxY=
|
||||
github.com/liquidata-inc/go-mysql-server v0.6.1-0.20200916145502-4eafaab16140 h1:ka5fhv0hSfdxEMLyfqZIYYOe04h6p13hoVRKz2BZo/Q=
|
||||
github.com/liquidata-inc/go-mysql-server v0.6.1-0.20200916145502-4eafaab16140/go.mod h1:T6gBgEkMsA2WUGPD/J25wuVyX+4Gd2k9mPBta9YDoxY=
|
||||
github.com/liquidata-inc/go-mysql-server v0.6.1-0.20200918120654-0049c3926457 h1:SNt2+HGqlGxQ26CjS8XlUOHk75IIdonNCXVNGFHwFjY=
|
||||
github.com/liquidata-inc/go-mysql-server v0.6.1-0.20200918120654-0049c3926457/go.mod h1:T6gBgEkMsA2WUGPD/J25wuVyX+4Gd2k9mPBta9YDoxY=
|
||||
github.com/liquidata-inc/go-mysql-server v0.6.1-0.20200921093816-a2f606c0d13d h1:NX1ZR4S8Ee3Qud8XYYxcbNF/F9Kmr28VTikawaJB+9s=
|
||||
github.com/liquidata-inc/go-mysql-server v0.6.1-0.20200921093816-a2f606c0d13d/go.mod h1:T6gBgEkMsA2WUGPD/J25wuVyX+4Gd2k9mPBta9YDoxY=
|
||||
github.com/liquidata-inc/ishell v0.0.0-20190514193646-693241f1f2a0 h1:phMgajKClMUiIr+hF2LGt8KRuUa2Vd2GI1sNgHgSXoU=
|
||||
github.com/liquidata-inc/ishell v0.0.0-20190514193646-693241f1f2a0/go.mod h1:YC1rI9k5gx8D02ljlbxDfZe80s/iq8bGvaaQsvR+qxs=
|
||||
github.com/liquidata-inc/mmap-go v1.0.3 h1:2LndAeAtup9rpvUmu4wZSYCsjCQ0Zpc+NqE+6+PnT7g=
|
||||
github.com/liquidata-inc/mmap-go v1.0.3/go.mod h1:w0doE7jfkuDEZyxb/zD3VWnRaQBYx1uDTS816kH8HoY=
|
||||
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15 h1:H3RwcYfzkdW4kFh7znTUopcX3XZqnFXm6pcmxSy0mNo=
|
||||
github.com/liquidata-inc/sqllogictest/go v0.0.0-20200320151923-b11801f10e15/go.mod h1:kKRVtyuomkqz15YFRpS0OT8kpsU8y/F3jyiZtvALdKU=
|
||||
github.com/liquidata-inc/vitess v0.0.0-20200828190113-f278600fb87c/go.mod h1:E8nYT1vcL2NROtFwUN02CiA6cz276CFuB0Q1Zja5CAo=
|
||||
github.com/liquidata-inc/vitess v0.0.0-20200911213404-87f76781a7c7 h1:CGiw+RfbIXsE+BPA8d7WpyvOVJUyw1OzDNxxTMa8qtI=
|
||||
github.com/liquidata-inc/vitess v0.0.0-20200911213404-87f76781a7c7/go.mod h1:8PM478sNETVVNWL2tVW+Uy4LIH31/x/tphLIRPh/hF4=
|
||||
github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ=
|
||||
@@ -422,6 +456,7 @@ github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd
|
||||
github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
|
||||
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
|
||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||
github.com/mattn/go-runewidth v0.0.1/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
|
||||
github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
|
||||
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
|
||||
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||
@@ -435,6 +470,7 @@ github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyex
|
||||
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
|
||||
github.com/miekg/dns v1.1.27 h1:aEH/kqUzUxGJ/UHcEKdJY+ugH6WEzsEBBSPa8zuy1aM=
|
||||
github.com/miekg/dns v1.1.27/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
|
||||
github.com/minio/minio-go v0.0.0-20190131015406-c8a261de75c1/go.mod h1:vuvdOZLJuf5HmJAJrKV64MmozrSsk+or0PB5dzdfspg=
|
||||
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
|
||||
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||
@@ -466,13 +502,17 @@ github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLA
|
||||
github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs=
|
||||
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
|
||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||
github.com/olekukonko/tablewriter v0.0.0-20160115111002-cca8bbc07984/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo=
|
||||
github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo=
|
||||
github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852 h1:Yl0tPBa8QPjGmesFh1D0rDy+q1Twx6FyU7VWHi8wZbI=
|
||||
github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852/go.mod h1:eqOVx5Vwu4gd2mmMZvVZsgIqNSaW3xxRThUJ0k/TPk4=
|
||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
|
||||
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
|
||||
github.com/opentracing-contrib/go-grpc v0.0.0-20180928155321-4b5a12d3ff02/go.mod h1:JNdpVEzCpXBgIiv4ds+TzhN1hrtxq6ClLrTlT9OQRSc=
|
||||
github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis=
|
||||
github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74=
|
||||
github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
|
||||
@@ -487,6 +527,7 @@ github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIw
|
||||
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
||||
github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=
|
||||
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
||||
github.com/pborman/uuid v0.0.0-20160824210600-b984ec7fa9ff/go.mod h1:VyrYX9gd7irzKovcSS6BIIEwPRkP2Wm2m9ufcdFSJ34=
|
||||
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
|
||||
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
||||
github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo=
|
||||
@@ -509,9 +550,11 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
|
||||
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
|
||||
github.com/prometheus/client_golang v0.9.2/go.mod h1:OsXs2jCmiKlQ1lTBmv21f2mNfw4xf/QclQDMrYNZzcM=
|
||||
github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
|
||||
github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso=
|
||||
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
|
||||
github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQP1xR9D75/vuwEF3g=
|
||||
github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og=
|
||||
github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
|
||||
github.com/prometheus/client_golang v1.4.1 h1:FFSuS004yOQEtDdTq+TAOLP5xUq63KqAFYyOi8zA+Y8=
|
||||
@@ -524,16 +567,21 @@ github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6T
|
||||
github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=
|
||||
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
|
||||
github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
|
||||
github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
|
||||
github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
|
||||
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
|
||||
github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+Zk0j9GMYc=
|
||||
github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA=
|
||||
github.com/prometheus/common v0.9.1 h1:KOMtN28tlbam3/7ZKEYKHhKoJZYYj3gMH4uc62x7X7U=
|
||||
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
|
||||
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
||||
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
||||
github.com/prometheus/procfs v0.0.3/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ=
|
||||
github.com/prometheus/procfs v0.0.5/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ=
|
||||
github.com/prometheus/procfs v0.0.8 h1:+fpWZdT24pJBiqJdAwYBjPSk+5YmQzYNPYzQsdzLkt8=
|
||||
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
|
||||
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
|
||||
@@ -553,6 +601,7 @@ github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFo
|
||||
github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E=
|
||||
github.com/sanity-io/litter v1.2.0 h1:DGJO0bxH/+C2EukzOSBmAlxmkhVMGqzvcx/rvySYw9M=
|
||||
github.com/sanity-io/litter v1.2.0/go.mod h1:JF6pZUFgu2Q0sBZ+HSV35P8TVPI1TTzEwyu9FXAw2W4=
|
||||
github.com/satori/go.uuid v0.0.0-20160713180306-0aa62d5ddceb/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
||||
github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=
|
||||
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
|
||||
@@ -576,6 +625,8 @@ github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrf
|
||||
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA=
|
||||
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
|
||||
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
|
||||
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
|
||||
github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY=
|
||||
@@ -612,6 +663,7 @@ github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81P
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/tchap/go-patricia v0.0.0-20160729071656-dd168db6051b/go.mod h1:bmLyhP68RS6kStMGxByiQ23RP/odRBOTVjwp2cDyi6I=
|
||||
github.com/tealeg/xlsx v1.0.5 h1:+f8oFmvY8Gw1iUXzPk+kz+4GpbDZPK1FhPiQRd+ypgE=
|
||||
github.com/tealeg/xlsx v1.0.5/go.mod h1:btRS8dz54TDnvKNosuAqxrM1QgN1udgk9O34bDCnORM=
|
||||
github.com/tebeka/strftime v0.1.4 h1:e0FKSyxthD1Xk4cIixFPoyfD33u2SbjNngOaaC3ePoU=
|
||||
@@ -630,7 +682,9 @@ github.com/uber/jaeger-lib v2.0.0+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6
|
||||
github.com/uber/jaeger-lib v2.2.0+incompatible h1:MxZXOiR2JuoANZ3J6DE/U0kSFv/eJ/GfSYVCjK7dyaw=
|
||||
github.com/uber/jaeger-lib v2.2.0+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U=
|
||||
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
|
||||
github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw=
|
||||
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
|
||||
github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY=
|
||||
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
|
||||
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
|
||||
github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I=
|
||||
@@ -639,6 +693,7 @@ github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q
|
||||
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
|
||||
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/z-division/go-zookeeper v0.0.0-20190128072838-6d7457066b9b/go.mod h1:JNALoWa+nCXR8SmgLluHcBNVJgyejzpKPZk9pX2yXXE=
|
||||
go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
|
||||
go.etcd.io/bbolt v1.3.3 h1:MUGmc65QhB3pIlaQ5bB4LwqSj6GIonVJXpZiaKNyaKk=
|
||||
go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
|
||||
@@ -672,6 +727,7 @@ go.uber.org/zap v1.15.0/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc=
|
||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190128193316-c7b33c32a30b/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
|
||||
golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
@@ -679,6 +735,7 @@ golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8U
|
||||
golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190829043050-9756ffdc2472/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
@@ -776,6 +833,7 @@ golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5h
|
||||
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181221143128-b4a75ba826a6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190124100055-b90733256f2e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@@ -790,6 +848,7 @@ golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190926180325-855e68c8590b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@@ -968,6 +1027,7 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj
|
||||
google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c=
|
||||
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
|
||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
||||
gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
@@ -977,6 +1037,8 @@ gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qS
|
||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||
gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o=
|
||||
gopkg.in/ini.v1 v1.41.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/ldap.v2 v2.5.0/go.mod h1:oI0cpe/D7HRtBQl8aTg+ZmzFUAvu4lsv3eLXMLGFxWk=
|
||||
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
||||
gopkg.in/square/go-jose.v2 v2.5.1 h1:7odma5RETjNHWJnR32wx8t+Io4djHE1PqxCFx3iiZ2w=
|
||||
gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
|
||||
|
||||
@@ -48,18 +48,16 @@ type testCommand struct {

var setupCommon = []testCommand{
{commands.SqlCmd{}, []string{"-q", "create table parent (" +
"id int comment 'tag:0'," +
"v1 int comment 'tag:1'," +
"v2 int comment 'tag:2'," +
//"index v1_idx (v1)," +
//"index v1_idx (v2)," +
"id int," +
"v1 int," +
"v2 int," +
"index v1_idx (v1)," +
"index v2_idx (v2)," +
"primary key(id));"}},
{commands.SqlCmd{}, []string{"-q", "alter table parent add index v1_idx (v1);"}},
{commands.SqlCmd{}, []string{"-q", "alter table parent add index v2_idx (v2);"}},
{commands.SqlCmd{}, []string{"-q", "create table child (" +
"id int comment 'tag:10', " +
"v1 int comment 'tag:11'," +
"v2 int comment 'tag:12'," +
"id int, " +
"v1 int," +
"v2 int," +
"primary key(id));"}},
}

@@ -119,10 +117,10 @@ var foreignKeyTests = []foreignKeyTest{
Name: "child_fk",
TableName: "child",
TableIndex: "v1_idx",
TableColumns: []uint64{11},
TableColumns: []uint64{1215},
ReferencedTableName: "parent",
ReferencedTableIndex: "v1_idx",
ReferencedTableColumns: []uint64{1},
ReferencedTableColumns: []uint64{6269},
},
},
},
@@ -139,10 +137,10 @@ var foreignKeyTests = []foreignKeyTest{
Name: "multi_col",
TableName: "child",
TableIndex: "v1v2_idx",
TableColumns: []uint64{11, 12},
TableColumns: []uint64{1215, 8734},
ReferencedTableName: "parent",
ReferencedTableIndex: "v1v2_idx",
ReferencedTableColumns: []uint64{1, 2},
ReferencedTableColumns: []uint64{6269, 7947},
},
},
},
@@ -161,19 +159,19 @@ var foreignKeyTests = []foreignKeyTest{
Name: "fk1",
TableName: "child",
TableIndex: "v1_idx",
TableColumns: []uint64{11},
TableColumns: []uint64{1215},
ReferencedTableName: "parent",
ReferencedTableIndex: "v1_idx",
ReferencedTableColumns: []uint64{1},
ReferencedTableColumns: []uint64{6269},
},
{
Name: "fk2",
TableName: "child",
TableIndex: "v2_idx",
TableColumns: []uint64{12},
TableColumns: []uint64{8734},
ReferencedTableName: "parent",
ReferencedTableIndex: "v2_idx",
ReferencedTableColumns: []uint64{2},
ReferencedTableColumns: []uint64{7947},
},
},
},
@@ -181,8 +179,8 @@ var foreignKeyTests = []foreignKeyTest{
name: "create table with foreign key",
setup: []testCommand{
{commands.SqlCmd{}, []string{"-q", `create table new_table (
id int comment 'tag:20',
v1 int comment 'tag:21',
id int,
v1 int,
constraint new_fk foreign key (v1) references parent(v1),
primary key(id));`}},
},
@@ -192,10 +190,10 @@ var foreignKeyTests = []foreignKeyTest{
TableName: "new_table",
// unnamed indexes take the column name
TableIndex: "v1",
TableColumns: []uint64{21},
TableColumns: []uint64{7597},
ReferencedTableName: "parent",
ReferencedTableIndex: "v1_idx",
ReferencedTableColumns: []uint64{1},
ReferencedTableColumns: []uint64{6269},
},
},
},
@@ -214,20 +212,20 @@ var foreignKeyTests = []foreignKeyTest{
Name: "fk1",
TableName: "child",
TableIndex: "v1_idx",
TableColumns: []uint64{11},
TableColumns: []uint64{1215},
ReferencedTableName: "parent",
ReferencedTableIndex: "v1_idx",
ReferencedTableColumns: []uint64{1},
ReferencedTableColumns: []uint64{6269},
OnUpdate: doltdb.ForeignKeyReferenceOption_Cascade,
},
{
Name: "fk2",
TableName: "child",
TableIndex: "v2_idx",
TableColumns: []uint64{12},
TableColumns: []uint64{8734},
ReferencedTableName: "parent",
ReferencedTableIndex: "v2_idx",
ReferencedTableColumns: []uint64{2},
ReferencedTableColumns: []uint64{7947},
OnDelete: doltdb.ForeignKeyReferenceOption_SetNull,
},
},
@@ -244,10 +242,10 @@ var foreignKeyTests = []foreignKeyTest{
Name: "child_fk",
TableName: "child",
TableIndex: "v1_idx",
TableColumns: []uint64{11},
TableColumns: []uint64{1215},
ReferencedTableName: "parent",
ReferencedTableIndex: "v1_idx",
ReferencedTableColumns: []uint64{1},
ReferencedTableColumns: []uint64{6269},
OnUpdate: doltdb.ForeignKeyReferenceOption_Cascade,
OnDelete: doltdb.ForeignKeyReferenceOption_Cascade,
},
@@ -256,19 +254,19 @@ var foreignKeyTests = []foreignKeyTest{
{
name: "create foreign keys with all update and delete rules",
setup: []testCommand{
{commands.SqlCmd{}, []string{"-q", "alter table parent add column v3 int comment 'tag:3';"}},
{commands.SqlCmd{}, []string{"-q", "alter table parent add column v4 int comment 'tag:4';"}},
{commands.SqlCmd{}, []string{"-q", "alter table parent add column v5 int comment 'tag:5';"}},
{commands.SqlCmd{}, []string{"-q", "alter table parent add column v3 int;"}},
{commands.SqlCmd{}, []string{"-q", "alter table parent add column v4 int;"}},
{commands.SqlCmd{}, []string{"-q", "alter table parent add column v5 int;"}},
{commands.SqlCmd{}, []string{"-q", "alter table parent add index v3_idx (v3);"}},
{commands.SqlCmd{}, []string{"-q", "alter table parent add index v4_idx (v4);"}},
{commands.SqlCmd{}, []string{"-q", "alter table parent add index v5_idx (v5);"}},
{commands.SqlCmd{}, []string{"-q", `create table sibling (
id int comment 'tag:20',
v1 int comment 'tag:21',
v2 int comment 'tag:22',
v3 int comment 'tag:23',
v4 int comment 'tag:24',
v5 int comment 'tag:25',
id int,
v1 int,
v2 int,
v3 int,
v4 int,
v5 int,
constraint fk1 foreign key (v1) references parent(v1),
constraint fk2 foreign key (v2) references parent(v2) on delete restrict on update restrict,
constraint fk3 foreign key (v3) references parent(v3) on delete cascade on update cascade,
@@ -281,19 +279,19 @@ var foreignKeyTests = []foreignKeyTest{
Name: "fk1",
TableName: "sibling",
TableIndex: "v1",
TableColumns: []uint64{21},
TableColumns: []uint64{16080},
ReferencedTableName: "parent",
ReferencedTableIndex: "v1_idx",
ReferencedTableColumns: []uint64{1},
ReferencedTableColumns: []uint64{6269},
},
{
Name: "fk2",
TableName: "sibling",
TableIndex: "v2",
TableColumns: []uint64{22},
TableColumns: []uint64{7576},
ReferencedTableName: "parent",
ReferencedTableIndex: "v2_idx",
ReferencedTableColumns: []uint64{2},
ReferencedTableColumns: []uint64{7947},
OnUpdate: doltdb.ForeignKeyReferenceOption_Restrict,
OnDelete: doltdb.ForeignKeyReferenceOption_Restrict,
},
@@ -301,10 +299,10 @@ var foreignKeyTests = []foreignKeyTest{
Name: "fk3",
TableName: "sibling",
TableIndex: "v3",
TableColumns: []uint64{23},
TableColumns: []uint64{16245},
ReferencedTableName: "parent",
ReferencedTableIndex: "v3_idx",
ReferencedTableColumns: []uint64{3},
ReferencedTableColumns: []uint64{5237},
OnUpdate: doltdb.ForeignKeyReferenceOption_Cascade,
OnDelete: doltdb.ForeignKeyReferenceOption_Cascade,
},
@@ -312,10 +310,10 @@ var foreignKeyTests = []foreignKeyTest{
Name: "fk4",
TableName: "sibling",
TableIndex: "v4",
TableColumns: []uint64{24},
TableColumns: []uint64{9036},
ReferencedTableName: "parent",
ReferencedTableIndex: "v4_idx",
ReferencedTableColumns: []uint64{4},
ReferencedTableColumns: []uint64{14774},
OnUpdate: doltdb.ForeignKeyReferenceOption_SetNull,
OnDelete: doltdb.ForeignKeyReferenceOption_SetNull,
},
@@ -323,10 +321,10 @@ var foreignKeyTests = []foreignKeyTest{
Name: "fk5",
TableName: "sibling",
TableIndex: "v5",
TableColumns: []uint64{25},
TableColumns: []uint64{11586},
ReferencedTableName: "parent",
ReferencedTableIndex: "v5_idx",
ReferencedTableColumns: []uint64{5},
ReferencedTableColumns: []uint64{8125},
OnUpdate: doltdb.ForeignKeyReferenceOption_NoAction,
OnDelete: doltdb.ForeignKeyReferenceOption_NoAction,
},
@@ -343,10 +341,10 @@ var foreignKeyTests = []foreignKeyTest{
|
||||
TableName: "child",
|
||||
// unnamed indexes take the column name
|
||||
TableIndex: "v1",
|
||||
TableColumns: []uint64{11},
|
||||
TableColumns: []uint64{1215},
|
||||
ReferencedTableName: "parent",
|
||||
ReferencedTableIndex: "v1_idx",
|
||||
ReferencedTableColumns: []uint64{1},
|
||||
ReferencedTableColumns: []uint64{6269},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -358,13 +356,13 @@ var foreignKeyTests = []foreignKeyTest{
|
||||
},
|
||||
fks: []doltdb.ForeignKey{
|
||||
{
|
||||
Name: "ajk4bsgi",
|
||||
Name: "19eof0mu",
|
||||
TableName: "child",
|
||||
TableIndex: "v1_idx",
|
||||
TableColumns: []uint64{11},
|
||||
TableColumns: []uint64{1215},
|
||||
ReferencedTableName: "parent",
|
||||
ReferencedTableIndex: "v1_idx",
|
||||
ReferencedTableColumns: []uint64{1},
|
||||
ReferencedTableColumns: []uint64{6269},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -372,21 +370,21 @@ var foreignKeyTests = []foreignKeyTest{
|
||||
name: "create table with unnamed foreign key",
|
||||
setup: []testCommand{
|
||||
{commands.SqlCmd{}, []string{"-q", `create table new_table (
|
||||
id int comment 'tag:20',
|
||||
v1 int comment 'tag:21',
|
||||
id int,
|
||||
v1 int,
|
||||
foreign key (v1) references parent(v1),
|
||||
primary key(id));`}},
|
||||
},
|
||||
fks: []doltdb.ForeignKey{
|
||||
{
|
||||
Name: "7l96tsms",
|
||||
Name: "mv9a59oo",
|
||||
TableName: "new_table",
|
||||
// unnamed indexes take the column name
|
||||
TableIndex: "v1",
|
||||
TableColumns: []uint64{21},
|
||||
TableColumns: []uint64{7597},
|
||||
ReferencedTableName: "parent",
|
||||
ReferencedTableIndex: "v1_idx",
|
||||
ReferencedTableColumns: []uint64{1},
|
||||
ReferencedTableColumns: []uint64{6269},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -401,13 +399,13 @@ var foreignKeyTests = []foreignKeyTest{
|
||||
},
|
||||
fks: []doltdb.ForeignKey{
|
||||
{
|
||||
Name: "ltbb4q51",
|
||||
Name: "n4qun7ju",
|
||||
TableName: "child",
|
||||
TableIndex: "v1v2_idx",
|
||||
TableColumns: []uint64{11, 12},
|
||||
TableColumns: []uint64{1215, 8734},
|
||||
ReferencedTableName: "parent",
|
||||
ReferencedTableIndex: "v1v2_idx",
|
||||
ReferencedTableColumns: []uint64{1, 2},
|
||||
ReferencedTableColumns: []uint64{6269, 7947},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -423,22 +421,22 @@ var foreignKeyTests = []foreignKeyTest{
|
||||
},
|
||||
fks: []doltdb.ForeignKey{
|
||||
{
|
||||
Name: "ajk4bsgi",
|
||||
Name: "19eof0mu",
|
||||
TableName: "child",
|
||||
TableIndex: "v1_idx",
|
||||
TableColumns: []uint64{11},
|
||||
TableColumns: []uint64{1215},
|
||||
ReferencedTableName: "parent",
|
||||
ReferencedTableIndex: "v1_idx",
|
||||
ReferencedTableColumns: []uint64{1},
|
||||
ReferencedTableColumns: []uint64{6269},
|
||||
},
|
||||
{
|
||||
Name: "jui84jda",
|
||||
Name: "p79c8qtq",
|
||||
TableName: "child",
|
||||
TableIndex: "v2_idx",
|
||||
TableColumns: []uint64{12},
|
||||
TableColumns: []uint64{8734},
|
||||
ReferencedTableName: "parent",
|
||||
ReferencedTableIndex: "v2_idx",
|
||||
ReferencedTableColumns: []uint64{2},
|
||||
ReferencedTableColumns: []uint64{7947},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -461,10 +459,10 @@ var foreignKeyTests = []foreignKeyTest{
|
||||
Name: "fk1",
|
||||
TableName: "child",
|
||||
TableIndex: "v1_idx",
|
||||
TableColumns: []uint64{11},
|
||||
TableColumns: []uint64{1215},
|
||||
ReferencedTableName: "parent",
|
||||
ReferencedTableIndex: "v1_idx",
|
||||
ReferencedTableColumns: []uint64{1},
|
||||
ReferencedTableColumns: []uint64{6269},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -490,10 +488,10 @@ var foreignKeyTests = []foreignKeyTest{
|
||||
Name: "fk1",
|
||||
TableName: "child",
|
||||
TableIndex: "v1v2",
|
||||
TableColumns: []uint64{11, 12},
|
||||
TableColumns: []uint64{1215, 8734},
|
||||
ReferencedTableName: "parent",
|
||||
ReferencedTableIndex: "v1v2",
|
||||
ReferencedTableColumns: []uint64{1, 2},
|
||||
ReferencedTableColumns: []uint64{6269, 7947},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
@@ -697,6 +697,30 @@ func (t *Table) DeleteIndexRowData(ctx context.Context, indexName string) (*Tabl
	return t.SetIndexData(ctx, indexesMap)
}

// RenameIndexRowData changes the name for the index data. Does not verify that the new name is unoccupied. If the old
// name does not exist, then this returns the called table without error.
func (t *Table) RenameIndexRowData(ctx context.Context, oldIndexName, newIndexName string) (*Table, error) {
	indexesMap, err := t.GetIndexData(ctx)
	if err != nil {
		return nil, err
	}

	oldKey := types.String(oldIndexName)
	newKey := types.String(newIndexName)
	if indexRowData, ok, err := indexesMap.MaybeGet(ctx, oldKey); err != nil {
		return nil, err
	} else if ok {
		indexesMap, err = indexesMap.Edit().Set(newKey, indexRowData).Remove(oldKey).Map(ctx)
		if err != nil {
			return nil, err
		}
	} else {
		return t, nil
	}

	return t.SetIndexData(ctx, indexesMap)
}

func rebuildIndexRowData(ctx context.Context, vrw types.ValueReadWriter, sch schema.Schema, tblRowData types.Map, index schema.Index) (types.Map, error) {
	emptyIndexMap, err := types.NewMap(ctx, vrw)
	if err != nil {

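A minimal usage sketch (assumed, not part of this commit) for the new RenameIndexRowData method; `tbl` is taken to be an existing *Table and `ctx` a context.Context, and the index names are placeholders:

	// Move the stored row data for "v1_idx" under the new name "v1_index".
	// If "v1_idx" has no row data, the table is returned unchanged.
	renamed, err := tbl.RenameIndexRowData(ctx, "v1_idx", "v1_index")
	if err != nil {
		return nil, err
	}
	tbl = renamed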
@@ -18,11 +18,11 @@ import (
	"context"
	"fmt"
	"io"
	"strconv"
	"testing"

	"github.com/liquidata-inc/go-mysql-server/sql"
	"github.com/liquidata-inc/vitess/go/sqltypes"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
@@ -44,8 +44,8 @@ const (
	_
	AgeTag
)
const DripTag = 13
const DripTagRebased = 19
const DripTag = 2360
const DripTagRebased = 2361

type RebaseTagTest struct {
	// The name of this test. Names should be unique and descriptive.
@@ -65,13 +65,13 @@ type RebaseTagTest struct {
	ExpectedErrStr string
}

var createPeopleTable = fmt.Sprintf(`
var createPeopleTable = `
create table people (
id int comment 'tag:%d',
name varchar(20) not null comment 'tag:%d',
age int comment 'tag:%d',
id int,
name varchar(20) not null,
age int,
primary key (id)
);`, IdTag, NameTag, AgeTag)
);`

func columnCollection(cols ...schema.Column) *schema.ColCollection {
	pcc, err := schema.NewColCollection(cols...)
@@ -90,7 +90,7 @@ func newRow(vals row.TaggedValues, cc *schema.ColCollection) row.Row {
}

func newColTypeInfo(name string, tag uint64, typeInfo typeinfo.TypeInfo, partOfPK bool, constraints ...schema.ColConstraint) schema.Column {
	c, err := schema.NewColumnWithTypeInfo(name, tag, typeInfo, partOfPK, "", constraints...)
	c, err := schema.NewColumnWithTypeInfo(name, tag, typeInfo, partOfPK, "", "", constraints...)
	if err != nil {
		panic("could not create column")
	}
@@ -141,7 +141,7 @@ var RebaseTagTests = []RebaseTagTest{
|
||||
Name: "rebase entire history",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: createPeopleTable},
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.Query{Query: `insert into people (id, name, age, drip) values
|
||||
(10, "Patty Bouvier", 40, 8.5),
|
||||
(11, "Selma Bouvier", 40, 8.5);`},
|
||||
@@ -161,7 +161,7 @@ var RebaseTagTests = []RebaseTagTest{
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: createPeopleTable},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
},
|
||||
OldTag: DripTag,
|
||||
@@ -176,7 +176,7 @@ var RebaseTagTests = []RebaseTagTest{
|
||||
tc.Query{Query: createPeopleTable},
|
||||
tc.Query{Query: `insert into people (id, name, age) values (9, "Jacqueline Bouvier", 80);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.Query{Query: `insert into people (id, name, age, drip) values (11, "Selma Bouvier", 40, 8.5);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
},
|
||||
@@ -195,7 +195,7 @@ var RebaseTagTests = []RebaseTagTest{
|
||||
tc.Query{Query: createPeopleTable},
|
||||
tc.Query{Query: `insert into people (id, name, age) values (9, "Jacqueline Bouvier", 80);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.Query{Query: `update people set drip=9.9 where id=9;`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
},
|
||||
@@ -213,7 +213,7 @@ var RebaseTagTests = []RebaseTagTest{
|
||||
tc.Query{Query: createPeopleTable},
|
||||
tc.Query{Query: `insert into people (id, name, age) values (9, "Jacqueline Bouvier", 80);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.Query{Query: `insert into people (id, name, age, drip) values (11, "Selma Bouvier", 40, 8.5);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Query{Query: `update people set drip=9.9 where id=11;`},
|
||||
@@ -234,7 +234,7 @@ var RebaseTagTests = []RebaseTagTest{
|
||||
tc.Query{Query: createPeopleTable},
|
||||
tc.Query{Query: `insert into people (id, name, age) values (9, "Jacqueline Bouvier", 80);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.Query{Query: `insert into people (id, name, age, drip) values (11, "Selma Bouvier", 40, 8.5);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Query{Query: `update people set drip=9.9 where id=11;`},
|
||||
@@ -258,7 +258,7 @@ var RebaseTagTests = []RebaseTagTest{
|
||||
(7, "Maggie Simpson", 1),
|
||||
(8, "Milhouse Van Houten", 8);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Query{Query: `update people set age=2 where id=7;`},
|
||||
tc.Query{Query: `delete from people where id=8;`},
|
||||
@@ -281,7 +281,7 @@ var RebaseTagTests = []RebaseTagTest{
|
||||
tc.Query{Query: createPeopleTable},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Branch{BranchName: "newBranch"},
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.Query{Query: `insert into people (id, name, age, drip) values (11, "Selma Bouvier", 40, 8.5);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Checkout{BranchName: "newBranch"},
|
||||
@@ -311,7 +311,7 @@ var RebaseTagTests = []RebaseTagTest{
|
||||
(9, "Jacqueline Bouvier", 80);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Branch{BranchName: "newBranch"},
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.Query{Query: `delete from people where id=6;`},
|
||||
tc.Query{Query: `update people set drip=99.9 where id=7;`},
|
||||
tc.Query{Query: `insert into people (id, name, age, drip) values (11, "Selma Bouvier", 40, 8.5);`},
|
||||
@@ -343,7 +343,7 @@ var RebaseTagTests = []RebaseTagTest{
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Branch{BranchName: "newBranch"},
|
||||
tc.Checkout{BranchName: "newBranch"},
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.Query{Query: `insert into people (id, name, age, drip) values (11, "Selma Bouvier", 40, 8.5);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Checkout{BranchName: "master"},
|
||||
@@ -366,7 +366,7 @@ var RebaseTagTests = []RebaseTagTest{
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: createPeopleTable},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.Query{Query: `insert into people (id, name, age, drip) values (9, "Jacqueline Bouvier", 80, 8.5);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Branch{BranchName: "newBranch"},
|
||||
@@ -395,7 +395,7 @@ var RebaseTagTests = []RebaseTagTest{
|
||||
tc.Query{Query: createPeopleTable},
|
||||
tc.Query{Query: `insert into people (id, name, age) values (7, "Maggie Simpson", 1);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
tc.Query{Query: `insert into people (id, name, age) values (9, "Jacqueline Bouvier", 80);`},
|
||||
tc.CommitAll{Message: "made changes"},
|
||||
@@ -462,7 +462,7 @@ func testRebaseTagHistory(t *testing.T) {
|
||||
(7, "Maggie Simpson", 1);`},
|
||||
tc.CommitAll{Message: "made changes"}, // common ancestor of (newMaster, oldMaster) and (newMaster, other)
|
||||
|
||||
tc.Query{Query: `alter table people add drip double comment 'tag:` + strconv.Itoa(DripTag) + `';`},
|
||||
tc.Query{Query: `alter table people add drip double;`},
|
||||
tc.CommitAll{Message: "made changes"}, // common ancestor of (oldMaster, other)
|
||||
|
||||
tc.Branch{BranchName: "other"},
|
||||
@@ -528,9 +528,13 @@ func checkSchema(t *testing.T, r *doltdb.RootValue, tableName string, expectedSc
	require.NoError(t, err)
	sch, err := tbl.GetSchema(context.Background())
	require.NoError(t, err)
	eq, err := schema.SchemasAreEqual(sch, expectedSch)
	require.NoError(t, err)
	require.True(t, eq)
	require.Equal(t, expectedSch.GetAllCols().Size(), sch.GetAllCols().Size())
	cols := sch.GetAllCols().GetColumns()
	for i, expectedCol := range expectedSch.GetAllCols().GetColumns() {
		col := cols[i]
		col.Tag = expectedCol.Tag
		assert.Equal(t, expectedCol, col)
	}
}

func checkRows(t *testing.T, dEnv *env.DoltEnv, root *doltdb.RootValue, tableName string, sch schema.Schema, selectQuery string, expectedRows []row.Row) {

@@ -29,11 +29,10 @@ import (
)

const (
	pkTag = iota
	c0Tag
	c1Tag
	c11Tag
	c12Tag
	pkTag  = 16191
	c0Tag  = 8734
	c1Tag  = 15903
	c11Tag = 15001
)

type SuperSchemaTest struct {
@@ -53,7 +52,7 @@ type SuperSchemaTest struct {
	ExpectedErrStr string
}

var testableDef = fmt.Sprintf("create table testable (pk int not null primary key comment 'tag:%d');", pkTag)
var testableDef = fmt.Sprintf("create table testable (pk int not null primary key);")

var SuperSchemaTests = []SuperSchemaTest{
	{
@@ -91,7 +90,7 @@ var SuperSchemaTests = []SuperSchemaTest{
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: testableDef},
|
||||
tc.CommitAll{Message: "created table testable"},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int comment 'tag:%d';", c0Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int;")},
|
||||
},
|
||||
ExpectedBranch: "master",
|
||||
ExpectedSchema: schema.SchemaFromCols(columnCollection(
|
||||
@@ -108,7 +107,7 @@ var SuperSchemaTests = []SuperSchemaTest{
|
||||
TableName: "testable",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: testableDef},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int comment 'tag:%d';", c0Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int")},
|
||||
tc.CommitAll{Message: "created table testable"},
|
||||
tc.Query{Query: "alter table testable drop column c0"},
|
||||
tc.CommitAll{Message: "dropped column c0"},
|
||||
@@ -127,7 +126,7 @@ var SuperSchemaTests = []SuperSchemaTest{
|
||||
TableName: "testable",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: testableDef},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int comment 'tag:%d';", c0Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int;")},
|
||||
tc.CommitAll{Message: "created table testable"},
|
||||
tc.Query{Query: "alter table testable drop column c0"},
|
||||
},
|
||||
@@ -145,7 +144,7 @@ var SuperSchemaTests = []SuperSchemaTest{
|
||||
TableName: "testable",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: testableDef},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int comment 'tag:%d';", c0Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int;")},
|
||||
tc.Query{Query: "alter table testable drop column c0"},
|
||||
},
|
||||
ExpectedBranch: "master",
|
||||
@@ -162,9 +161,9 @@ var SuperSchemaTests = []SuperSchemaTest{
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: testableDef},
|
||||
tc.CommitAll{Message: "created table testable"},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int comment 'tag:%d';", c0Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int;")},
|
||||
tc.StageAll{},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int comment 'tag:%d';", c1Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int;")},
|
||||
tc.CommitStaged{Message: "adding staged column c0"},
|
||||
tc.ResetHard{},
|
||||
},
|
||||
@@ -183,14 +182,14 @@ var SuperSchemaTests = []SuperSchemaTest{
|
||||
TableName: "testable",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: testableDef},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int comment 'tag:%d';", c0Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int;")},
|
||||
tc.CommitAll{Message: "created table testable"},
|
||||
tc.Branch{BranchName: "other"},
|
||||
tc.Checkout{BranchName: "other"},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c11 int comment 'tag:%d';", c11Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c11 int;")},
|
||||
tc.CommitAll{Message: "added column c11 on branch other"},
|
||||
tc.Checkout{BranchName: "master"},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int comment 'tag:%d';", c1Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int;")},
|
||||
},
|
||||
ExpectedBranch: "master",
|
||||
ExpectedSchema: schema.SchemaFromCols(columnCollection(
|
||||
@@ -209,14 +208,14 @@ var SuperSchemaTests = []SuperSchemaTest{
|
||||
TableName: "testable",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: testableDef},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int comment 'tag:%d';", c0Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int;")},
|
||||
tc.CommitAll{Message: "created table testable"},
|
||||
tc.Branch{BranchName: "other"},
|
||||
tc.Checkout{BranchName: "other"},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c11 int comment 'tag:%d';", c11Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c11 int;")},
|
||||
tc.CommitAll{Message: "added column c11 on branch other"},
|
||||
tc.Checkout{BranchName: "master"},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int comment 'tag:%d';", c1Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int;")},
|
||||
tc.CommitAll{Message: "added column c1 on branch master"},
|
||||
tc.Checkout{BranchName: "other"},
|
||||
},
|
||||
@@ -238,14 +237,14 @@ var SuperSchemaTests = []SuperSchemaTest{
|
||||
TableName: "testable",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: testableDef},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int comment 'tag:%d';", c0Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int;")},
|
||||
tc.CommitAll{Message: "created table testable"},
|
||||
tc.Branch{BranchName: "other"},
|
||||
tc.Checkout{BranchName: "other"},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c11 int comment 'tag:%d';", c11Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c11 int;")},
|
||||
tc.CommitAll{Message: "added column c11 on branch other"},
|
||||
tc.Checkout{BranchName: "master"},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int comment 'tag:%d';", c1Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int;")},
|
||||
tc.CommitAll{Message: "added column c1 on branch master"},
|
||||
tc.Merge{BranchName: "other"},
|
||||
},
|
||||
@@ -268,17 +267,17 @@ var SuperSchemaTests = []SuperSchemaTest{
|
||||
TableName: "testable",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: testableDef},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int comment 'tag:%d';", c0Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int;")},
|
||||
tc.CommitAll{Message: "created table testable"},
|
||||
tc.Branch{BranchName: "other"},
|
||||
tc.Checkout{BranchName: "other"},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c11 int comment 'tag:%d';", c11Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c12 int comment 'tag:%d';", c12Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c11 int;")},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c12 int;")},
|
||||
tc.CommitAll{Message: "added columns c11 and c12 on branch other"},
|
||||
tc.Query{Query: "alter table testable drop column c12;"},
|
||||
tc.CommitAll{Message: "dropped column c12 on branch other"},
|
||||
tc.Checkout{BranchName: "master"},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int comment 'tag:%d';", c1Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int;")},
|
||||
tc.CommitAll{Message: "added column c1 on branch master"},
|
||||
tc.Merge{BranchName: "other"},
|
||||
tc.CommitAll{Message: "Merged other into master"},
|
||||
@@ -303,10 +302,10 @@ var SuperSchemaTests = []SuperSchemaTest{
|
||||
TableName: "testable",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: testableDef},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int comment 'tag:%d';", c0Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int;")},
|
||||
tc.Query{Query: "create table foo (pk int not null primary key);"},
|
||||
tc.CommitAll{Message: "created tables testable and foo"},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int comment 'tag:%d';", c1Tag)},
|
||||
tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int;")},
|
||||
tc.Query{Query: "create table qux (pk int not null primary key);"},
|
||||
tc.Query{Query: "drop table foo;"},
|
||||
tc.CommitAll{Message: "added column c1 on branch master, created table qux, dropped table foo"},
|
||||
|
||||
@@ -1,193 +0,0 @@
|
||||
// Copyright 2020 Liquidata, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package envtestutils
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/dtestutils"
|
||||
tc "github.com/liquidata-inc/dolt/go/libraries/doltcore/dtestutils/testcommands"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/typeinfo"
|
||||
)
|
||||
|
||||
type UniqueTagsTest struct {
|
||||
// The name of this test. Names should be unique and descriptive.
|
||||
Name string
|
||||
// The modifying queries to run
|
||||
Commands []tc.Command
|
||||
// Name of the table to be verified
|
||||
TableName string
|
||||
// The schema of the result of the query, nil if an error is expected
|
||||
ExpectedSchema schema.Schema
|
||||
// Expected branch
|
||||
ExpectedBranch string
|
||||
// An expected error string
|
||||
ExpectedErrStr string
|
||||
}
|
||||
|
||||
var UniqueTagsTests = []UniqueTagsTest{
|
||||
{
|
||||
Name: "can create table with tags specified",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: `create table test (pk int not null primary key comment 'tag:42');`},
|
||||
},
|
||||
TableName: "test",
|
||||
ExpectedSchema: schema.SchemaFromCols(columnCollection(
|
||||
newColTypeInfo("pk", 42, typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
)),
|
||||
ExpectedBranch: "master",
|
||||
},
|
||||
{
|
||||
Name: "cannot create duplicate tags within a table",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: `create table test (
|
||||
pk int not null primary key comment 'tag:42',
|
||||
c0 int comment 'tag:42');`},
|
||||
},
|
||||
ExpectedBranch: "master",
|
||||
ExpectedErrStr: "two different columns with the same tag",
|
||||
},
|
||||
{
|
||||
Name: "cannot create duplicate tags across tables",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: `create table test (pk int not null primary key comment 'tag:42');`},
|
||||
tc.Query{Query: `create table test2 (pk int not null primary key comment 'tag:42');`},
|
||||
},
|
||||
ExpectedBranch: "master",
|
||||
ExpectedErrStr: "Cannot create column pk, the tag 42 was already used in table test",
|
||||
},
|
||||
{
|
||||
Name: "cannot add a duplicate tag within a table",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: `create table test (pk int not null primary key comment 'tag:42');`},
|
||||
tc.Query{Query: `alter table test add column c0 int comment 'tag:42';`},
|
||||
},
|
||||
ExpectedBranch: "master",
|
||||
ExpectedErrStr: "Cannot create column c0, the tag 42 was already used in table test",
|
||||
},
|
||||
{
|
||||
Name: "cannot add a duplicate tag across tables",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: `create table test (pk int not null primary key comment 'tag:0');`},
|
||||
tc.Query{Query: `create table other (pk int not null primary key comment 'tag:42');`},
|
||||
tc.Query{Query: `alter table test add column c0 int comment 'tag:42';`},
|
||||
},
|
||||
ExpectedBranch: "master",
|
||||
ExpectedErrStr: "Cannot create column c0, the tag 42 was already used in table other",
|
||||
},
|
||||
{
|
||||
Name: "cannot add a tag that has previously existed in the same table's history",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: `create table test (
|
||||
pk int not null primary key comment 'tag:0',
|
||||
c0 int comment 'tag:42');`},
|
||||
tc.CommitAll{Message: "created table test"},
|
||||
tc.Query{Query: `alter table test drop column c0;`},
|
||||
tc.CommitAll{Message: "dropped c0"},
|
||||
tc.Query{Query: `alter table test add column c1 int comment 'tag:42';`},
|
||||
},
|
||||
ExpectedBranch: "master",
|
||||
ExpectedErrStr: "two different columns with the same tag",
|
||||
},
|
||||
{
|
||||
Name: "cannot add a tag that has previously existed in a different table's history",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: `create table test (pk int not null primary key comment 'tag:0');`},
|
||||
tc.Query{Query: `create table other (
|
||||
pk int not null primary key comment 'tag:1',
|
||||
c0 int comment 'tag:42');`},
|
||||
tc.CommitAll{Message: "created tables test and other"},
|
||||
tc.Query{Query: `alter table other drop column c0;`},
|
||||
tc.CommitAll{Message: "dropped c0 from other"},
|
||||
tc.Query{Query: `alter table test add column c1 int comment 'tag:42';`},
|
||||
},
|
||||
ExpectedBranch: "master",
|
||||
ExpectedErrStr: "two different columns with the same tag",
|
||||
},
|
||||
{
|
||||
Name: "cannot add a tag that has previously existed in a merged branch's history",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: `create table test (pk int not null primary key comment 'tag:0');`},
|
||||
tc.CommitAll{Message: "created table test"},
|
||||
tc.Branch{BranchName: "other"},
|
||||
tc.Checkout{BranchName: "other"},
|
||||
tc.Query{Query: `alter table test add column c0 int comment 'tag:42';`},
|
||||
tc.CommitAll{Message: "added column c0 to test on branch other"},
|
||||
tc.Query{Query: `alter table test drop column c0;`},
|
||||
tc.CommitAll{Message: "dropped c0 from test on other"},
|
||||
tc.Checkout{BranchName: "master"},
|
||||
tc.Merge{BranchName: "other"},
|
||||
tc.Query{Query: `alter table test add column c1 int comment 'tag:42';`},
|
||||
},
|
||||
ExpectedBranch: "master",
|
||||
ExpectedErrStr: "two different columns with the same tag",
|
||||
},
|
||||
{
|
||||
Name: "cannot add a tag that has previously existed in a deleted table",
|
||||
Commands: []tc.Command{
|
||||
tc.Query{Query: `create table quiz (pk int not null primary key comment 'tag:42');`},
|
||||
tc.Query{Query: `create table test (pk int not null primary key comment 'tag:10');`},
|
||||
tc.CommitAll{Message: "created tables test and quiz"},
|
||||
tc.Query{Query: `drop table quiz;`},
|
||||
tc.CommitAll{Message: "added column c0 to test on branch other"},
|
||||
tc.Query{Query: `alter table test add column c1 int comment 'tag:42';`},
|
||||
},
|
||||
ExpectedBranch: "master",
|
||||
ExpectedErrStr: "two different columns with the same tag",
|
||||
},
|
||||
}
|
||||
|
||||
func TestUniqueTags(t *testing.T) {
|
||||
for _, test := range UniqueTagsTests {
|
||||
t.Run(test.Name, func(t *testing.T) {
|
||||
testUniqueTags(t, test)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func testUniqueTags(t *testing.T, test UniqueTagsTest) {
|
||||
dEnv := dtestutils.CreateTestEnv()
|
||||
|
||||
var ee error
|
||||
for idx, cmd := range test.Commands {
|
||||
require.NoError(t, ee)
|
||||
fmt.Println(fmt.Sprintf("%d: %s: %s", idx, cmd.CommandString(), cmd))
|
||||
ee = cmd.Exec(t, dEnv)
|
||||
}
|
||||
|
||||
if test.ExpectedErrStr != "" {
|
||||
require.Error(t, ee, test.ExpectedErrStr)
|
||||
} else {
|
||||
spec := dEnv.RepoState.CWBHeadRef()
|
||||
require.Equal(t, "refs/heads/"+test.ExpectedBranch, spec.String())
|
||||
|
||||
r, err := dEnv.WorkingRoot(context.Background())
|
||||
require.NoError(t, err)
|
||||
|
||||
tbl, ok, err := r.GetTable(context.Background(), test.TableName)
|
||||
require.NoError(t, err)
|
||||
require.True(t, ok)
|
||||
|
||||
sch, err := tbl.GetSchema(context.Background())
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, test.ExpectedSchema, sch)
|
||||
}
|
||||
}
|
||||
@@ -75,10 +75,10 @@ type mergeForeignKeyTest struct {
|
||||
|
||||
var setupCommon = []testCommand{
|
||||
{commands.SqlCmd{}, []string{"-q", "create table test (" +
|
||||
"pk int not null primary key comment 'tag:0'," +
|
||||
"c1 int not null comment 'tag:1'," +
|
||||
"c2 int comment 'tag:2'," +
|
||||
"c3 int comment 'tag:3');"}},
|
||||
"pk int not null primary key," +
|
||||
"c1 int not null," +
|
||||
"c2 int," +
|
||||
"c3 int);"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "create index c1_idx on test(c1)"}},
|
||||
{commands.AddCmd{}, []string{"."}},
|
||||
{commands.CommitCmd{}, []string{"-m", "setup common"}},
|
||||
@@ -91,34 +91,34 @@ var mergeSchemaTests = []mergeSchemaTest{
|
||||
setup: []testCommand{},
|
||||
sch: schemaFromColsAndIdxs(
|
||||
colCollection(
|
||||
newColTypeInfo("pk", uint64(0), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(1), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c2", uint64(2), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c3", uint64(3), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_idx", []uint64{1}, []uint64{1, 0}, nil, schema.IndexProperties{}),
|
||||
newColTypeInfo("pk", uint64(3228), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(8201), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c2", uint64(8539), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c3", uint64(4696), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_idx", []uint64{8201}, []uint64{8201, 3228}, nil, schema.IndexProperties{}),
|
||||
),
|
||||
},
|
||||
{
|
||||
name: "add cols, drop cols, merge",
|
||||
setup: []testCommand{
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test drop column c2;"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c8 int comment 'tag:8';"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c8 int;"}},
|
||||
{commands.AddCmd{}, []string{"."}},
|
||||
{commands.CommitCmd{}, []string{"-m", "modified branch master"}},
|
||||
{commands.CheckoutCmd{}, []string{"other"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test drop column c3;"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c9 int comment 'tag:9';"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c9 int;"}},
|
||||
{commands.AddCmd{}, []string{"."}},
|
||||
{commands.CommitCmd{}, []string{"-m", "modified branch other"}},
|
||||
{commands.CheckoutCmd{}, []string{"master"}},
|
||||
},
|
||||
sch: schemaFromColsAndIdxs(
|
||||
colCollection(
|
||||
newColTypeInfo("pk", uint64(0), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(1), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c8", uint64(8), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c9", uint64(9), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_idx", []uint64{1}, []uint64{1, 0}, nil, schema.IndexProperties{}),
|
||||
newColTypeInfo("pk", uint64(3228), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(8201), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c8", uint64(12393), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c9", uint64(4508), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_idx", []uint64{8201}, []uint64{8201, 3228}, nil, schema.IndexProperties{}),
|
||||
),
|
||||
},
|
||||
{
|
||||
@@ -135,11 +135,11 @@ var mergeSchemaTests = []mergeSchemaTest{
|
||||
},
|
||||
sch: schemaFromColsAndIdxs(
|
||||
colCollection(
|
||||
newColTypeInfo("pk", uint64(0), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(1), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c2", uint64(2), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c3", uint64(3), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_idx", []uint64{1}, []uint64{1, 0}, nil, schema.IndexProperties{}),
|
||||
newColTypeInfo("pk", uint64(3228), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(8201), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c2", uint64(8539), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c3", uint64(4696), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_idx", []uint64{8201}, []uint64{8201, 3228}, nil, schema.IndexProperties{}),
|
||||
),
|
||||
},
|
||||
{
|
||||
@@ -156,11 +156,11 @@ var mergeSchemaTests = []mergeSchemaTest{
|
||||
},
|
||||
sch: schemaFromColsAndIdxs(
|
||||
colCollection(
|
||||
newColTypeInfo("pk", uint64(0), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(1), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c2", uint64(2), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c3", uint64(3), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c3_idx", []uint64{3}, []uint64{3, 0}, nil, schema.IndexProperties{}),
|
||||
newColTypeInfo("pk", uint64(3228), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(8201), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c2", uint64(8539), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c3", uint64(4696), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c3_idx", []uint64{4696}, []uint64{4696, 3228}, nil, schema.IndexProperties{}),
|
||||
),
|
||||
},
|
||||
{
|
||||
@@ -177,11 +177,11 @@ var mergeSchemaTests = []mergeSchemaTest{
|
||||
},
|
||||
sch: schemaFromColsAndIdxs(
|
||||
colCollection(
|
||||
newColTypeInfo("pk", uint64(0), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(1), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c22", uint64(2), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c33", uint64(3), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_idx", []uint64{1}, []uint64{1, 0}, nil, schema.IndexProperties{}),
|
||||
newColTypeInfo("pk", uint64(3228), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(8201), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c22", uint64(8539), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c33", uint64(4696), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_idx", []uint64{8201}, []uint64{8201, 3228}, nil, schema.IndexProperties{}),
|
||||
),
|
||||
},
|
||||
{
|
||||
@@ -194,33 +194,33 @@ var mergeSchemaTests = []mergeSchemaTest{
|
||||
},
|
||||
sch: schemaFromColsAndIdxs(
|
||||
colCollection(
|
||||
newColTypeInfo("pk", uint64(0), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(1), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c2", uint64(2), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c3", uint64(3), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_index", []uint64{1}, []uint64{1, 0}, nil, schema.IndexProperties{}),
|
||||
newColTypeInfo("pk", uint64(3228), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(8201), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c2", uint64(8539), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c3", uint64(4696), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_index", []uint64{8201}, []uint64{8201, 3228}, nil, schema.IndexProperties{}),
|
||||
),
|
||||
},
|
||||
{
|
||||
name: "add same column on both branches, merge",
|
||||
setup: []testCommand{
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c4 int comment 'tag:4';"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c4 int;"}},
|
||||
{commands.AddCmd{}, []string{"."}},
|
||||
{commands.CommitCmd{}, []string{"-m", "modified branch master"}},
|
||||
{commands.CheckoutCmd{}, []string{"other"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c4 int comment 'tag:4'"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c4 int;"}},
|
||||
{commands.AddCmd{}, []string{"."}},
|
||||
{commands.CommitCmd{}, []string{"-m", "modified branch other"}},
|
||||
{commands.CheckoutCmd{}, []string{"master"}},
|
||||
},
|
||||
sch: schemaFromColsAndIdxs(
|
||||
colCollection(
|
||||
newColTypeInfo("pk", uint64(0), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(1), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c2", uint64(2), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c3", uint64(3), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c4", uint64(4), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_idx", []uint64{1}, []uint64{1, 0}, nil, schema.IndexProperties{}),
|
||||
newColTypeInfo("pk", uint64(3228), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(8201), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c2", uint64(8539), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c3", uint64(4696), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c4", uint64(1716), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_idx", []uint64{8201}, []uint64{8201, 3228}, nil, schema.IndexProperties{}),
|
||||
),
|
||||
},
|
||||
{
|
||||
@@ -237,12 +237,12 @@ var mergeSchemaTests = []mergeSchemaTest{
|
||||
},
|
||||
sch: schemaFromColsAndIdxs(
|
||||
colCollection(
|
||||
newColTypeInfo("pk", uint64(0), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(1), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c2", uint64(2), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c3", uint64(3), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_idx", []uint64{1}, []uint64{1, 0}, nil, schema.IndexProperties{}),
|
||||
schema.NewIndex("c3_idx", []uint64{3}, []uint64{3, 0}, nil, schema.IndexProperties{}),
|
||||
newColTypeInfo("pk", uint64(3228), typeinfo.Int32Type, true, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c1", uint64(8201), typeinfo.Int32Type, false, schema.NotNullConstraint{}),
|
||||
newColTypeInfo("c2", uint64(8539), typeinfo.Int32Type, false),
|
||||
newColTypeInfo("c3", uint64(4696), typeinfo.Int32Type, false)),
|
||||
schema.NewIndex("c1_idx", []uint64{8201}, []uint64{8201, 3228}, nil, schema.IndexProperties{}),
|
||||
schema.NewIndex("c3_idx", []uint64{4696}, []uint64{4696, 3228}, nil, schema.IndexProperties{}),
|
||||
),
|
||||
},
|
||||
}
|
||||
@@ -258,12 +258,12 @@ var mergeSchemaConflictTests = []mergeSchemaConflictTest{
|
||||
name: "column name collisions",
|
||||
setup: []testCommand{
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test rename column c3 to c4;"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column C6 int comment 'tag:13';"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column C6 int;"}},
|
||||
{commands.AddCmd{}, []string{"."}},
|
||||
{commands.CommitCmd{}, []string{"-m", "modified branch master"}},
|
||||
{commands.CheckoutCmd{}, []string{"other"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test rename column c2 to c4;"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c6 int comment 'tag:19';"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c6 int;"}},
|
||||
{commands.AddCmd{}, []string{"."}},
|
||||
{commands.CommitCmd{}, []string{"-m", "modified branch other"}},
|
||||
{commands.CheckoutCmd{}, []string{"master"}},
|
||||
@@ -301,8 +301,8 @@ var mergeSchemaConflictTests = []mergeSchemaConflictTest{
|
||||
IdxConflicts: []merge.IdxConflict{
|
||||
{
|
||||
Kind: merge.NameCollision,
|
||||
Ours: schema.NewIndex("both", []uint64{1, 2}, []uint64{1, 2, 0}, nil, schema.IndexProperties{}),
|
||||
Theirs: schema.NewIndex("both", []uint64{2, 3}, []uint64{2, 3, 0}, nil, schema.IndexProperties{}),
|
||||
Ours: schema.NewIndex("both", []uint64{8201, 8539}, []uint64{8201, 8539, 3228}, nil, schema.IndexProperties{}),
|
||||
Theirs: schema.NewIndex("both", []uint64{8539, 4696}, []uint64{8539, 4696, 3228}, nil, schema.IndexProperties{}),
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -310,13 +310,14 @@ var mergeSchemaConflictTests = []mergeSchemaConflictTest{
|
||||
{
|
||||
name: "column definition collision",
|
||||
setup: []testCommand{
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c40 int comment 'tag:4';"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c6 bigint comment 'tag:6';"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c40 int;"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c6 bigint;"}},
|
||||
{commands.AddCmd{}, []string{"."}},
|
||||
{commands.CommitCmd{}, []string{"-m", "modified branch master"}},
|
||||
{commands.CheckoutCmd{}, []string{"other"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c44 int comment 'tag:4';"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c6 tinyint comment 'tag:6';"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c40 int;"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test rename column c40 to c44;"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add column c6 tinyint;"}},
|
||||
{commands.AddCmd{}, []string{"."}},
|
||||
{commands.CommitCmd{}, []string{"-m", "modified branch other"}},
|
||||
{commands.CheckoutCmd{}, []string{"master"}},
|
||||
@@ -326,13 +327,13 @@ var mergeSchemaConflictTests = []mergeSchemaConflictTest{
|
||||
ColConflicts: []merge.ColConflict{
|
||||
{
|
||||
Kind: merge.TagCollision,
|
||||
Ours: newColTypeInfo("c40", uint64(4), typeinfo.Int32Type, false),
|
||||
Theirs: newColTypeInfo("c44", uint64(4), typeinfo.Int32Type, false),
|
||||
Ours: newColTypeInfo("c40", uint64(679), typeinfo.Int32Type, false),
|
||||
Theirs: newColTypeInfo("c44", uint64(679), typeinfo.Int32Type, false),
|
||||
},
|
||||
{
|
||||
Kind: merge.TagCollision,
|
||||
Ours: newColTypeInfo("c6", uint64(6), typeinfo.Int64Type, false),
|
||||
Theirs: newColTypeInfo("c6", uint64(6), typeinfo.Int8Type, false),
|
||||
Ours: newColTypeInfo("c6", uint64(10774), typeinfo.Int64Type, false),
|
||||
Theirs: newColTypeInfo("c6", uint64(10774), typeinfo.Int8Type, false),
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -354,8 +355,8 @@ var mergeSchemaConflictTests = []mergeSchemaConflictTest{
|
||||
IdxConflicts: []merge.IdxConflict{
|
||||
{
|
||||
Kind: merge.TagCollision,
|
||||
Ours: schema.NewIndex("c3_idx", []uint64{3}, []uint64{3, 0}, nil, schema.IndexProperties{}),
|
||||
Theirs: schema.NewIndex("c3_index", []uint64{3}, []uint64{3, 0}, nil, schema.IndexProperties{}),
|
||||
Ours: schema.NewIndex("c3_idx", []uint64{4696}, []uint64{4696, 3228}, nil, schema.IndexProperties{}),
|
||||
Theirs: schema.NewIndex("c3_index", []uint64{4696}, []uint64{4696, 3228}, nil, schema.IndexProperties{}),
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -364,15 +365,15 @@ var mergeSchemaConflictTests = []mergeSchemaConflictTest{
|
||||
|
||||
var setupForeignKeyTests = []testCommand{
|
||||
{commands.SqlCmd{}, []string{"-q", "create table test (" +
|
||||
"pk int not null primary key comment 'tag:0'," +
|
||||
"t1 int not null comment 'tag:1'," +
|
||||
"t2 int comment 'tag:2'," +
|
||||
"t3 int comment 'tag:3');"}},
|
||||
"pk int not null primary key," +
|
||||
"t1 int not null," +
|
||||
"t2 int," +
|
||||
"t3 int);"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "alter table test add index t1_idx (t1);"}},
|
||||
{commands.SqlCmd{}, []string{"-q", "create table quiz (" +
|
||||
"pk int not null primary key comment 'tag:10'," +
|
||||
"q1 int not null comment 'tag:11'," +
|
||||
"q2 int not null comment 'tag:12'," +
|
||||
"pk int not null primary key," +
|
||||
"q1 int not null," +
|
||||
"q2 int not null," +
|
||||
"index q2_idx (q2)," +
|
||||
"constraint q1_fk foreign key (q1) references test(t1));"}},
|
||||
{commands.AddCmd{}, []string{"."}},
|
||||
@@ -388,10 +389,10 @@ var mergeForeignKeyTests = []mergeForeignKeyTest{
|
||||
Name: "q1_fk",
|
||||
TableName: "quiz",
|
||||
TableIndex: "q1",
|
||||
TableColumns: []uint64{11},
|
||||
TableColumns: []uint64{13001},
|
||||
ReferencedTableName: "test",
|
||||
ReferencedTableIndex: "t1_idx",
|
||||
ReferencedTableColumns: []uint64{1}}),
|
||||
ReferencedTableColumns: []uint64{12111}}),
|
||||
expFKConflict: []merge.FKConflict{},
|
||||
},
|
||||
//{
|
||||
@@ -435,7 +436,7 @@ func schemaFromColsAndIdxs(allCols *schema.ColCollection, indexes ...schema.Inde
|
||||
}
|
||||
|
||||
func newColTypeInfo(name string, tag uint64, typeInfo typeinfo.TypeInfo, partOfPK bool, constraints ...schema.ColConstraint) schema.Column {
|
||||
c, err := schema.NewColumnWithTypeInfo(name, tag, typeInfo, partOfPK, "", constraints...)
|
||||
c, err := schema.NewColumnWithTypeInfo(name, tag, typeInfo, partOfPK, "", "", constraints...)
|
||||
if err != nil {
|
||||
panic("could not create column")
|
||||
}
|
||||
|
||||
@@ -73,7 +73,7 @@ func NewJoiner(namedSchemas []NamedSchema, namers map[string]ColNamingFunc) (*Jo
	allCols.IterInSortedOrder(func(srcTag uint64, col schema.Column) (stop bool) {
		newColName := namer(col.Name)
		var newCol schema.Column
		newCol, err = schema.NewColumnWithTypeInfo(newColName, destTag, col.TypeInfo, false, col.Default)
		newCol, err = schema.NewColumnWithTypeInfo(newColName, destTag, col.TypeInfo, false, col.Default, col.Comment)
		if err != nil {
			return true
		}

@@ -103,9 +103,9 @@ func TestUnneccessaryConversion(t *testing.T) {
}

func TestSpecialBoolHandling(t *testing.T) {
	col1, err := schema.NewColumnWithTypeInfo("pk", 0, typeinfo.Int64Type, true, "")
	col1, err := schema.NewColumnWithTypeInfo("pk", 0, typeinfo.Int64Type, true, "", "")
	require.NoError(t, err)
	col2, err := schema.NewColumnWithTypeInfo("v", 1, typeinfo.PseudoBoolType, false, "")
	col2, err := schema.NewColumnWithTypeInfo("v", 1, typeinfo.PseudoBoolType, false, "", "")
	require.NoError(t, err)
	colColl, _ := schema.NewColCollection(col1, col2)
	sch := schema.SchemaFromCols(colColl)

@@ -44,7 +44,7 @@ type ColumnOrder struct {
// table, since we must write a value for each row. If the column is not nullable, a default value must be provided.
//
// Returns an error if the column added conflicts with the existing schema in tag or name.
func AddColumnToTable(ctx context.Context, root *doltdb.RootValue, tbl *doltdb.Table, tblName string, tag uint64, newColName string, typeInfo typeinfo.TypeInfo, nullable Nullable, defaultVal string, order *ColumnOrder) (*doltdb.Table, error) {
func AddColumnToTable(ctx context.Context, root *doltdb.RootValue, tbl *doltdb.Table, tblName string, tag uint64, newColName string, typeInfo typeinfo.TypeInfo, nullable Nullable, defaultVal, comment string, order *ColumnOrder) (*doltdb.Table, error) {
	sch, err := tbl.GetSchema(ctx)
	if err != nil {
		return nil, err
@@ -54,7 +54,7 @@ func AddColumnToTable(ctx context.Context, root *doltdb.RootValue, tbl *doltdb.T
		return nil, err
	}

	newSchema, err := addColumnToSchema(sch, tag, newColName, typeInfo, nullable, order, defaultVal)
	newSchema, err := addColumnToSchema(sch, tag, newColName, typeInfo, nullable, order, defaultVal, comment)
	if err != nil {
		return nil, err
	}
@@ -131,8 +131,8 @@ func updateTableWithNewSchema(ctx context.Context, tblName string, tbl *doltdb.T
}

// addColumnToSchema creates a new schema with a column as specified by the params.
func addColumnToSchema(sch schema.Schema, tag uint64, newColName string, typeInfo typeinfo.TypeInfo, nullable Nullable, order *ColumnOrder, defaultVal string) (schema.Schema, error) {
	newCol, err := createColumn(nullable, newColName, tag, typeInfo, defaultVal)
func addColumnToSchema(sch schema.Schema, tag uint64, newColName string, typeInfo typeinfo.TypeInfo, nullable Nullable, order *ColumnOrder, defaultVal, comment string) (schema.Schema, error) {
	newCol, err := createColumn(nullable, newColName, tag, typeInfo, defaultVal, comment)
	if err != nil {
		return nil, err
	}
@@ -162,11 +162,11 @@ func addColumnToSchema(sch schema.Schema, tag uint64, newColName string, typeInf
	return newSch, nil
}

func createColumn(nullable Nullable, newColName string, tag uint64, typeInfo typeinfo.TypeInfo, defaultVal string) (schema.Column, error) {
func createColumn(nullable Nullable, newColName string, tag uint64, typeInfo typeinfo.TypeInfo, defaultVal, comment string) (schema.Column, error) {
	if nullable {
		return schema.NewColumnWithTypeInfo(newColName, tag, typeInfo, false, defaultVal)
		return schema.NewColumnWithTypeInfo(newColName, tag, typeInfo, false, defaultVal, comment)
	} else {
		return schema.NewColumnWithTypeInfo(newColName, tag, typeInfo, false, defaultVal, schema.NotNullConstraint{})
		return schema.NewColumnWithTypeInfo(newColName, tag, typeInfo, false, defaultVal, comment, schema.NotNullConstraint{})
	}
}


@@ -263,7 +263,7 @@ func TestAddColumnToTable(t *testing.T) {
		tbl, _, err := root.GetTable(ctx, tableName)
		assert.NoError(t, err)

		updatedTable, err := AddColumnToTable(ctx, root, tbl, tableName, tt.tag, tt.newColName, typeinfo.FromKind(tt.colKind), tt.nullable, tt.defaultVal, tt.order)
		updatedTable, err := AddColumnToTable(ctx, root, tbl, tableName, tt.tag, tt.newColName, typeinfo.FromKind(tt.colKind), tt.nullable, tt.defaultVal, "", tt.order)
		if len(tt.expectedErr) > 0 {
			require.Error(t, err)
			assert.Contains(t, err.Error(), tt.expectedErr)

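As a reference for callers, a hedged sketch of the updated AddColumnToTable call with the new comment argument; the repository values (ctx, root, tbl), the tag, the column details, and the bool-backed Nullable conversion are illustrative assumptions rather than code from this commit:

	// Add a nullable int64 column "rating" with no default and a column comment.
	// The comment string now sits between defaultVal and the column order.
	updated, err := AddColumnToTable(ctx, root, tbl, "people", uint64(9030), "rating",
		typeinfo.Int64Type, Nullable(true), "", "user supplied rating", nil)
	if err != nil {
		return nil, err
	}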
@@ -26,10 +26,10 @@ import (
	"github.com/liquidata-inc/dolt/go/store/types"
)

var firstNameCol = Column{"first", 0, types.StringKind, false, typeinfo.StringDefaultType, "", nil}
var lastNameCol = Column{"last", 1, types.StringKind, false, typeinfo.StringDefaultType, "", nil}
var firstNameCapsCol = Column{"FiRsT", 2, types.StringKind, false, typeinfo.StringDefaultType, "", nil}
var lastNameCapsCol = Column{"LAST", 3, types.StringKind, false, typeinfo.StringDefaultType, "", nil}
var firstNameCol = Column{"first", 0, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil}
var lastNameCol = Column{"last", 1, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil}
var firstNameCapsCol = Column{"FiRsT", 2, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil}
var lastNameCapsCol = Column{"LAST", 3, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil}

func TestGetByNameAndTag(t *testing.T) {
	cols := []Column{firstNameCol, lastNameCol, firstNameCapsCol, lastNameCapsCol}
@@ -111,7 +111,7 @@ func TestNewColCollectionErrorHandling(t *testing.T) {
	}{
		{
			name: "tag collision",
			cols: []Column{firstNameCol, lastNameCol, {"collision", 0, types.StringKind, false, typeinfo.StringDefaultType, "", nil}},
			cols: []Column{firstNameCol, lastNameCol, {"collision", 0, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil}},
			expectedErr: ErrColTagCollision,
		},
	}
@@ -127,18 +127,18 @@ func TestNewColCollectionErrorHandling(t *testing.T) {

func TestAppendAndItrInSortOrder(t *testing.T) {
	cols := []Column{
		{"0", 0, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
		{"2", 2, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
		{"4", 4, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
		{"3", 3, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
		{"1", 1, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
		{"0", 0, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
		{"2", 2, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
		{"4", 4, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
		{"3", 3, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
		{"1", 1, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
	}
	cols2 := []Column{
		{"7", 7, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
		{"9", 9, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
		{"5", 5, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
		{"8", 8, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
		{"6", 6, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
		{"7", 7, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
		{"9", 9, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
		{"5", 5, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
		{"8", 8, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
		{"6", 6, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
	}

	colColl, _ := NewColCollection(cols...)

@@ -43,6 +43,7 @@ var (
|
||||
false,
|
||||
typeinfo.UnknownType,
|
||||
"",
|
||||
"",
|
||||
nil,
|
||||
}
|
||||
)
|
||||
@@ -74,6 +75,9 @@ type Column struct {
|
||||
// Default is the default value of this column. This is the string representation of a sql.Expression.
|
||||
Default string
|
||||
|
||||
// Comment is the comment for this column.
|
||||
Comment string
|
||||
|
||||
// Constraints are rules that can be checked on each column to say if the columns value is valid
|
||||
Constraints []ColConstraint
|
||||
}
|
||||
@@ -81,7 +85,7 @@ type Column struct {
|
||||
// NewColumn creates a Column instance with the default type info for the NomsKind
|
||||
func NewColumn(name string, tag uint64, kind types.NomsKind, partOfPK bool, constraints ...ColConstraint) Column {
|
||||
typeInfo := typeinfo.FromKind(kind)
|
||||
col, err := NewColumnWithTypeInfo(name, tag, typeInfo, partOfPK, "", constraints...)
|
||||
col, err := NewColumnWithTypeInfo(name, tag, typeInfo, partOfPK, "", "", constraints...)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
@@ -89,7 +93,7 @@ func NewColumn(name string, tag uint64, kind types.NomsKind, partOfPK bool, cons
|
||||
}
|
||||
|
||||
// NewColumnWithTypeInfo creates a Column instance with the given type info.
|
||||
func NewColumnWithTypeInfo(name string, tag uint64, typeInfo typeinfo.TypeInfo, partOfPK bool, defaultVal string, constraints ...ColConstraint) (Column, error) {
|
||||
func NewColumnWithTypeInfo(name string, tag uint64, typeInfo typeinfo.TypeInfo, partOfPK bool, defaultVal, comment string, constraints ...ColConstraint) (Column, error) {
|
||||
for _, c := range constraints {
|
||||
if c == nil {
|
||||
return Column{}, errors.New("nil passed as a constraint")
|
||||
@@ -107,6 +111,7 @@ func NewColumnWithTypeInfo(name string, tag uint64, typeInfo typeinfo.TypeInfo,
|
||||
partOfPK,
|
||||
typeInfo,
|
||||
defaultVal,
|
||||
comment,
|
||||
constraints,
|
||||
}, nil
|
||||
}
|
||||
|
||||
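For reference, a minimal sketch of constructing a column that carries a comment under the new NewColumnWithTypeInfo signature. The tag value, type, and comment text below are illustrative assumptions, not part of this change:

// Hypothetical usage: build a nullable column whose MySQL-style comment is
// supplied through the new `comment` parameter.
col, err := schema.NewColumnWithTypeInfo(
	"age",                        // column name
	7208,                         // assumed tag value
	typeinfo.Int32Type,           // 32-bit integer type info
	false,                        // not part of the primary key
	"",                           // no default value
	"age of the person in years", // column comment (new argument)
)
if err != nil {
	panic(err) // a nil constraint or other invalid input would surface here
}
_ = col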
@@ -46,6 +46,8 @@ type encodedColumn struct {
|
||||
|
||||
Default string `noms:"default,omitempty" json:"default,omitempty"`
|
||||
|
||||
Comment string `noms:"comment,omitempty" json:"comment,omitempty"`
|
||||
|
||||
Constraints []encodedConstraint `noms:"col_constraints" json:"col_constraints"`
|
||||
|
||||
// NB: all new fields must have the 'omitempty' annotation. See comment above
|
||||
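As background for the 'omitempty' note above, a small self-contained Go illustration of what that annotation does for the json tag (the noms tag is expected to behave analogously); the struct below is illustrative only and not part of this change:

package main

import (
	"encoding/json"
	"fmt"
)

type encodedColumnSketch struct {
	Name    string `json:"name"`
	Comment string `json:"comment,omitempty"` // omitted from output when empty
}

func main() {
	withComment, _ := json.Marshal(encodedColumnSketch{Name: "c1", Comment: "hello"})
	withoutComment, _ := json.Marshal(encodedColumnSketch{Name: "c1"})
	fmt.Println(string(withComment))    // {"name":"c1","comment":"hello"}
	fmt.Println(string(withoutComment)) // {"name":"c1"}
}

Columns that never set a comment therefore serialize exactly as they did before the field was added.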
@@ -84,6 +86,7 @@ func encodeColumn(col schema.Column) encodedColumn {
|
||||
col.IsPartOfPK,
|
||||
encodeTypeInfo(col.TypeInfo),
|
||||
col.Default,
|
||||
col.Comment,
|
||||
encodeAllColConstraints(col.Constraints),
|
||||
}
|
||||
}
|
||||
@@ -102,7 +105,7 @@ func (nfd encodedColumn) decodeColumn() (schema.Column, error) {
|
||||
return schema.Column{}, errors.New("cannot decode column due to unknown schema format")
|
||||
}
|
||||
colConstraints := decodeAllColConstraint(nfd.Constraints)
|
||||
return schema.NewColumnWithTypeInfo(nfd.Name, nfd.Tag, typeInfo, nfd.IsPartOfPK, nfd.Default, colConstraints...)
|
||||
return schema.NewColumnWithTypeInfo(nfd.Name, nfd.Tag, typeInfo, nfd.IsPartOfPK, nfd.Default, nfd.Comment, colConstraints...)
|
||||
}
|
||||
|
||||
type encodedConstraint struct {
|
||||
|
||||
@@ -42,6 +42,7 @@ func createTestSchema() schema.Schema {
|
||||
}
|
||||
for i := range columns {
|
||||
columns[i].Default = `""`
|
||||
columns[i].Comment = "hello world"
|
||||
}
|
||||
|
||||
colColl, _ := schema.NewColCollection(columns...)
|
||||
@@ -148,7 +149,7 @@ func TestTypeInfoMarshalling(t *testing.T) {
|
||||
t.Run(sqlType.String(), func(t *testing.T) {
|
||||
ti, err := typeinfo.FromSqlType(sqlType)
|
||||
require.NoError(t, err)
|
||||
col, err := schema.NewColumnWithTypeInfo("pk", 1, ti, true, "")
|
||||
col, err := schema.NewColumnWithTypeInfo("pk", 1, ti, true, "", "")
|
||||
require.NoError(t, err)
|
||||
colColl, err := schema.NewColCollection(col)
|
||||
require.NoError(t, err)
|
||||
@@ -209,6 +210,8 @@ type testEncodedColumn struct {
|
||||
|
||||
Default string `noms:"default" json:"default"`
|
||||
|
||||
Comment string `noms:"comment" json:"comment"`
|
||||
|
||||
Constraints []encodedConstraint `noms:"col_constraints" json:"col_constraints"`
|
||||
}
|
||||
|
||||
@@ -239,7 +242,7 @@ func (tec testEncodedColumn) decodeColumn() (schema.Column, error) {
|
||||
return schema.Column{}, errors.New("cannot decode column due to unknown schema format")
|
||||
}
|
||||
colConstraints := decodeAllColConstraint(tec.Constraints)
|
||||
return schema.NewColumnWithTypeInfo(tec.Name, tec.Tag, typeInfo, tec.IsPartOfPK, tec.Default, colConstraints...)
|
||||
return schema.NewColumnWithTypeInfo(tec.Name, tec.Tag, typeInfo, tec.IsPartOfPK, tec.Default, tec.Comment, colConstraints...)
|
||||
}
|
||||
|
||||
func (tsd testSchemaData) decodeSchema() (schema.Schema, error) {
|
||||
|
||||
@@ -47,14 +47,14 @@ var ageVal = types.Uint(53)
|
||||
var titleVal = types.NullValue
|
||||
|
||||
var pkCols = []Column{
|
||||
{lnColName, lnColTag, types.StringKind, true, typeinfo.StringDefaultType, "", nil},
|
||||
{fnColName, fnColTag, types.StringKind, true, typeinfo.StringDefaultType, "", nil},
|
||||
{lnColName, lnColTag, types.StringKind, true, typeinfo.StringDefaultType, "", "", nil},
|
||||
{fnColName, fnColTag, types.StringKind, true, typeinfo.StringDefaultType, "", "", nil},
|
||||
}
|
||||
var nonPkCols = []Column{
|
||||
{addrColName, addrColTag, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
|
||||
{ageColName, ageColTag, types.UintKind, false, typeinfo.FromKind(types.UintKind), "", nil},
|
||||
{titleColName, titleColTag, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
|
||||
{reservedColName, reservedColTag, types.StringKind, false, typeinfo.StringDefaultType, "", nil},
|
||||
{addrColName, addrColTag, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
|
||||
{ageColName, ageColTag, types.UintKind, false, typeinfo.FromKind(types.UintKind), "", "", nil},
|
||||
{titleColName, titleColTag, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
|
||||
{reservedColName, reservedColTag, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil},
|
||||
}
|
||||
|
||||
var allCols = append(append([]Column(nil), pkCols...), nonPkCols...)
|
||||
@@ -98,7 +98,7 @@ func TestValidateForInsert(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("Name collision", func(t *testing.T) {
|
||||
cols := append(allCols, Column{titleColName, 100, types.StringKind, false, typeinfo.StringDefaultType, "", nil})
|
||||
cols := append(allCols, Column{titleColName, 100, types.StringKind, false, typeinfo.StringDefaultType, "", "", nil})
|
||||
colColl, err := NewColCollection(cols...)
|
||||
require.NoError(t, err)
|
||||
|
||||
|
||||
@@ -54,7 +54,7 @@ var nameCollisionWithSch1 = mustSchema([]Column{
|
||||
|
||||
var tagCollisionWithSch1 = mustSchema([]Column{
|
||||
strCol("a", 1, true),
|
||||
{"collision", 2, types.IntKind, false, typeinfo.Int32Type, "", nil},
|
||||
{"collision", 2, types.IntKind, false, typeinfo.Int32Type, "", "", nil},
|
||||
})
|
||||
|
||||
type SuperSchemaTest struct {
|
||||
@@ -238,5 +238,5 @@ func mustColColl(cols []Column) *ColCollection {
|
||||
}
|
||||
|
||||
func strCol(name string, tag uint64, isPK bool) Column {
|
||||
return Column{name, tag, types.StringKind, isPK, typeinfo.StringDefaultType, "", nil}
|
||||
return Column{name, tag, types.StringKind, isPK, typeinfo.StringDefaultType, "", "", nil}
|
||||
}
|
||||
|
||||
@@ -119,7 +119,7 @@ func createAppearancesTestSchema() schema.Schema {
|
||||
}
|
||||
|
||||
func newColumnWithTypeInfo(name string, tag uint64, info typeinfo.TypeInfo, partOfPk bool, constraints ...schema.ColConstraint) schema.Column {
|
||||
col, err := schema.NewColumnWithTypeInfo(name, tag, info, partOfPk, "", constraints...)
|
||||
col, err := schema.NewColumnWithTypeInfo(name, tag, info, partOfPk, "", "", constraints...)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("unexpected error creating column: %s", err.Error()))
|
||||
}
|
||||
|
||||
@@ -134,11 +134,23 @@ func schemaNewColumn(t *testing.T, name string, tag uint64, sqlType sql.Type, pa
|
||||
func schemaNewColumnWDefVal(t *testing.T, name string, tag uint64, sqlType sql.Type, partOfPK bool, defaultVal string, constraints ...schema.ColConstraint) schema.Column {
|
||||
typeInfo, err := typeinfo.FromSqlType(sqlType)
|
||||
require.NoError(t, err)
|
||||
col, err := schema.NewColumnWithTypeInfo(name, tag, typeInfo, partOfPK, defaultVal, constraints...)
|
||||
col, err := schema.NewColumnWithTypeInfo(name, tag, typeInfo, partOfPK, defaultVal, "", constraints...)
|
||||
require.NoError(t, err)
|
||||
return col
|
||||
}
|
||||
|
||||
func equalSchemas(t *testing.T, expectedSch schema.Schema, sch schema.Schema) {
|
||||
require.NotNil(t, expectedSch)
|
||||
require.NotNil(t, sch)
|
||||
require.Equal(t, expectedSch.GetAllCols().Size(), sch.GetAllCols().Size())
|
||||
cols := sch.GetAllCols().GetColumns()
|
||||
for i, expectedCol := range expectedSch.GetAllCols().GetColumns() {
|
||||
col := cols[i]
|
||||
col.Tag = expectedCol.Tag
|
||||
assert.Equal(t, expectedCol, col)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: this shouldn't be here
|
||||
func CreateWorkingRootUpdate() map[string]envtestutils.TableUpdate {
|
||||
return map[string]envtestutils.TableUpdate{
|
||||
|
||||
@@ -122,6 +122,11 @@ func (tc *tableCache) AllForRoot(root *doltdb.RootValue) (map[string]sql.Table,
|
||||
return nil, false
|
||||
}
|
||||
|
||||
type SqlDatabase interface {
|
||||
sql.Database
|
||||
GetRoot(*sql.Context) (*doltdb.RootValue, error)
|
||||
}
|
||||
|
||||
// Database implements sql.Database for a dolt DB.
|
||||
type Database struct {
|
||||
name string
|
||||
@@ -132,7 +137,7 @@ type Database struct {
|
||||
tc *tableCache
|
||||
}
|
||||
|
||||
var _ sql.Database = Database{}
|
||||
var _ SqlDatabase = Database{}
|
||||
var _ sql.VersionedDatabase = Database{}
|
||||
var _ sql.TableDropper = Database{}
|
||||
var _ sql.TableCreator = Database{}
|
||||
@@ -388,9 +393,9 @@ func (db Database) getTable(ctx context.Context, root *doltdb.RootValue, tableNa
|
||||
if doltdb.IsReadOnlySystemTable(tableName) {
|
||||
table = &readonlyTable
|
||||
} else if doltdb.HasDoltPrefix(tableName) {
|
||||
table = &WritableDoltTable{DoltTable: readonlyTable}
|
||||
table = &WritableDoltTable{DoltTable: readonlyTable, db: db}
|
||||
} else {
|
||||
table = &AlterableDoltTable{WritableDoltTable{DoltTable: readonlyTable}}
|
||||
table = &AlterableDoltTable{WritableDoltTable{DoltTable: readonlyTable, db: db}}
|
||||
}
|
||||
|
||||
db.tc.Put(tableName, root, table)
|
||||
@@ -574,17 +579,6 @@ func (db Database) CreateTable(ctx *sql.Context, tableName string, sch sql.Schem
|
||||
return ErrInvalidTableName.New(tableName)
|
||||
}
|
||||
|
||||
for _, col := range sch {
|
||||
commentTag := sqleSchema.ExtractTag(col)
|
||||
if commentTag == schema.InvalidTag {
|
||||
// we'll replace this invalid tag
|
||||
continue
|
||||
}
|
||||
if commentTag >= schema.ReservedTagMin {
|
||||
return fmt.Errorf("tag %d is within the reserved tag space", commentTag)
|
||||
}
|
||||
}
|
||||
|
||||
return db.createTable(ctx, tableName, sch)
|
||||
}
|
||||
|
||||
|
||||
@@ -30,14 +30,13 @@ type DoltIndex interface {
|
||||
sql.Index
|
||||
sql.AscendIndex
|
||||
sql.DescendIndex
|
||||
DoltDatabase() Database
|
||||
Schema() schema.Schema
|
||||
TableData() types.Map
|
||||
}
|
||||
|
||||
type doltIndex struct {
|
||||
cols []schema.Column
|
||||
db Database
|
||||
db sql.Database
|
||||
id string
|
||||
indexRowData types.Map
|
||||
indexSch schema.Schema
|
||||
@@ -121,12 +120,7 @@ func (di *doltIndex) DescendRange(lessOrEqual, greaterOrEqual []interface{}) (sq
|
||||
|
||||
// Database implements sql.Index
|
||||
func (di *doltIndex) Database() string {
|
||||
return di.db.name
|
||||
}
|
||||
|
||||
// DoltDatabase returns the dolt database that created this index.
|
||||
func (di *doltIndex) DoltDatabase() Database {
|
||||
return di.db
|
||||
return di.db.Name()
|
||||
}
|
||||
|
||||
// Expressions implements sql.Index
|
||||
|
||||
@@ -17,8 +17,6 @@ package schema
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
sqle "github.com/liquidata-inc/go-mysql-server"
|
||||
"github.com/liquidata-inc/go-mysql-server/sql"
|
||||
@@ -29,12 +27,9 @@ import (
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/typeinfo"
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle/sqlfmt"
|
||||
"github.com/liquidata-inc/dolt/go/store/types"
|
||||
)
|
||||
|
||||
var ErrPartiallyDefinedTags = fmt.Errorf("must define tags for all or none of the schema columns")
|
||||
|
||||
// ApplyDefaults applies the default values to the given indices, returning the resulting row.
|
||||
func ApplyDefaults(ctx context.Context, doltSchema schema.Schema, sqlSchema sql.Schema, indicesOfColumns []int, dRow row.Row) (row.Row, error) {
|
||||
if len(indicesOfColumns) == 0 {
|
||||
@@ -140,7 +135,8 @@ func FromDoltSchema(tableName string, sch schema.Schema) (sql.Schema, error) {
|
||||
Nullable: col.IsNullable(),
|
||||
Source: tableName,
|
||||
PrimaryKey: col.IsPartOfPK,
|
||||
Comment: fmt.Sprintf("tag:%d", col.Tag),
|
||||
Comment: col.Comment,
|
||||
Extra: fmt.Sprintf("tag:%d", tag),
|
||||
},
|
||||
Default: col.Default,
|
||||
}
|
||||
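To make the behavior change concrete, here is roughly how a dolt column with tag 42 and a user-supplied comment would now surface as a sql.Column; the field values are illustrative, and previously Comment would have held the string "tag:42" itself:

col := &sql.Column{
	Name:       "age",
	Type:       sql.Int32,
	Nullable:   true,
	Source:     "people",
	PrimaryKey: false,
	Comment:    "age in years", // the user's comment, passed through verbatim
	Extra:      "tag:42",       // the dolt tag now lives in Extra
}
_ = col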
@@ -157,39 +153,20 @@ func ToDoltSchema(ctx context.Context, root *doltdb.RootValue, tableName string,
|
||||
var cols []schema.Column
|
||||
var err error
|
||||
|
||||
// Users must define all or none of the column tags
|
||||
userDefinedTags := ExtractTag(sqlSchema[0]) != schema.InvalidTag
|
||||
var tags []uint64
|
||||
|
||||
if userDefinedTags {
|
||||
for _, col := range sqlSchema {
|
||||
commentTag := ExtractTag(col)
|
||||
tags = append(tags, commentTag)
|
||||
if commentTag == schema.InvalidTag {
|
||||
return nil, ErrPartiallyDefinedTags
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// generate tags for all columns
|
||||
var names []string
|
||||
var kinds []types.NomsKind
|
||||
for _, col := range sqlSchema {
|
||||
names = append(names, col.Name)
|
||||
ti, err := typeinfo.FromSqlType(col.Type)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
kinds = append(kinds, ti.NomsKind())
|
||||
|
||||
// check for user defined tags
|
||||
if ExtractTag(col) != schema.InvalidTag {
|
||||
return nil, ErrPartiallyDefinedTags
|
||||
}
|
||||
}
|
||||
tags, err = root.GenerateTagsForNewColumns(ctx, tableName, names, kinds)
|
||||
// generate tags for all columns
|
||||
var names []string
|
||||
var kinds []types.NomsKind
|
||||
for _, col := range sqlSchema {
|
||||
names = append(names, col.Name)
|
||||
ti, err := typeinfo.FromSqlType(col.Type)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
kinds = append(kinds, ti.NomsKind())
|
||||
}
|
||||
tags, err := root.GenerateTagsForNewColumns(ctx, tableName, names, kinds)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(tags) != len(sqlSchema) {
|
||||
@@ -228,24 +205,5 @@ func ToDoltCol(tag uint64, col *sql.Column) (schema.Column, error) {
|
||||
return schema.Column{}, err
|
||||
}
|
||||
|
||||
return schema.NewColumnWithTypeInfo(col.Name, tag, typeInfo, col.PrimaryKey, col.Default.String(), constraints...)
|
||||
}
|
||||
|
||||
// ExtractTag extracts the optional comment tag from a column type defn, or InvalidTag if it can't be extracted
|
||||
func ExtractTag(col *sql.Column) uint64 {
|
||||
if len(col.Comment) == 0 {
|
||||
return schema.InvalidTag
|
||||
}
|
||||
|
||||
i := strings.Index(col.Comment, sqlfmt.TagCommentPrefix)
|
||||
if i >= 0 {
|
||||
startIdx := i + len(sqlfmt.TagCommentPrefix)
|
||||
tag, err := strconv.ParseUint(col.Comment[startIdx:], 10, 64)
|
||||
if err != nil {
|
||||
return schema.InvalidTag
|
||||
}
|
||||
return tag
|
||||
}
|
||||
|
||||
return schema.InvalidTag
|
||||
return schema.NewColumnWithTypeInfo(col.Name, tag, typeInfo, col.PrimaryKey, col.Default.String(), col.Comment, constraints...)
|
||||
}
|
||||
|
||||
@@ -17,8 +17,6 @@ package sqle
|
||||
import (
|
||||
"github.com/liquidata-inc/go-mysql-server/sql"
|
||||
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle/sqlfmt"
|
||||
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
|
||||
)
|
||||
|
||||
@@ -26,12 +24,12 @@ import (
|
||||
func SchemasTableSchema() sql.Schema {
|
||||
return []*sql.Column{
|
||||
// Currently: `view`.
|
||||
{Name: doltdb.SchemasTablesTypeCol, Type: sql.Text, Source: doltdb.SchemasTableName, PrimaryKey: true, Comment: sqlfmt.FmtColTagComment(doltdb.DoltSchemasTypeTag)},
|
||||
{Name: doltdb.SchemasTablesTypeCol, Type: sql.Text, Source: doltdb.SchemasTableName, PrimaryKey: true},
|
||||
// The name of the database entity.
|
||||
{Name: doltdb.SchemasTablesNameCol, Type: sql.Text, Source: doltdb.SchemasTableName, PrimaryKey: true, Comment: sqlfmt.FmtColTagComment(doltdb.DoltSchemasNameTag)},
|
||||
{Name: doltdb.SchemasTablesNameCol, Type: sql.Text, Source: doltdb.SchemasTableName, PrimaryKey: true},
|
||||
// The schema fragment associated with the database entity.
|
||||
// For example, the SELECT statement for a CREATE VIEW.
|
||||
{Name: doltdb.SchemasTablesFragmentCol, Type: sql.Text, Source: doltdb.SchemasTableName, PrimaryKey: false, Comment: sqlfmt.FmtColTagComment(doltdb.DoltSchemasFragmentTag)},
|
||||
{Name: doltdb.SchemasTablesFragmentCol, Type: sql.Text, Source: doltdb.SchemasTableName, PrimaryKey: false},
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -269,11 +269,11 @@ func CompressSchema(sch schema.Schema, colNames ...string) schema.Schema {
|
||||
}
|
||||
} else {
|
||||
cols = make([]schema.Column, sch.GetAllCols().Size())
|
||||
sch.GetAllCols().IterInSortedOrder(func(tag uint64, col schema.Column) (stop bool) {
|
||||
sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
|
||||
col.Tag = itag
|
||||
cols[itag] = col
|
||||
itag++
|
||||
return false
|
||||
return false, nil
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
go/libraries/doltcore/sqle/show_create_table.go (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
// Copyright 2020 Liquidata, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package sqle
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
sqle "github.com/liquidata-inc/go-mysql-server"
|
||||
"github.com/liquidata-inc/go-mysql-server/sql"
|
||||
|
||||
"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
|
||||
)
|
||||
|
||||
// These functions cannot live in the sqlfmt package because depending on the sqle package would create a circular reference.
|
||||
|
||||
func PrepareCreateTableStmt(ctx context.Context, root *doltdb.RootValue) (*sql.Context, *sqle.Engine, *DoltSession) {
|
||||
dsess := DefaultDoltSession()
|
||||
sqlCtx := sql.NewContext(ctx,
|
||||
sql.WithSession(dsess),
|
||||
sql.WithIndexRegistry(sql.NewIndexRegistry()),
|
||||
sql.WithViewRegistry(sql.NewViewRegistry()))
|
||||
engine := sqle.NewDefault()
|
||||
sqlDb := &UserSpaceDatabase{RootValue: root}
|
||||
engine.AddDatabase(sqlDb)
|
||||
dsess.SetCurrentDatabase(sqlDb.Name())
|
||||
return sqlCtx, engine, dsess
|
||||
}
|
||||
|
||||
func GetCreateTableStmt(ctx *sql.Context, engine *sqle.Engine, tableName string) (string, error) {
|
||||
_, rowIter, err := engine.Query(ctx, fmt.Sprintf("SHOW CREATE TABLE `%s`;", tableName))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
rows, err := sql.RowIterToRows(rowIter)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if len(rows) != 1 || len(rows[0]) != 2 {
|
||||
return "", fmt.Errorf("unexpected result from SHOW CREATE TABLE")
|
||||
}
|
||||
stmt, ok := rows[0][1].(string)
|
||||
if !ok {
|
||||
return "", fmt.Errorf("expected string statement from SHOW CREATE TABLE")
|
||||
}
|
||||
return stmt + ";", nil
|
||||
}
|
||||
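A rough usage sketch for the two helpers above, assuming a *doltdb.RootValue is already in hand; the package qualifier and error handling are illustrative, not part of the diff:

// Build a throwaway SQL engine over the given root and ask it for the
// CREATE TABLE statement of a single table.
sqlCtx, engine, _ := sqle.PrepareCreateTableStmt(ctx, root)
stmt, err := sqle.GetCreateTableStmt(sqlCtx, engine, "people")
if err != nil {
	return err
}
fmt.Println(stmt) // e.g. CREATE TABLE `people` ( ... );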
@@ -45,18 +45,18 @@ func TestCreateTable(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "Test create single column schema",
|
||||
query: "create table testTable (id int primary key comment 'tag:100')",
|
||||
query: "create table testTable (id int primary key)",
|
||||
expectedTable: "testTable",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "id", 100, sql.Int32, true, schema.NotNullConstraint{})),
|
||||
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{})),
|
||||
},
|
||||
{
|
||||
name: "Test create two column schema",
|
||||
query: "create table testTable (id int primary key comment 'tag:100', age int comment 'tag:101')",
|
||||
query: "create table testTable (id int primary key, age int)",
|
||||
expectedTable: "testTable",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "id", 100, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 101, sql.Int32, false)),
|
||||
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 7208, sql.Int32, false)),
|
||||
},
|
||||
{
|
||||
name: "Test syntax error",
|
||||
@@ -98,132 +98,123 @@ func TestCreateTable(t *testing.T) {
|
||||
{
|
||||
name: "Test types",
|
||||
query: `create table testTable (
|
||||
id int primary key comment 'tag:100',
|
||||
age int comment 'tag:101',
|
||||
first_name varchar(255) comment 'tag:102',
|
||||
is_married boolean comment 'tag:103') `,
|
||||
id int primary key,
|
||||
age int,
|
||||
first_name varchar(255),
|
||||
is_married boolean) `,
|
||||
expectedTable: "testTable",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "id", 100, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 101, sql.Int32, false),
|
||||
schemaNewColumn(t, "first_name", 102, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false),
|
||||
schemaNewColumn(t, "is_married", 103, sql.Boolean, false)),
|
||||
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 7208, sql.Int32, false),
|
||||
schemaNewColumn(t, "first_name", 3264, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false),
|
||||
schemaNewColumn(t, "is_married", 14626, sql.Boolean, false)),
|
||||
},
|
||||
{
|
||||
name: "Test all supported types",
|
||||
expectedTable: "testTable",
|
||||
query: `create table testTable (
|
||||
c0 int primary key comment 'tag:100',
|
||||
c1 tinyint comment 'tag:101',
|
||||
c2 smallint comment 'tag:102',
|
||||
c3 mediumint comment 'tag:103',
|
||||
c4 integer comment 'tag:104',
|
||||
c5 bigint comment 'tag:105',
|
||||
c6 bool comment 'tag:106',
|
||||
c7 boolean comment 'tag:107',
|
||||
c8 bit(10) comment 'tag:108',
|
||||
c9 text comment 'tag:109',
|
||||
c10 tinytext comment 'tag:110',
|
||||
c11 mediumtext comment 'tag:111',
|
||||
c12 longtext comment 'tag:112',
|
||||
c16 char(5) comment 'tag:116',
|
||||
c17 varchar(255) comment 'tag:117',
|
||||
c18 varchar(80) comment 'tag:118',
|
||||
c19 float comment 'tag:119',
|
||||
c20 double comment 'tag:120',
|
||||
c22 int unsigned comment 'tag:122',
|
||||
c23 tinyint unsigned comment 'tag:123',
|
||||
c24 smallint unsigned comment 'tag:124',
|
||||
c25 mediumint unsigned comment 'tag:125',
|
||||
c26 bigint unsigned comment 'tag:126')`,
|
||||
c0 int primary key,
|
||||
c1 tinyint,
|
||||
c2 smallint,
|
||||
c3 mediumint,
|
||||
c4 integer,
|
||||
c5 bigint,
|
||||
c6 bool,
|
||||
c7 boolean,
|
||||
c8 bit(10),
|
||||
c9 text,
|
||||
c10 tinytext,
|
||||
c11 mediumtext,
|
||||
c12 longtext,
|
||||
c16 char(5),
|
||||
c17 varchar(255),
|
||||
c18 varchar(80),
|
||||
c19 float,
|
||||
c20 double,
|
||||
c22 int unsigned,
|
||||
c23 tinyint unsigned,
|
||||
c24 smallint unsigned,
|
||||
c25 mediumint unsigned,
|
||||
c26 bigint unsigned)`,
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "c0", 100, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "c1", 101, sql.Int8, false),
|
||||
schemaNewColumn(t, "c2", 102, sql.Int16, false),
|
||||
schemaNewColumn(t, "c3", 103, sql.Int24, false),
|
||||
schemaNewColumn(t, "c4", 104, sql.Int32, false),
|
||||
schemaNewColumn(t, "c5", 105, sql.Int64, false),
|
||||
schemaNewColumn(t, "c6", 106, sql.Boolean, false),
|
||||
schemaNewColumn(t, "c7", 107, sql.Boolean, false),
|
||||
schemaNewColumn(t, "c8", 108, sql.MustCreateBitType(10), false),
|
||||
schemaNewColumn(t, "c9", 109, sql.Text, false),
|
||||
schemaNewColumn(t, "c10", 110, sql.TinyText, false),
|
||||
schemaNewColumn(t, "c11", 111, sql.MediumText, false),
|
||||
schemaNewColumn(t, "c12", 112, sql.LongText, false),
|
||||
schemaNewColumn(t, "c0", 594, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "c1", 601, sql.Int8, false),
|
||||
schemaNewColumn(t, "c2", 14542, sql.Int16, false),
|
||||
schemaNewColumn(t, "c3", 13309, sql.Int24, false),
|
||||
schemaNewColumn(t, "c4", 15884, sql.Int32, false),
|
||||
schemaNewColumn(t, "c5", 14619, sql.Int64, false),
|
||||
schemaNewColumn(t, "c6", 13192, sql.Boolean, false),
|
||||
schemaNewColumn(t, "c7", 5981, sql.Boolean, false),
|
||||
schemaNewColumn(t, "c8", 14871, sql.MustCreateBitType(10), false),
|
||||
schemaNewColumn(t, "c9", 4167, sql.Text, false),
|
||||
schemaNewColumn(t, "c10", 1965, sql.TinyText, false),
|
||||
schemaNewColumn(t, "c11", 12860, sql.MediumText, false),
|
||||
schemaNewColumn(t, "c12", 7155, sql.LongText, false),
|
||||
//schemaNewColumn(t, "c13", 113, sql.TinyBlob, false),
|
||||
//schemaNewColumn(t, "c14", 114, sql.Blob, false),
|
||||
//schemaNewColumn(t, "c15", 115, sql.LongBlob, false),
|
||||
schemaNewColumn(t, "c16", 116, sql.MustCreateStringWithDefaults(sqltypes.Char, 5), false),
|
||||
schemaNewColumn(t, "c17", 117, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false),
|
||||
schemaNewColumn(t, "c18", 118, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schemaNewColumn(t, "c19", 119, sql.Float32, false),
|
||||
schemaNewColumn(t, "c20", 120, sql.Float64, false),
|
||||
schemaNewColumn(t, "c16", 15859, sql.MustCreateStringWithDefaults(sqltypes.Char, 5), false),
|
||||
schemaNewColumn(t, "c17", 11710, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false),
|
||||
schemaNewColumn(t, "c18", 6838, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schemaNewColumn(t, "c19", 9377, sql.Float32, false),
|
||||
schemaNewColumn(t, "c20", 15979, sql.Float64, false),
|
||||
//schemaNewColumn(t, "c21", 121, sql.MustCreateDecimalType(10, 5), false),
|
||||
schemaNewColumn(t, "c22", 122, sql.Uint32, false),
|
||||
schemaNewColumn(t, "c23", 123, sql.Uint8, false),
|
||||
schemaNewColumn(t, "c24", 124, sql.Uint16, false),
|
||||
schemaNewColumn(t, "c25", 125, sql.Uint24, false),
|
||||
schemaNewColumn(t, "c26", 126, sql.Uint64, false),
|
||||
schemaNewColumn(t, "c22", 2910, sql.Uint32, false),
|
||||
schemaNewColumn(t, "c23", 8740, sql.Uint8, false),
|
||||
schemaNewColumn(t, "c24", 8689, sql.Uint16, false),
|
||||
schemaNewColumn(t, "c25", 5243, sql.Uint24, false),
|
||||
schemaNewColumn(t, "c26", 9338, sql.Uint64, false),
|
||||
),
|
||||
},
|
||||
{
|
||||
name: "Test primary keys",
|
||||
query: `create table testTable (
|
||||
id int comment 'tag:100',
|
||||
age int comment 'tag:101',
|
||||
first_name varchar(80) comment 'tag:102',
|
||||
is_married bool comment 'tag:103',
|
||||
id int,
|
||||
age int,
|
||||
first_name varchar(80),
|
||||
is_married bool,
|
||||
primary key (id, age))`,
|
||||
expectedTable: "testTable",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "id", 100, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 101, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "first_name", 102, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schemaNewColumn(t, "is_married", 103, sql.Boolean, false)),
|
||||
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 7208, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "first_name", 3264, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schemaNewColumn(t, "is_married", 14626, sql.Boolean, false)),
|
||||
},
|
||||
{
|
||||
name: "Test not null constraints",
|
||||
query: `create table testTable (
|
||||
id int comment 'tag:100',
|
||||
age int comment 'tag:101',
|
||||
first_name varchar(80) not null comment 'tag:102',
|
||||
is_married bool comment 'tag:103',
|
||||
id int,
|
||||
age int,
|
||||
first_name varchar(80) not null,
|
||||
is_married bool,
|
||||
primary key (id, age))`,
|
||||
expectedTable: "testTable",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "id", 100, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 101, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "first_name", 102, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "is_married", 103, sql.Boolean, false)),
|
||||
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 7208, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "first_name", 3264, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "is_married", 14626, sql.Boolean, false)),
|
||||
},
|
||||
{
|
||||
name: "Test quoted columns",
|
||||
query: "create table testTable (" +
|
||||
"`id` int comment 'tag:100', " +
|
||||
"`age` int comment 'tag:101', " +
|
||||
"`timestamp` varchar(80) comment 'tag:102', " +
|
||||
"`is married` bool comment 'tag:103', " +
|
||||
"`id` int, " +
|
||||
"`age` int, " +
|
||||
"`timestamp` varchar(80), " +
|
||||
"`is married` bool, " +
|
||||
"primary key (`id`, `age`))",
|
||||
expectedTable: "testTable",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "id", 100, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 101, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "timestamp", 102, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schemaNewColumn(t, "is married", 103, sql.Boolean, false)),
|
||||
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 7208, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "timestamp", 10168, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schemaNewColumn(t, "is married", 14626, sql.Boolean, false)),
|
||||
},
|
||||
{
|
||||
name: "Test tag comments",
|
||||
query: `create table testTable (
|
||||
id int primary key comment 'tag:5', age int comment 'tag:10')`,
|
||||
expectedTable: "testTable",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "id", 5, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 10, sql.Int32, false)),
|
||||
},
|
||||
{
|
||||
name: "Test faulty tag comments",
|
||||
query: `create table testTable (
|
||||
id int primary key comment 'tag:a', age int comment 'this is my personal area')`,
|
||||
id int primary key, age int)`,
|
||||
expectedTable: "testTable",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
@@ -233,34 +224,34 @@ func TestCreateTable(t *testing.T) {
|
||||
{
|
||||
name: "Test ip2nation",
|
||||
query: `CREATE TABLE ip2nation (
|
||||
ip int(11) unsigned NOT NULL default 0 comment 'tag:100',
|
||||
country char(2) NOT NULL default '' comment 'tag:101',
|
||||
ip int(11) unsigned NOT NULL default 0,
|
||||
country char(2) NOT NULL default '',
|
||||
PRIMARY KEY (ip));`,
|
||||
expectedTable: "ip2nation",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumnWDefVal(t, "ip", 100, sql.Uint32, true, "0", schema.NotNullConstraint{}),
|
||||
schemaNewColumnWDefVal(t, "country", 101, sql.MustCreateStringWithDefaults(sqltypes.Char, 2), false, `""`, schema.NotNullConstraint{})),
|
||||
schemaNewColumnWDefVal(t, "ip", 7265, sql.Uint32, true, "0", schema.NotNullConstraint{}),
|
||||
schemaNewColumnWDefVal(t, "country", 6630, sql.MustCreateStringWithDefaults(sqltypes.Char, 2), false, `""`, schema.NotNullConstraint{})),
|
||||
},
|
||||
{
|
||||
name: "Test ip2nationCountries",
|
||||
expectedTable: "ip2nationCountries",
|
||||
query: `CREATE TABLE ip2nationCountries (
|
||||
code varchar(4) NOT NULL default '' COMMENT 'tag:100',
|
||||
iso_code_2 varchar(2) NOT NULL default '' COMMENT 'tag:101',
|
||||
iso_code_3 varchar(3) default '' COMMENT 'tag:102',
|
||||
iso_country varchar(255) NOT NULL default '' COMMENT 'tag:103',
|
||||
country varchar(255) NOT NULL default '' COMMENT 'tag:104',
|
||||
lat float NOT NULL default 0.0 COMMENT 'tag:105',
|
||||
lon float NOT NULL default 0.0 COMMENT 'tag:106',
|
||||
code varchar(4) NOT NULL default '',
|
||||
iso_code_2 varchar(2) NOT NULL default '',
|
||||
iso_code_3 varchar(3) default '',
|
||||
iso_country varchar(255) NOT NULL default '',
|
||||
country varchar(255) NOT NULL default '',
|
||||
lat float NOT NULL default 0.0,
|
||||
lon float NOT NULL default 0.0,
|
||||
PRIMARY KEY (code));`,
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumnWDefVal(t, "code", 100, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 4), true, `""`, schema.NotNullConstraint{}),
|
||||
schemaNewColumnWDefVal(t, "iso_code_2", 101, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 2), false, `""`, schema.NotNullConstraint{}),
|
||||
schemaNewColumnWDefVal(t, "iso_code_3", 102, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 3), false, `""`),
|
||||
schemaNewColumnWDefVal(t, "iso_country", 103, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false, `""`, schema.NotNullConstraint{}),
|
||||
schemaNewColumnWDefVal(t, "country", 104, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false, `""`, schema.NotNullConstraint{}),
|
||||
schemaNewColumnWDefVal(t, "lat", 105, sql.Float32, false, "0", schema.NotNullConstraint{}),
|
||||
schemaNewColumnWDefVal(t, "lon", 106, sql.Float32, false, "0", schema.NotNullConstraint{})),
|
||||
schemaNewColumnWDefVal(t, "code", 7802, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 4), true, `""`, schema.NotNullConstraint{}),
|
||||
schemaNewColumnWDefVal(t, "iso_code_2", 9266, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 2), false, `""`, schema.NotNullConstraint{}),
|
||||
schemaNewColumnWDefVal(t, "iso_code_3", 8427, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 3), false, `""`),
|
||||
schemaNewColumnWDefVal(t, "iso_country", 7151, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false, `""`, schema.NotNullConstraint{}),
|
||||
schemaNewColumnWDefVal(t, "country", 879, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false, `""`, schema.NotNullConstraint{}),
|
||||
schemaNewColumnWDefVal(t, "lat", 3502, sql.Float32, false, "0", schema.NotNullConstraint{}),
|
||||
schemaNewColumnWDefVal(t, "lon", 9907, sql.Float32, false, "0", schema.NotNullConstraint{})),
|
||||
},
|
||||
}
|
||||
|
||||
@@ -289,7 +280,7 @@ func TestCreateTable(t *testing.T) {
|
||||
|
||||
sch, err := table.GetSchema(ctx)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, tt.expectedSchema, sch)
|
||||
equalSchemas(t, tt.expectedSchema, sch)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -375,16 +366,16 @@ func TestAddColumn(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "alter add column",
|
||||
query: "alter table people add (newColumn varchar(80) comment 'tag:100')",
|
||||
query: "alter table people add (newColumn varchar(80))",
|
||||
expectedSchema: dtestutils.AddColumnToSchema(PeopleTestSchema,
|
||||
schemaNewColumn(t, "newColumn", 100, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false)),
|
||||
expectedRows: dtestutils.AddColToRows(t, AllPeopleRows, 100, nil),
|
||||
schemaNewColumn(t, "newColumn", 4208, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false)),
|
||||
expectedRows: dtestutils.AddColToRows(t, AllPeopleRows, 4208, nil),
|
||||
},
|
||||
{
|
||||
name: "alter add column first",
|
||||
query: "alter table people add newColumn varchar(80) comment 'tag:100' first",
|
||||
query: "alter table people add newColumn varchar(80) first",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "newColumn", 100, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schemaNewColumn(t, "newColumn", 4208, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schema.NewColumn("id", IdTag, types.IntKind, true, schema.NotNullConstraint{}),
|
||||
schema.NewColumn("first_name", FirstNameTag, types.StringKind, false, schema.NotNullConstraint{}),
|
||||
schema.NewColumn("last_name", LastNameTag, types.StringKind, false, schema.NotNullConstraint{}),
|
||||
@@ -394,82 +385,77 @@ func TestAddColumn(t *testing.T) {
|
||||
schema.NewColumn("uuid", UuidTag, types.UUIDKind, false),
|
||||
schema.NewColumn("num_episodes", NumEpisodesTag, types.UintKind, false),
|
||||
),
|
||||
expectedRows: dtestutils.AddColToRows(t, AllPeopleRows, 100, nil),
|
||||
expectedRows: dtestutils.AddColToRows(t, AllPeopleRows, 4208, nil),
|
||||
},
|
||||
{
|
||||
name: "alter add column middle",
|
||||
query: "alter table people add newColumn varchar(80) comment 'tag:100' after last_name",
|
||||
query: "alter table people add newColumn varchar(80) after last_name",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schema.NewColumn("id", IdTag, types.IntKind, true, schema.NotNullConstraint{}),
|
||||
schema.NewColumn("first_name", FirstNameTag, types.StringKind, false, schema.NotNullConstraint{}),
|
||||
schema.NewColumn("last_name", LastNameTag, types.StringKind, false, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "newColumn", 100, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schemaNewColumn(t, "newColumn", 4208, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schema.NewColumn("is_married", IsMarriedTag, types.BoolKind, false),
|
||||
schema.NewColumn("age", AgeTag, types.IntKind, false),
|
||||
schema.NewColumn("rating", RatingTag, types.FloatKind, false),
|
||||
schema.NewColumn("uuid", UuidTag, types.UUIDKind, false),
|
||||
schema.NewColumn("num_episodes", NumEpisodesTag, types.UintKind, false),
|
||||
),
|
||||
expectedRows: dtestutils.AddColToRows(t, AllPeopleRows, 100, nil),
|
||||
expectedRows: dtestutils.AddColToRows(t, AllPeopleRows, 4208, nil),
|
||||
},
|
||||
{
|
||||
name: "alter add column not null",
|
||||
query: "alter table people add (newColumn varchar(80) not null default 'default' comment 'tag:100')",
|
||||
query: "alter table people add (newColumn varchar(80) not null default 'default')",
|
||||
expectedSchema: dtestutils.AddColumnToSchema(PeopleTestSchema,
|
||||
schemaNewColumnWDefVal(t, "newColumn", 100, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false, `"default"`, schema.NotNullConstraint{})),
|
||||
expectedRows: dtestutils.AddColToRows(t, AllPeopleRows, 100, types.String("default")),
|
||||
schemaNewColumnWDefVal(t, "newColumn", 4208, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false, `"default"`, schema.NotNullConstraint{})),
|
||||
expectedRows: dtestutils.AddColToRows(t, AllPeopleRows, 4208, types.String("default")),
|
||||
},
|
||||
{
|
||||
name: "alter add column not null with expression default",
|
||||
query: "alter table people add (newColumn int not null default 2+2/2 comment 'tag:100')",
|
||||
query: "alter table people add (newColumn int not null default 2+2/2)",
|
||||
expectedSchema: dtestutils.AddColumnToSchema(PeopleTestSchema,
|
||||
schemaNewColumnWDefVal(t, "newColumn", 100, sql.Int32, false, "(2 + 2 / 2)", schema.NotNullConstraint{})),
|
||||
expectedRows: dtestutils.AddColToRows(t, AllPeopleRows, 100, types.Int(3)),
|
||||
schemaNewColumnWDefVal(t, "newColumn", 4435, sql.Int32, false, "(2 + 2 / 2)", schema.NotNullConstraint{})),
|
||||
expectedRows: dtestutils.AddColToRows(t, AllPeopleRows, 4435, types.Int(3)),
|
||||
},
|
||||
{
|
||||
name: "alter add column not null with negative expression",
|
||||
query: "alter table people add (newColumn float not null default -1.1 comment 'tag:100')",
|
||||
query: "alter table people add (newColumn float not null default -1.1)",
|
||||
expectedSchema: dtestutils.AddColumnToSchema(PeopleTestSchema,
|
||||
schemaNewColumnWDefVal(t, "newColumn", 100, sql.Float32, false, "-1.1", schema.NotNullConstraint{})),
|
||||
expectedRows: dtestutils.AddColToRows(t, AllPeopleRows, 100, types.Float(float32(-1.1))),
|
||||
schemaNewColumnWDefVal(t, "newColumn", 13066, sql.Float32, false, "-1.1", schema.NotNullConstraint{})),
|
||||
expectedRows: dtestutils.AddColToRows(t, AllPeopleRows, 13066, types.Float(float32(-1.1))),
|
||||
},
|
||||
{
|
||||
name: "alter add column not null with type mismatch in default",
|
||||
query: "alter table people add (newColumn float not null default 'not a number' comment 'tag:100')",
|
||||
query: "alter table people add (newColumn float not null default 'not a number')",
|
||||
expectedErr: "incompatible type",
|
||||
},
|
||||
{
|
||||
name: "alter add column column not found",
|
||||
query: "alter table people add column newColumn float comment 'tag:100' after notFound",
|
||||
query: "alter table people add column newColumn float after notFound",
|
||||
expectedErr: `table "people" does not have column "notFound"`,
|
||||
},
|
||||
{
|
||||
name: "alter add column table not found",
|
||||
query: "alter table notFound add column newColumn float comment 'tag:100'",
|
||||
query: "alter table notFound add column newColumn float",
|
||||
expectedErr: "table not found: notFound",
|
||||
},
|
||||
{
|
||||
name: "alter add column with tag conflict",
|
||||
query: fmt.Sprintf("alter table people add (newColumn float default 1.0 comment 'tag:%d')", IdTag),
|
||||
expectedErr: fmt.Sprintf("Cannot create column newColumn, the tag %d was already used in table people", IdTag),
|
||||
},
|
||||
{
|
||||
name: "alter add column not null without default",
|
||||
query: "alter table people add (newColumn varchar(80) not null comment 'tag:100')",
|
||||
query: "alter table people add (newColumn varchar(80) not null)",
|
||||
expectedErr: "must have a non-null default value",
|
||||
},
|
||||
{
|
||||
name: "alter add column nullable",
|
||||
query: "alter table people add (newColumn bigint comment 'tag:100')",
|
||||
query: "alter table people add (newColumn bigint)",
|
||||
expectedSchema: dtestutils.AddColumnToSchema(PeopleTestSchema,
|
||||
schemaNewColumn(t, "newColumn", 100, sql.Int64, false)),
|
||||
schemaNewColumn(t, "newColumn", 4435, sql.Int64, false)),
|
||||
expectedRows: AllPeopleRows,
|
||||
},
|
||||
{
|
||||
name: "alter add column with optional column keyword",
|
||||
query: "alter table people add column (newColumn varchar(80) comment 'tag:100')",
|
||||
query: "alter table people add column (newColumn varchar(80))",
|
||||
expectedSchema: dtestutils.AddColumnToSchema(PeopleTestSchema,
|
||||
schemaNewColumn(t, "newColumn", 100, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false)),
|
||||
schemaNewColumn(t, "newColumn", 4208, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false)),
|
||||
expectedRows: AllPeopleRows,
|
||||
},
|
||||
}
|
||||
@@ -496,7 +482,7 @@ func TestAddColumn(t *testing.T) {
|
||||
assert.NoError(t, err)
|
||||
sch, err := table.GetSchema(ctx)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.expectedSchema, sch)
|
||||
equalSchemas(t, tt.expectedSchema, sch)
|
||||
|
||||
updatedTable, ok, err := updatedRoot.GetTable(ctx, "people")
|
||||
assert.NoError(t, err)
|
||||
@@ -616,21 +602,11 @@ func TestModifyAndChangeColumn(t *testing.T) {
|
||||
),
|
||||
expectedRows: AllPeopleRows,
|
||||
},
|
||||
{
|
||||
name: "alter modify column change tag",
|
||||
query: "alter table people modify column first_name longtext not null comment 'tag:100'",
|
||||
expectedErr: "cannot change the tag of an existing column",
|
||||
},
|
||||
{
|
||||
name: "alter modify column not null with type mismatch in default",
|
||||
query: "alter table people modify rating double default 'not a number'",
|
||||
expectedErr: "incompatible type for default value",
|
||||
},
|
||||
{
|
||||
name: "alter modify column with tag conflict",
|
||||
query: "alter table people modify rating double default 1.0 comment 'tag:1'",
|
||||
expectedErr: "cannot change the tag of an existing column",
|
||||
},
|
||||
{
|
||||
name: "alter modify column with type change",
|
||||
query: "alter table people modify rating varchar(10)",
|
||||
@@ -665,7 +641,7 @@ func TestModifyAndChangeColumn(t *testing.T) {
|
||||
assert.NoError(t, err)
|
||||
sch, err := table.GetSchema(ctx)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.expectedSchema, sch)
|
||||
equalSchemas(t, tt.expectedSchema, sch)
|
||||
|
||||
updatedTable, ok, err := updatedRoot.GetTable(ctx, "people")
|
||||
assert.NoError(t, err)
|
||||
@@ -1060,18 +1036,18 @@ func TestParseCreateTableStatement(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "Test create single column schema",
|
||||
query: "create table testTable (id int primary key comment 'tag:100')",
|
||||
query: "create table testTable (id int primary key)",
|
||||
expectedTable: "testTable",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "id", 100, sql.Int32, true, schema.NotNullConstraint{})),
|
||||
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{})),
|
||||
},
|
||||
{
|
||||
name: "Test create two column schema",
|
||||
query: "create table testTable (id int primary key comment 'tag:100', age int comment 'tag:101')",
|
||||
query: "create table testTable (id int primary key, age int)",
|
||||
expectedTable: "testTable",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "id", 100, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 101, sql.Int32, false)),
|
||||
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 7208, sql.Int32, false)),
|
||||
},
|
||||
{
|
||||
name: "Test syntax error",
|
||||
@@ -1092,132 +1068,123 @@ func TestParseCreateTableStatement(t *testing.T) {
|
||||
{
|
||||
name: "Test types",
|
||||
query: `create table testTable (
|
||||
id int primary key comment 'tag:100',
|
||||
age int comment 'tag:101',
|
||||
first_name varchar(255) comment 'tag:102',
|
||||
is_married boolean comment 'tag:103') `,
|
||||
id int primary key,
|
||||
age int,
|
||||
first_name varchar(255),
|
||||
is_married boolean) `,
|
||||
expectedTable: "testTable",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "id", 100, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 101, sql.Int32, false),
|
||||
schemaNewColumn(t, "first_name", 102, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false),
|
||||
schemaNewColumn(t, "is_married", 103, sql.Boolean, false)),
|
||||
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 7208, sql.Int32, false),
|
||||
schemaNewColumn(t, "first_name", 3264, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false),
|
||||
schemaNewColumn(t, "is_married", 14626, sql.Boolean, false)),
|
||||
},
|
||||
{
|
||||
name: "Test all supported types",
|
||||
expectedTable: "testTable",
|
||||
query: `create table testTable (
|
||||
c0 int primary key comment 'tag:100',
|
||||
c1 tinyint comment 'tag:101',
|
||||
c2 smallint comment 'tag:102',
|
||||
c3 mediumint comment 'tag:103',
|
||||
c4 integer comment 'tag:104',
|
||||
c5 bigint comment 'tag:105',
|
||||
c6 bool comment 'tag:106',
|
||||
c7 boolean comment 'tag:107',
|
||||
c8 bit(10) comment 'tag:108',
|
||||
c9 text comment 'tag:109',
|
||||
c10 tinytext comment 'tag:110',
|
||||
c11 mediumtext comment 'tag:111',
|
||||
c12 longtext comment 'tag:112',
|
||||
c16 char(5) comment 'tag:116',
|
||||
c17 varchar(255) comment 'tag:117',
|
||||
c18 varchar(80) comment 'tag:118',
|
||||
c19 float comment 'tag:119',
|
||||
c20 double comment 'tag:120',
|
||||
c22 int unsigned comment 'tag:122',
|
||||
c23 tinyint unsigned comment 'tag:123',
|
||||
c24 smallint unsigned comment 'tag:124',
|
||||
c25 mediumint unsigned comment 'tag:125',
|
||||
c26 bigint unsigned comment 'tag:126')`,
|
||||
c0 int primary key,
|
||||
c1 tinyint,
|
||||
c2 smallint,
|
||||
c3 mediumint,
|
||||
c4 integer,
|
||||
c5 bigint,
|
||||
c6 bool,
|
||||
c7 boolean,
|
||||
c8 bit(10),
|
||||
c9 text,
|
||||
c10 tinytext,
|
||||
c11 mediumtext,
|
||||
c12 longtext,
|
||||
c16 char(5),
|
||||
c17 varchar(255),
|
||||
c18 varchar(80),
|
||||
c19 float,
|
||||
c20 double,
|
||||
c22 int unsigned,
|
||||
c23 tinyint unsigned,
|
||||
c24 smallint unsigned,
|
||||
c25 mediumint unsigned,
|
||||
c26 bigint unsigned)`,
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "c0", 100, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "c1", 101, sql.Int8, false),
|
||||
schemaNewColumn(t, "c2", 102, sql.Int16, false),
|
||||
schemaNewColumn(t, "c3", 103, sql.Int24, false),
|
||||
schemaNewColumn(t, "c4", 104, sql.Int32, false),
|
||||
schemaNewColumn(t, "c5", 105, sql.Int64, false),
|
||||
schemaNewColumn(t, "c6", 106, sql.Boolean, false),
|
||||
schemaNewColumn(t, "c7", 107, sql.Boolean, false),
|
||||
schemaNewColumn(t, "c8", 108, sql.MustCreateBitType(10), false),
|
||||
schemaNewColumn(t, "c9", 109, sql.Text, false),
|
||||
schemaNewColumn(t, "c10", 110, sql.TinyText, false),
|
||||
schemaNewColumn(t, "c11", 111, sql.MediumText, false),
|
||||
schemaNewColumn(t, "c12", 112, sql.LongText, false),
|
||||
schemaNewColumn(t, "c0", 594, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "c1", 601, sql.Int8, false),
|
||||
schemaNewColumn(t, "c2", 14542, sql.Int16, false),
|
||||
schemaNewColumn(t, "c3", 13309, sql.Int24, false),
|
||||
schemaNewColumn(t, "c4", 15884, sql.Int32, false),
|
||||
schemaNewColumn(t, "c5", 14619, sql.Int64, false),
|
||||
schemaNewColumn(t, "c6", 13192, sql.Boolean, false),
|
||||
schemaNewColumn(t, "c7", 5981, sql.Boolean, false),
|
||||
schemaNewColumn(t, "c8", 14871, sql.MustCreateBitType(10), false),
|
||||
schemaNewColumn(t, "c9", 4167, sql.Text, false),
|
||||
schemaNewColumn(t, "c10", 1965, sql.TinyText, false),
|
||||
schemaNewColumn(t, "c11", 12860, sql.MediumText, false),
|
||||
schemaNewColumn(t, "c12", 7155, sql.LongText, false),
|
||||
//schemaNewColumn(t, "c13", 113, sql.TinyBlob, false),
|
||||
//schemaNewColumn(t, "c14", 114, sql.Blob, false),
|
||||
//schemaNewColumn(t, "c15", 115, sql.LongBlob, false),
|
||||
schemaNewColumn(t, "c16", 116, sql.MustCreateStringWithDefaults(sqltypes.Char, 5), false),
|
||||
schemaNewColumn(t, "c17", 117, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false),
|
||||
schemaNewColumn(t, "c18", 118, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schemaNewColumn(t, "c19", 119, sql.Float32, false),
|
||||
schemaNewColumn(t, "c20", 120, sql.Float64, false),
|
||||
schemaNewColumn(t, "c16", 15859, sql.MustCreateStringWithDefaults(sqltypes.Char, 5), false),
|
||||
schemaNewColumn(t, "c17", 11710, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false),
|
||||
schemaNewColumn(t, "c18", 6838, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schemaNewColumn(t, "c19", 9377, sql.Float32, false),
|
||||
schemaNewColumn(t, "c20", 15979, sql.Float64, false),
|
||||
//schemaNewColumn(t, "c21", 121, sql.MustCreateDecimalType(10, 5), false),
|
||||
schemaNewColumn(t, "c22", 122, sql.Uint32, false),
|
||||
schemaNewColumn(t, "c23", 123, sql.Uint8, false),
|
||||
schemaNewColumn(t, "c24", 124, sql.Uint16, false),
|
||||
schemaNewColumn(t, "c25", 125, sql.Uint24, false),
|
||||
schemaNewColumn(t, "c26", 126, sql.Uint64, false),
|
||||
schemaNewColumn(t, "c22", 2910, sql.Uint32, false),
|
||||
schemaNewColumn(t, "c23", 8740, sql.Uint8, false),
|
||||
schemaNewColumn(t, "c24", 8689, sql.Uint16, false),
|
||||
schemaNewColumn(t, "c25", 5243, sql.Uint24, false),
|
||||
schemaNewColumn(t, "c26", 9338, sql.Uint64, false),
|
||||
),
|
||||
},
|
||||
{
|
||||
name: "Test primary keys",
|
||||
query: `create table testTable (
|
||||
id int comment 'tag:100',
|
||||
age int comment 'tag:101',
|
||||
first_name varchar(80) comment 'tag:102',
|
||||
is_married bool comment 'tag:103',
|
||||
id int,
|
||||
age int,
|
||||
first_name varchar(80),
|
||||
is_married bool,
|
||||
primary key (id, age))`,
|
||||
expectedTable: "testTable",
|
||||
expectedSchema: dtestutils.CreateSchema(
|
||||
schemaNewColumn(t, "id", 100, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 101, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "first_name", 102, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schemaNewColumn(t, "is_married", 103, sql.Boolean, false)),
|
||||
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "age", 7208, sql.Int32, true, schema.NotNullConstraint{}),
|
||||
schemaNewColumn(t, "first_name", 3264, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
|
||||
schemaNewColumn(t, "is_married", 14626, sql.Boolean, false)),
|
||||
},
{
name: "Test not null constraints",
query: `create table testTable (
id int comment 'tag:100',
age int comment 'tag:101',
first_name varchar(80) not null comment 'tag:102',
is_married bool comment 'tag:103',
id int,
age int,
first_name varchar(80) not null,
is_married bool,
primary key (id, age))`,
expectedTable: "testTable",
expectedSchema: dtestutils.CreateSchema(
schemaNewColumn(t, "id", 100, sql.Int32, true, schema.NotNullConstraint{}),
schemaNewColumn(t, "age", 101, sql.Int32, true, schema.NotNullConstraint{}),
schemaNewColumn(t, "first_name", 102, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false, schema.NotNullConstraint{}),
schemaNewColumn(t, "is_married", 103, sql.Boolean, false)),
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{}),
schemaNewColumn(t, "age", 7208, sql.Int32, true, schema.NotNullConstraint{}),
schemaNewColumn(t, "first_name", 3264, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false, schema.NotNullConstraint{}),
schemaNewColumn(t, "is_married", 14626, sql.Boolean, false)),
},
{
name: "Test quoted columns",
query: "create table testTable (" +
"`id` int comment 'tag:100', " +
"`age` int comment 'tag:101', " +
"`timestamp` varchar(80) comment 'tag:102', " +
"`is married` bool comment 'tag:103', " +
"`id` int, " +
"`age` int, " +
"`timestamp` varchar(80), " +
"`is married` bool, " +
"primary key (`id`, `age`))",
expectedTable: "testTable",
expectedSchema: dtestutils.CreateSchema(
schemaNewColumn(t, "id", 100, sql.Int32, true, schema.NotNullConstraint{}),
schemaNewColumn(t, "age", 101, sql.Int32, true, schema.NotNullConstraint{}),
schemaNewColumn(t, "timestamp", 102, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
schemaNewColumn(t, "is married", 103, sql.Boolean, false)),
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{}),
schemaNewColumn(t, "age", 7208, sql.Int32, true, schema.NotNullConstraint{}),
schemaNewColumn(t, "timestamp", 10168, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 80), false),
schemaNewColumn(t, "is married", 14626, sql.Boolean, false)),
},
{
name: "Test tag comments",
query: `create table testTable (
id int primary key comment 'tag:5', age int comment 'tag:10')`,
expectedTable: "testTable",
expectedSchema: dtestutils.CreateSchema(
schemaNewColumn(t, "id", 5, sql.Int32, true, schema.NotNullConstraint{}),
schemaNewColumn(t, "age", 10, sql.Int32, false)),
},
{
name: "Test faulty tag comments",
query: `create table testTable (
id int primary key comment 'tag:a', age int comment 'this is my personal area')`,
id int primary key, age int)`,
expectedTable: "testTable",
expectedSchema: dtestutils.CreateSchema(
schemaNewColumn(t, "id", 4817, sql.Int32, true, schema.NotNullConstraint{}),

@@ -1227,34 +1194,34 @@ func TestParseCreateTableStatement(t *testing.T) {
{
name: "Test ip2nation",
query: `CREATE TABLE ip2nation (
ip int(11) unsigned NOT NULL default 0 comment 'tag:100',
country char(2) NOT NULL default '' comment 'tag:101',
ip int(11) unsigned NOT NULL default 0,
country char(2) NOT NULL default '',
PRIMARY KEY (ip));`,
expectedTable: "ip2nation",
expectedSchema: dtestutils.CreateSchema(
schemaNewColumnWDefVal(t, "ip", 100, sql.Uint32, true, "0", schema.NotNullConstraint{}),
schemaNewColumnWDefVal(t, "country", 101, sql.MustCreateStringWithDefaults(sqltypes.Char, 2), false, `""`, schema.NotNullConstraint{})),
schemaNewColumnWDefVal(t, "ip", 7265, sql.Uint32, true, "0", schema.NotNullConstraint{}),
schemaNewColumnWDefVal(t, "country", 6630, sql.MustCreateStringWithDefaults(sqltypes.Char, 2), false, `""`, schema.NotNullConstraint{})),
},
{
name: "Test ip2nationCountries",
expectedTable: "ip2nationCountries",
query: `CREATE TABLE ip2nationCountries (
code varchar(4) NOT NULL default '' COMMENT 'tag:100',
iso_code_2 varchar(2) NOT NULL default '' COMMENT 'tag:101',
iso_code_3 varchar(3) default '' COMMENT 'tag:102',
iso_country varchar(255) NOT NULL default '' COMMENT 'tag:103',
country varchar(255) NOT NULL default '' COMMENT 'tag:104',
lat float NOT NULL default 0.0 COMMENT 'tag:105',
lon float NOT NULL default 0.0 COMMENT 'tag:106',
code varchar(4) NOT NULL default '',
iso_code_2 varchar(2) NOT NULL default '',
iso_code_3 varchar(3) default '',
iso_country varchar(255) NOT NULL default '',
country varchar(255) NOT NULL default '',
lat float NOT NULL default 0.0,
lon float NOT NULL default 0.0,
PRIMARY KEY (code));`,
expectedSchema: dtestutils.CreateSchema(
schemaNewColumnWDefVal(t, "code", 100, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 4), true, `""`, schema.NotNullConstraint{}),
schemaNewColumnWDefVal(t, "iso_code_2", 101, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 2), false, `""`, schema.NotNullConstraint{}),
schemaNewColumnWDefVal(t, "iso_code_3", 102, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 3), false, `""`),
schemaNewColumnWDefVal(t, "iso_country", 103, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false, `""`, schema.NotNullConstraint{}),
schemaNewColumnWDefVal(t, "country", 104, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false, `""`, schema.NotNullConstraint{}),
schemaNewColumnWDefVal(t, "lat", 105, sql.Float32, false, "0", schema.NotNullConstraint{}),
schemaNewColumnWDefVal(t, "lon", 106, sql.Float32, false, "0", schema.NotNullConstraint{})),
schemaNewColumnWDefVal(t, "code", 7802, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 4), true, `""`, schema.NotNullConstraint{}),
schemaNewColumnWDefVal(t, "iso_code_2", 9266, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 2), false, `""`, schema.NotNullConstraint{}),
schemaNewColumnWDefVal(t, "iso_code_3", 8427, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 3), false, `""`),
schemaNewColumnWDefVal(t, "iso_country", 7151, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false, `""`, schema.NotNullConstraint{}),
schemaNewColumnWDefVal(t, "country", 879, sql.MustCreateStringWithDefaults(sqltypes.VarChar, 255), false, `""`, schema.NotNullConstraint{}),
schemaNewColumnWDefVal(t, "lat", 3502, sql.Float32, false, "0", schema.NotNullConstraint{}),
schemaNewColumnWDefVal(t, "lon", 9907, sql.Float32, false, "0", schema.NotNullConstraint{})),
},
}

@@ -1271,9 +1238,7 @@ func TestParseCreateTableStatement(t *testing.T) {
assert.Contains(t, err.Error(), tt.expectedErr)
} else {
require.NoError(t, err)
eq, err := schema.SchemasAreEqual(tt.expectedSchema, sch)
require.NoError(t, err)
assert.True(t, eq)
equalSchemas(t, tt.expectedSchema, sch)
assert.Equal(t, tt.expectedTable, tblName)
}
})

@@ -30,19 +30,19 @@ import (
)

const expectedCreateSQL = "CREATE TABLE `table_name` (\n" +
" `id` BIGINT NOT NULL COMMENT 'tag:200',\n" +
" `first_name` LONGTEXT NOT NULL COMMENT 'tag:201',\n" +
" `last_name` LONGTEXT NOT NULL COMMENT 'tag:202',\n" +
" `is_married` BIT(1) COMMENT 'tag:203',\n" +
" `age` BIGINT COMMENT 'tag:204',\n" +
" `rating` DOUBLE COMMENT 'tag:206',\n" +
" `uuid` CHAR(36) CHARACTER SET ascii COLLATE ascii_bin COMMENT 'tag:207',\n" +
" `num_episodes` BIGINT UNSIGNED COMMENT 'tag:208',\n" +
" `id` BIGINT NOT NULL,\n" +
" `first_name` LONGTEXT NOT NULL,\n" +
" `last_name` LONGTEXT NOT NULL,\n" +
" `is_married` BIT(1),\n" +
" `age` BIGINT,\n" +
" `rating` DOUBLE,\n" +
" `uuid` CHAR(36) CHARACTER SET ascii COLLATE ascii_bin,\n" +
" `num_episodes` BIGINT UNSIGNED,\n" +
" PRIMARY KEY (`id`)\n" +
");"
const expectedDropSql = "DROP TABLE `table_name`;"
const expectedDropIfExistsSql = "DROP TABLE IF EXISTS `table_name`;"
const expectedAddColSql = "ALTER TABLE `table_name` ADD `c0` BIGINT NOT NULL COMMENT 'tag:9';"
const expectedAddColSql = "ALTER TABLE `table_name` ADD `c0` BIGINT NOT NULL;"
const expectedDropColSql = "ALTER TABLE `table_name` DROP `first_name`;"
const expectedRenameColSql = "ALTER TABLE `table_name` RENAME COLUMN `id` TO `pk`;"
const expectedRenameTableSql = "RENAME TABLE `table_name` TO `new_table_name`;"

@@ -56,7 +56,7 @@ type test struct {
func TestSchemaAsCreateStmt(t *testing.T) {
tSchema := sqltestutil.PeopleTestSchema
stmt := CreateTableStmtWithTags("table_name", tSchema, nil, nil)
stmt := CreateTableStmt("table_name", tSchema, nil, nil)

assert.Equal(t, expectedCreateSQL, stmt)
}

@@ -74,7 +74,7 @@ func TestTableDropIfExistsStmt(t *testing.T) {
}

func TestAlterTableAddColStmt(t *testing.T) {
newColDef := "`c0` BIGINT NOT NULL COMMENT 'tag:9'"
newColDef := "`c0` BIGINT NOT NULL"
stmt := AlterTableAddColStmt("table_name", newColDef)

assert.Equal(t, expectedAddColSql, stmt)

@@ -23,9 +23,6 @@ import (
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
)

//TODO: This is a relic from before `SHOW CREATE TABLE` was implemented. We should remove this file altogether.
const TagCommentPrefix = "tag:"

// FmtCol converts a column to a string with a given indent space count, name width, and type width. If nameWidth or
// typeWidth are 0 or less than the length of the name or type, then the length of the name or type will be used
func FmtCol(indent, nameWidth, typeWidth int, col schema.Column) string {

@@ -33,12 +30,6 @@ func FmtCol(indent, nameWidth, typeWidth int, col schema.Column) string {
return FmtColWithNameAndType(indent, nameWidth, typeWidth, col.Name, sqlType.String(), col)
}

// FmtColWithTag follows the same logic as FmtCol, but includes the column's tag as a comment
func FmtColWithTag(indent, nameWidth, typeWidth int, col schema.Column) string {
fc := FmtCol(indent, nameWidth, typeWidth, col)
return fmt.Sprintf("%s COMMENT '%s'", fc, FmtColTagComment(col.Tag))
}

// FmtColWithNameAndType creates a string representing a column within a sql create table statement with a given indent
// space count, name width, and type width. If nameWidth or typeWidth are 0 or less than the length of the name or
// type, then the length of the name or type will be used.

@@ -60,6 +51,10 @@ func FmtColWithNameAndType(indent, nameWidth, typeWidth int, colName, typeStr st
colStr += " DEFAULT " + col.Default
}

if col.Comment != "" {
colStr += " COMMENT " + QuoteComment(col.Comment)
}

return colStr
}
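A side note on the sqlfmt hunk above: formatted column definitions now carry only a user-supplied comment (when one is set) and no longer embed the column tag. The standalone sketch below is illustrative only; the column struct and the fmtCol helper are invented stand-ins, not dolt APIs.

package main

import "fmt"

// column is a stand-in for schema.Column, holding only what the formatter needs here.
type column struct {
	Name    string
	TypeStr string
	NotNull bool
	Comment string
}

// fmtCol mirrors the new behavior shown in the hunk above: no tag comment,
// but a user comment is appended when one is present.
func fmtCol(c column) string {
	s := fmt.Sprintf("`%s` %s", c.Name, c.TypeStr)
	if c.NotNull {
		s += " NOT NULL"
	}
	if c.Comment != "" {
		s += fmt.Sprintf(" COMMENT '%s'", c.Comment)
	}
	return s
}

func main() {
	fmt.Println(fmtCol(column{Name: "id", TypeStr: "BIGINT", NotNull: true}))
	// Prints: `id` BIGINT NOT NULL
	fmt.Println(fmtCol(column{Name: "age", TypeStr: "BIGINT", Comment: "age in years"}))
	// Prints: `age` BIGINT COMMENT 'age in years'
}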
@@ -70,10 +65,6 @@ func FmtColPrimaryKey(indent int, colStr string) string {
return fmt.Sprintf(fmtStr, "")
}

func FmtColTagComment(tag uint64) string {
return fmt.Sprintf("%s%d", TagCommentPrefix, tag)
}

func FmtIndex(index schema.Index) string {
sb := strings.Builder{}
if index.IsUnique() {

@@ -137,13 +128,6 @@ func CreateTableStmt(tableName string, sch schema.Schema, foreignKeys []doltdb.F
}, foreignKeys, parentSchs)
}

// CreateTableStmtWithTags generates a SQL CREATE TABLE command that includes the column tags as comments
func CreateTableStmtWithTags(tableName string, sch schema.Schema, foreignKeys []doltdb.ForeignKey, parentSchs map[string]schema.Schema) string {
return createTableStmt(tableName, sch, func(col schema.Column) string {
return FmtColWithTag(2, 0, 0, col)
}, foreignKeys, parentSchs)
}

type fmtColFunc func(col schema.Column) string

func createTableStmt(tableName string, sch schema.Schema, fmtCol fmtColFunc, foreignKeys []doltdb.ForeignKey, parentSchs map[string]schema.Schema) string {

@@ -36,34 +36,34 @@ func TestFmtCol(t *testing.T) {
0,
0,
0,
"`first` LONGTEXT COMMENT 'tag:0'",
"`first` LONGTEXT",
},
{
schema.NewColumn("last", 123, types.IntKind, true),
2,
0,
0,
" `last` BIGINT COMMENT 'tag:123'",
" `last` BIGINT",
},
{
schema.NewColumn("title", 2, types.UintKind, true),
0,
10,
0,
" `title` BIGINT UNSIGNED COMMENT 'tag:2'",
" `title` BIGINT UNSIGNED",
},
{
schema.NewColumn("aoeui", 52, types.UintKind, true),
0,
10,
15,
" `aoeui` BIGINT UNSIGNED COMMENT 'tag:52'",
" `aoeui` BIGINT UNSIGNED",
},
}

for _, test := range tests {
t.Run(test.Expected, func(t *testing.T) {
actual := FmtColWithTag(test.Indent, test.NameWidth, test.TypeWidth, test.Col)
actual := FmtCol(test.Indent, test.NameWidth, test.TypeWidth, test.Col)
assert.Equal(t, test.Expected, actual)
})
}
@@ -68,11 +68,12 @@ type DoltTable struct {
table *doltdb.Table
sch schema.Schema
sqlSch sql.Schema
db Database
db SqlDatabase
}

var _ sql.Table = (*DoltTable)(nil)
var _ sql.IndexedTable = (*DoltTable)(nil)
var _ sql.ForeignKeyTable = (*DoltTable)(nil)

// WithIndexLookup implements sql.IndexedTable
func (t *DoltTable) WithIndexLookup(lookup sql.IndexLookup) sql.Table {

@@ -219,6 +220,7 @@ func (t *DoltTable) PartitionRows(ctx *sql.Context, partition sql.Partition) (sq
// WritableDoltTable allows updating, deleting, and inserting new rows. It implements sql.UpdatableTable and friends.
type WritableDoltTable struct {
DoltTable
db Database
ed *sqlTableEditor
}

@@ -284,6 +286,45 @@ func (t *WritableDoltTable) Updater(ctx *sql.Context) sql.RowUpdater {
return te
}

// GetForeignKeys implements sql.ForeignKeyTable
func (t *DoltTable) GetForeignKeys(ctx *sql.Context) ([]sql.ForeignKeyConstraint, error) {
root, err := t.db.GetRoot(ctx)
if err != nil {
return nil, err
}

fkc, err := root.GetForeignKeyCollection(ctx)
if err != nil {
return nil, err
}

declaredFk, _ := fkc.KeysForTable(t.name)
toReturn := make([]sql.ForeignKeyConstraint, len(declaredFk))

for i, fk := range declaredFk {
parent, ok, err := root.GetTable(ctx, fk.ReferencedTableName)
if err != nil {
return nil, err
}
if !ok {
return nil, fmt.Errorf("cannot find table %s "+
"referenced in foreign key %s", fk.ReferencedTableName, fk.Name)
}

parentSch, err := parent.GetSchema(ctx)
if err != nil {
return nil, err
}

toReturn[i], err = toForeignKeyConstraint(fk, t.sch, parentSch)
if err != nil {
return nil, err
}
}

return toReturn, nil
}
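GetForeignKeys is now implemented on DoltTable itself (the copy on AlterableDoltTable is removed further down), so read-only tables handed out by the engine also report their constraints. A minimal consumer sketch follows; printForeignKeys is an invented helper, while the interface and constraint type come from go-mysql-server as used above.

package example

import (
	"fmt"

	"github.com/liquidata-inc/go-mysql-server/sql"
)

// printForeignKeys lists the declared foreign keys of any table implementing
// sql.ForeignKeyTable, such as *DoltTable.
func printForeignKeys(ctx *sql.Context, tbl sql.ForeignKeyTable) error {
	fks, err := tbl.GetForeignKeys(ctx)
	if err != nil {
		return err
	}
	for _, fk := range fks {
		fmt.Println("foreign key:", fk.Name)
	}
	return nil
}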
var _ sql.PartitionIter = singlePartitionIter{}

type singlePartitionIter struct {

@@ -394,22 +435,16 @@ func (t *AlterableDoltTable) AddColumn(ctx *sql.Context, column *sql.Column, ord
return err
}

tag := sqleSchema.ExtractTag(column)
if tag == schema.InvalidTag {
// generate a tag if we don't have a user-defined tag
ti, err := typeinfo.FromSqlType(column.Type)
if err != nil {
return err
}

tt, err := root.GenerateTagsForNewColumns(ctx, t.name, []string{column.Name}, []types.NomsKind{ti.NomsKind()})
if err != nil {
return err
}
tag = tt[0]
ti, err := typeinfo.FromSqlType(column.Type)
if err != nil {
return err
}
tags, err := root.GenerateTagsForNewColumns(ctx, t.name, []string{column.Name}, []types.NomsKind{ti.NomsKind()})
if err != nil {
return err
}

col, err := sqleSchema.ToDoltCol(tag, column)
col, err := sqleSchema.ToDoltCol(tags[0], column)
if err != nil {
return err
}
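With ExtractTag gone, AddColumn no longer honors a tag supplied through a column comment; the tag is always generated. A condensed sketch of that step is below; generateTagForNewColumn is an invented helper, the fragment assumes this file's existing imports, and the uint64 tag type is inferred from FmtColTagComment above.

// generateTagForNewColumn returns a fresh tag for a column about to be added
// to tableName, derived from the column name and its NomsKind.
func generateTagForNewColumn(ctx *sql.Context, root *doltdb.RootValue, tableName string, column *sql.Column) (uint64, error) {
	ti, err := typeinfo.FromSqlType(column.Type)
	if err != nil {
		return 0, err
	}
	tags, err := root.GenerateTagsForNewColumns(ctx, tableName, []string{column.Name}, []types.NomsKind{ti.NomsKind()})
	if err != nil {
		return 0, err
	}
	return tags[0], nil
}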
@@ -423,7 +458,7 @@ func (t *AlterableDoltTable) AddColumn(ctx *sql.Context, column *sql.Column, ord
nullable = alterschema.Null
}

updatedTable, err := alterschema.AddColumnToTable(ctx, root, table, t.name, col.Tag, col.Name, col.TypeInfo, nullable, col.Default, orderToOrder(order))
updatedTable, err := alterschema.AddColumnToTable(ctx, root, table, t.name, col.Tag, col.Name, col.TypeInfo, nullable, col.Default, col.Comment, orderToOrder(order))
if err != nil {
return err
}

@@ -521,11 +556,6 @@ func (t *AlterableDoltTable) ModifyColumn(ctx *sql.Context, columnName string, c
panic(fmt.Sprintf("Column %s not found. This is a bug.", columnName))
}

tag := sqleSchema.ExtractTag(column)
if tag != existingCol.Tag && tag != schema.InvalidTag {
return errors.New("cannot change the tag of an existing column")
}

col, err := sqleSchema.ToDoltCol(existingCol.Tag, column)
if err != nil {
return err

@@ -631,6 +661,10 @@ func (t *AlterableDoltTable) RenameIndex(ctx *sql.Context, fromIndexName string,
if err != nil {
return err
}
newTable, err = newTable.RenameIndexRowData(ctx, fromIndexName, toIndexName)
if err != nil {
return err
}

root, err := t.db.GetRoot(ctx)
if err != nil {

@@ -831,45 +865,6 @@ func (t *AlterableDoltTable) DropForeignKey(ctx *sql.Context, fkName string) err
return t.updateFromRoot(ctx, newRoot)
}

// GetForeignKeys implements sql.ForeignKeyTable
func (t *AlterableDoltTable) GetForeignKeys(ctx *sql.Context) ([]sql.ForeignKeyConstraint, error) {
root, err := t.db.GetRoot(ctx)
if err != nil {
return nil, err
}

fkc, err := root.GetForeignKeyCollection(ctx)
if err != nil {
return nil, err
}

declaredFk, _ := fkc.KeysForTable(t.name)
toReturn := make([]sql.ForeignKeyConstraint, len(declaredFk))

for i, fk := range declaredFk {
parent, ok, err := root.GetTable(ctx, fk.ReferencedTableName)
if err != nil {
return nil, err
}
if !ok {
return nil, fmt.Errorf("cannot find table %s "+
"referenced in foreign key %s", fk.ReferencedTableName, fk.Name)
}

parentSch, err := parent.GetSchema(ctx)
if err != nil {
return nil, err
}

toReturn[i], err = toForeignKeyConstraint(fk, t.sch, parentSch)
if err != nil {
return nil, err
}
}

return toReturn, nil
}

func toForeignKeyConstraint(fk doltdb.ForeignKey, childSch, parentSch schema.Schema) (cst sql.ForeignKeyConstraint, err error) {
cst = sql.ForeignKeyConstraint{
Name: fk.Name,
go/libraries/doltcore/sqle/user_space_database.go (new file, 62 lines)
@@ -0,0 +1,62 @@
// Copyright 2020 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package sqle

import (
"github.com/liquidata-inc/go-mysql-server/sql"

"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
)
// UserSpaceDatabase is an implementation of sql.Database for root values. Does not expose any of the internal dolt tables.
type UserSpaceDatabase struct {
*doltdb.RootValue
}

var _ SqlDatabase = (*UserSpaceDatabase)(nil)

func (db *UserSpaceDatabase) Name() string {
return "dolt"
}

func (db *UserSpaceDatabase) GetTableInsensitive(ctx *sql.Context, tableName string) (sql.Table, bool, error) {
if doltdb.HasDoltPrefix(tableName) {
return nil, false, nil
}
table, tableName, ok, err := db.RootValue.GetTableInsensitive(ctx, tableName)
if err != nil {
return nil, false, err
}
if !ok {
return nil, false, nil
}
sch, err := table.GetSchema(ctx)
if err != nil {
return nil, false, err
}
return &DoltTable{name: tableName, table: table, sch: sch, db: db}, true, nil
}

func (db *UserSpaceDatabase) GetTableNames(ctx *sql.Context) ([]string, error) {
tableNames, err := db.RootValue.GetTableNames(ctx)
if err != nil {
return nil, err
}
return filterDoltInternalTables(tableNames), nil
}

func (db *UserSpaceDatabase) GetRoot(*sql.Context) (*doltdb.RootValue, error) {
return db.RootValue, nil
}
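A usage sketch for the new UserSpaceDatabase; listUserTables is an invented helper, and the import paths are the ones already used elsewhere in this change. The wrapper lets a caller resolve user tables from a bare root value while dolt internal tables stay hidden.

package example

import (
	"github.com/liquidata-inc/go-mysql-server/sql"

	"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
	"github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle"
)

// listUserTables returns the user-space table names for a root value by going
// through UserSpaceDatabase, which filters out dolt internal tables.
func listUserTables(ctx *sql.Context, root *doltdb.RootValue) ([]string, error) {
	db := &sqle.UserSpaceDatabase{RootValue: root}
	return db.GetTableNames(ctx)
}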
@@ -22,10 +22,10 @@ import (
"strings"

"github.com/liquidata-inc/dolt/go/cmd/dolt/errhand"

"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
dsqle "github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle/sqlfmt"
"github.com/liquidata-inc/dolt/go/libraries/utils/filesys"
"github.com/liquidata-inc/dolt/go/libraries/utils/iohelp"

@@ -39,6 +39,7 @@ type SqlExportWriter struct {
parentSchs map[string]schema.Schema
foreignKeys []doltdb.ForeignKey
wr io.WriteCloser
root *doltdb.RootValue
writtenFirstRow bool
}

@@ -71,6 +72,7 @@ func OpenSQLExportWriter(ctx context.Context, path string, fs filesys.WritableFS
sch: sch,
parentSchs: allSchemas,
foreignKeys: foreignKeys,
root: root,
wr: wr,
}, nil
}

@@ -87,7 +89,7 @@ func (w *SqlExportWriter) GetSchema() schema.Schema {

// WriteRow will write a row to a table
func (w *SqlExportWriter) WriteRow(ctx context.Context, r row.Row) error {
if err := w.maybeWriteDropCreate(); err != nil {
if err := w.maybeWriteDropCreate(ctx); err != nil {
return err
}

@@ -100,12 +102,17 @@ func (w *SqlExportWriter) WriteRow(ctx context.Context, r row.Row) error {
return iohelp.WriteLine(w.wr, stmt)
}

func (w *SqlExportWriter) maybeWriteDropCreate() error {
func (w *SqlExportWriter) maybeWriteDropCreate(ctx context.Context) error {
if !w.writtenFirstRow {
var b strings.Builder
b.WriteString(sqlfmt.DropTableIfExistsStmt(w.tableName))
b.WriteRune('\n')
b.WriteString(sqlfmt.CreateTableStmtWithTags(w.tableName, w.sch, w.foreignKeys, w.parentSchs))
sqlCtx, engine, _ := dsqle.PrepareCreateTableStmt(ctx, w.root)
createTableStmt, err := dsqle.GetCreateTableStmt(sqlCtx, engine, w.tableName)
if err != nil {
return err
}
b.WriteString(createTableStmt)
if err := iohelp.WriteLine(w.wr, b.String()); err != nil {
return err
}
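The export writer now gets its CREATE TABLE text from the SQL engine instead of formatting the schema with tag comments. A condensed sketch of that flow follows; buildDropCreate is an invented helper, and the error from PrepareCreateTableStmt is ignored exactly as in the hunk above.

package example

import (
	"context"

	"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
	dsqle "github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle"
	"github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle/sqlfmt"
)

// buildDropCreate returns the DROP TABLE / CREATE TABLE prelude for one table,
// using the engine-backed statement generation introduced in this change.
func buildDropCreate(ctx context.Context, root *doltdb.RootValue, tableName string) (string, error) {
	sqlCtx, engine, _ := dsqle.PrepareCreateTableStmt(ctx, root)
	createStmt, err := dsqle.GetCreateTableStmt(sqlCtx, engine, tableName)
	if err != nil {
		return "", err
	}
	return sqlfmt.DropTableIfExistsStmt(tableName) + "\n" + createStmt, nil
}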
@@ -117,7 +124,7 @@ func (w *SqlExportWriter) maybeWriteDropCreate() error {
// Close should flush all writes, release resources being held
func (w *SqlExportWriter) Close(ctx context.Context) error {
// exporting an empty table will not get any WriteRow calls, so write the drop / create here
if err := w.maybeWriteDropCreate(); err != nil {
if err := w.maybeWriteDropCreate(ctx); err != nil {
return err
}

@@ -21,11 +21,15 @@ import (
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

"github.com/liquidata-inc/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/dtestutils"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema/encoding"
"github.com/liquidata-inc/dolt/go/libraries/doltcore/sqle/sqlfmt"
"github.com/liquidata-inc/dolt/go/store/types"
)

type StringBuilderCloser struct {

@@ -47,7 +51,7 @@ func TestEndToEnd(t *testing.T) {
id := uuid.MustParse("00000000-0000-0000-0000-000000000000")
tableName := "people"

dropCreateStatement := sqlfmt.DropTableIfExistsStmt(tableName) + "\n" + sqlfmt.CreateTableStmtWithTags(tableName, dtestutils.TypedSchema, nil, nil)
dropCreateStatement := sqlfmt.DropTableIfExistsStmt(tableName) + "\nCREATE TABLE `people` (\n `id` char(36) character set ascii collate ascii_bin NOT NULL,\n `name` longtext NOT NULL,\n `age` bigint unsigned NOT NULL,\n `is_married` bit(1) NOT NULL,\n `title` longtext,\n PRIMARY KEY (`id`),\n KEY `idx_name` (`name`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;"

type test struct {
name string

@@ -78,18 +82,33 @@ func TestEndToEnd(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ctx := context.Background()
dEnv := dtestutils.CreateTestEnv()
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)

schVal, err := encoding.MarshalSchemaAsNomsValue(ctx, root.VRW(), tt.sch)
require.NoError(t, err)
emptyMap, err := types.NewMap(ctx, root.VRW())
require.NoError(t, err)
tbl, err := doltdb.NewTable(ctx, root.VRW(), schVal, emptyMap, nil)
require.NoError(t, err)
root, err = root.PutTable(ctx, tableName, tbl)
require.NoError(t, err)

var stringWr StringBuilderCloser
w := &SqlExportWriter{
tableName: tableName,
sch: tt.sch,
wr: &stringWr,
root: root,
}

for _, r := range tt.rows {
assert.NoError(t, w.WriteRow(context.Background(), r))
assert.NoError(t, w.WriteRow(ctx, r))
}

assert.NoError(t, w.Close(context.Background()))
assert.NoError(t, w.Close(ctx))
assert.Equal(t, tt.expectedOutput, stringWr.String())
})
}