Merge pull request #1248 from dolthub/zachmu/in-place-typechange

Hidden command for changing the type of a column in place.
This commit is contained in:
Zach Musgrave
2021-01-27 11:02:09 -08:00
committed by GitHub
81 changed files with 652 additions and 495 deletions
+13
View File
@@ -129,6 +129,19 @@ DELIM
[[ "$output" =~ "Error creating reader" ]] || false
}
# Importing a CSV whose header has two columns differing only by case
# ("abc" vs "Abc") must be rejected: dolt column names collide
# case-insensitively, so the inferred schema is invalid.
@test "try to create a table with duplicate column names" {
    cat <<CSV > duplicate-names.csv
pk,abc,Abc
1,2,3
4,5,6
CSV
    run dolt table import -c --pk=pk test duplicate-names.csv
    # Import must fail with a non-zero exit code.
    [ "$status" -eq 1 ]
    # The error should mention the offending column name and call the schema invalid.
    [[ "$output" =~ "name" ]] || false
    [[ "$output" =~ "invalid schema" ]] || false
}
@test "try to create a table with dolt table import with a bad file name" {
run dolt table import -c test `batshelper bad.data`
[ "$status" -eq 1 ]
+93
View File
@@ -134,3 +134,96 @@ SQL
run dolt sql -q "INSERT INTO test2 (pk1, pk2new) VALUES (1, null)"
[ "$status" -eq 1 ]
}
# End-to-end exercise of the hidden `dolt schema change-type` command:
# change column types in place (no row data rewritten), then verify that
# diff, commit, merge, push, clone, and pull all stay consistent across
# the schema change.
@test "changing column types in place works" {
    dolt sql <<SQL
CREATE TABLE test2(
pk1 BIGINT,
pk2 BIGINT,
v1 VARCHAR(100) NOT NULL,
v2 VARCHar(120) NULL,
PRIMARY KEY(pk1, pk2)
);
SQL
    run dolt sql -q "INSERT INTO test2 (pk1, pk2, v1, v2) VALUES (1, 1, 'abc', 'def')"
    [ "$status" -eq 0 ]
    dolt add .
    dolt commit -m "Created table with one row"
    dolt branch original
    # push to a file based remote, clone a copy to pull to later
    mkdir remotedir
    dolt remote add origin file://remotedir
    dolt push origin master
    dolt clone file://remotedir original
    # Table and column names are intentionally given in mixed case here:
    # change-type must match them case-insensitively.
    dolt schema change-type Test2 V1 'varchar(300)'
    dolt schema change-type TEST2 PK2 'tinyint'
    dolt schema change-type Test2 V2 'varchar(1024)'
    # The working diff should show each type change as an old/new line pair.
    run dolt diff
    [ "$status" -eq 0 ]
    [[ "$output" =~ '< `pk2` BIGINT NOT NULL' ]] || false
    [[ "$output" =~ '> `pk2` TINYINT NOT NULL' ]] || false
    [[ "$output" =~ '< `v1` VARCHAR(100) NOT NULL' ]] || false
    [[ "$output" =~ '> `v1` VARCHAR(300) NOT NULL' ]] || false
    [[ "$output" =~ '< `v2` VARCHAR(120)' ]] || false
    [[ "$output" =~ '> `v2` VARCHAR(1024)' ]] || false
    [[ "$output" =~ 'PRIMARY KEY' ]] || false
    dolt add .
    dolt commit -m "Changed types"
    # On the pre-change branch, add a second row, then merge the type change in.
    dolt checkout original
    # NOTE(review): this insert's $status is never asserted — presumably it
    # should be checked like the first INSERT above; confirm intent.
    run dolt sql -q "INSERT INTO test2 (pk1, pk2, v1, v2) VALUES (2, 2, 'abc', 'def')"
    dolt diff
    dolt add .
    dolt commit -m "Created table with one row"
    dolt merge master
    # After the merge the changed types win and both rows survive.
    run dolt sql -q 'show create table test2'
    [ "$status" -eq 0 ]
    [[ "$output" =~ '`pk2` tinyint NOT NULL' ]] || false
    [[ "$output" =~ '`v1` varchar(300) NOT NULL' ]] || false
    run dolt sql -q 'select * from test2' -r csv
    [ "$status" -eq 0 ]
    # 3 lines = CSV header + 2 data rows
    [ "${#lines[@]}" -eq 3 ]
    [[ "$output" =~ '1,1,abc,def' ]] || false
    [[ "$output" =~ '2,2,abc,def' ]] || false
    dolt add .
    dolt commit -m "merge master"
    # push to remote
    dolt checkout master
    dolt merge original
    dolt push origin master
    # pull from the remote and make sure there's no issue
    cd original
    dolt pull
    run dolt sql -q 'show create table test2'
    [ "$status" -eq 0 ]
    [[ "$output" =~ '`pk2` tinyint NOT NULL' ]] || false
    [[ "$output" =~ '`v1` varchar(300) NOT NULL' ]] || false
    run dolt sql -q 'select * from test2' -r csv
    [ "$status" -eq 0 ]
    # 3 lines = CSV header + 2 data rows
    [ "${#lines[@]}" -eq 3 ]
    [[ "$output" =~ '1,1,abc,def' ]] || false
    [[ "$output" =~ '2,2,abc,def' ]] || false
    # make sure diff works as expected for schema change on clone
    run dolt diff HEAD~2
    [ "$status" -eq 0 ]
    [[ "$output" =~ '< `pk2` BIGINT NOT NULL' ]] || false
    [[ "$output" =~ '> `pk2` TINYINT NOT NULL' ]] || false
    [[ "$output" =~ '< `v1` VARCHAR(100) NOT NULL' ]] || false
    [[ "$output" =~ '> `v1` VARCHAR(300) NOT NULL' ]] || false
    [[ "$output" =~ '< `v2` VARCHAR(120)' ]] || false
    [[ "$output" =~ '> `v2` VARCHAR(1024)' ]] || false
    [[ "$output" =~ 'PRIMARY KEY' ]] || false
}
+12
View File
@@ -327,3 +327,15 @@ JSON
[[ ! "$output" =~ "\`b\`" ]] || false
[[ ! "$output" =~ "\`c\`" ]] || false
}
# Schema import must reject a CSV header containing two columns that
# collide case-insensitively ("abc" vs "Abc").
@test "failed import, duplicate column name" {
    cat <<CSV > import.csv
abc,Abc,d
1,2,3
4,5,6
CSV
    run dolt schema import -c -pks=abc test import.csv
    # Import must fail with a non-zero exit code.
    [ "$status" -eq 1 ]
    # The error should mention the duplicate column name and call the schema invalid.
    [[ "$output" =~ "name" ]] || false
    [[ "$output" =~ "invalid schema" ]] || false
}
+39
View File
@@ -370,3 +370,42 @@ SQL
[[ "$output" =~ "\`v1\` bigint COMMENT 'other'" ]] || false
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
}
# Duplicate column names — including case-insensitive duplicates — must be
# rejected both at CREATE TABLE time and by every ALTER TABLE path that
# could introduce one (rename column, add column, change column).
@test "sql-create-tables: duplicate column errors" {
    # Case-insensitive duplicate ("a" vs "A") in CREATE TABLE
    run dolt sql <<SQL
CREATE TABLE test1 (
a bigint primary key,
A bigint
);
SQL
    [ "$status" -eq 1 ]
    [[ "$output" =~ "same name" ]] || false
    # Exact duplicate in CREATE TABLE
    run dolt sql <<SQL
CREATE TABLE test1 (
a bigint primary key,
a bigint
);
SQL
    [ "$status" -eq 1 ]
    [[ "$output" =~ "same name" ]] || false
    # Valid two-column table used to exercise the ALTER TABLE paths below
    dolt sql <<SQL
CREATE TABLE test1 (
a bigint primary key,
b bigint
);
SQL
    # RENAME COLUMN onto an existing name must fail
    run dolt sql -q "alter table test1 rename column b to a"
    [ "$status" -eq 1 ]
    [[ "$output" =~ "name" ]] || false
    # ADD COLUMN colliding case-insensitively with "a" must fail
    run dolt sql -q "alter table test1 add column A int"
    [ "$status" -eq 1 ]
    [[ "$output" =~ "name" ]] || false
    # CHANGE COLUMN renaming "b" onto a case-insensitive duplicate must fail
    run dolt sql -q "alter table test1 change column b A bigint"
    [ "$status" -eq 1 ]
    [[ "$output" =~ "name" ]] || false
}
+2 -2
View File
@@ -53,7 +53,7 @@ func TestParseKeyValues(t *testing.T) {
mnColTag = 2
)
testKeyColColl, _ := schema.NewColCollection(
testKeyColColl := schema.NewColCollection(
schema.NewColumn(lnColName, lnColTag, types.StringKind, true),
schema.NewColumn(fnColName, fnColTag, types.StringKind, true),
schema.NewColumn(mnColName, mnColTag, types.StringKind, true),
@@ -62,7 +62,7 @@ func TestParseKeyValues(t *testing.T) {
sch, err := schema.SchemaFromCols(testKeyColColl)
require.NoError(t, err)
singleKeyColColl, _ := schema.NewColCollection(
singleKeyColColl := schema.NewColCollection(
schema.NewColumn(lnColName, lnColTag, types.StringKind, true),
)
+2 -2
View File
@@ -524,7 +524,7 @@ func tabularSchemaDiff(ctx context.Context, td diff.TableDelta, fromSchemas, toS
}
}
if !schema.ColCollsAreEqual(fromSch.GetPKCols(), toSch.GetPKCols()) {
if !schema.ColCollsAreCompatible(fromSch.GetPKCols(), toSch.GetPKCols()) {
panic("primary key sets must be the same")
}
pkStr := strings.Join(fromSch.GetPKCols().GetColumnNames(), ", ")
@@ -655,7 +655,7 @@ func dumbDownSchema(in schema.Schema) (schema.Schema, error) {
return nil, err
}
dumbColColl, _ := schema.NewColCollection(dumbCols...)
dumbColColl := schema.NewColCollection(dumbCols...)
return schema.SchemaFromCols(dumbColColl)
}
+2 -2
View File
@@ -71,12 +71,12 @@ func doltSchWithPKFromSqlSchema(sch sql.Schema) schema.Schema {
dSch, _ := sqlutil.ToDoltResultSchema(sch)
// make the first col the PK
pk := false
newCC, _ := schema.MapColCollection(dSch.GetAllCols(), func(col schema.Column) (column schema.Column, err error) {
newCC := schema.MapColCollection(dSch.GetAllCols(), func(col schema.Column) schema.Column {
if !pk {
col.IsPartOfPK = true
pk = true
}
return col, nil
return col
})
return schema.MustSchemaFromCols(newCC)
}
+173
View File
@@ -0,0 +1,173 @@
// Copyright 2021 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package schcmds
import (
"context"
"fmt"
"strconv"
"strings"
"github.com/dolthub/go-mysql-server/sql"
"github.com/dolthub/vitess/go/sqltypes"
"github.com/dolthub/dolt/go/cmd/dolt/cli"
commands "github.com/dolthub/dolt/go/cmd/dolt/commands"
"github.com/dolthub/dolt/go/cmd/dolt/errhand"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/schema/typeinfo"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/libraries/utils/filesys"
)
// ChangeTypeCmd is a hidden maintenance command that rewrites a column's
// declared type in a table's schema without touching any row data. This is
// unsafe in general; it is intended for safe changes such as widening a type.
type ChangeTypeCmd struct{}

// Hidden returns true: the command is omitted from user-facing help because
// an in-place type change can produce inconsistent data if misused.
func (cmd ChangeTypeCmd) Hidden() bool {
	return true
}

// Name returns the string used to invoke this command from the CLI.
func (cmd ChangeTypeCmd) Name() string {
	return "change-type"
}

// Description returns a short human-readable summary of the command.
func (cmd ChangeTypeCmd) Description() string {
	return "Changes the type of a column in place"
}
// Exec implements cli.Command. It changes the declared type of a single
// column in the named table's schema in place — no row data is rewritten —
// and stores the result in the working root.
//
// Usage: change-type <table> <column> <type>
//
// Table and column names are matched case-insensitively. Returns 0 on
// success and 1 on any error (bad arguments, missing table or column,
// schema or root update failure).
func (cmd ChangeTypeCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
	ap := cmd.createArgParser()
	help, usage := cli.HelpAndUsagePrinters(cli.GetCommandDocumentation(commandStr, tblSchemaDocs, ap))
	apr := cli.ParseArgs(ap, args, help)

	if apr.NArg() < 3 {
		cli.PrintUsage("change-type <table> <column> <type>",
			[]string{"Changes the type of a column in place, without modifying any row data.\n" +
				"This is an unsafe operation in general, but widening a type is safe.\n" +
				"Only VARCHAR and INTEGER types are currently supported.\n"},
			ap)
		return 1
	}

	tableName, column, typ := apr.Arg(0), apr.Arg(1), apr.Arg(2)

	root, verr := commands.GetWorkingWithVErr(dEnv)
	if verr != nil {
		return commands.HandleVErrAndExitCode(verr, usage)
	}

	// Case-insensitive lookup; tableCase carries the stored casing so the
	// table is written back under its canonical name.
	table, tableCase, ok, err := root.GetTableInsensitive(ctx, tableName)
	if err != nil {
		return commands.HandleVErrAndExitCode(errhand.BuildDError("unable to get table '%s'", tableName).AddCause(err).Build(), usage)
	} else if !ok {
		return commands.HandleVErrAndExitCode(errhand.BuildDError("couldn't find table '%s'", tableName).Build(), usage)
	}

	sch, err := table.GetSchema(ctx)
	if err != nil {
		return commands.HandleVErrAndExitCode(errhand.BuildDError("unable to get table '%s'", tableName).AddCause(err).Build(), usage)
	}

	// Rebuild the column collection, swapping in the new type for the named
	// column. Column names are matched case-insensitively.
	found := false
	cols := make([]schema.Column, sch.GetAllCols().Size())
	i := 0
	sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
		if strings.EqualFold(col.Name, column) {
			cols[i] = alterColumnType(typ, col)
			found = true
		} else {
			cols[i] = col
		}
		i++
		return false, nil
	})

	// A misspelled column name previously left the schema unchanged and
	// reported success; report the miss explicitly instead.
	if !found {
		return commands.HandleVErrAndExitCode(errhand.BuildDError("couldn't find column '%s' in table '%s'", column, tableName).Build(), usage)
	}

	collection := schema.NewColCollection(cols...)
	newSch, err := schema.SchemaFromCols(collection)
	if err != nil {
		return commands.HandleVErrAndExitCode(errhand.BuildDError("unable to create new schema '%s'", tableName).AddCause(err).Build(), usage)
	}

	newTable, err := table.UpdateSchema(ctx, newSch)
	if err != nil {
		return commands.HandleVErrAndExitCode(errhand.BuildDError("unable to create new schema '%s'", tableName).AddCause(err).Build(), usage)
	}

	root, err = root.PutTable(ctx, tableCase, newTable)
	if err != nil {
		return commands.HandleVErrAndExitCode(errhand.BuildDError("unable to write new table '%s'", tableName).AddCause(err).Build(), usage)
	}

	return commands.HandleVErrAndExitCode(commands.UpdateWorkingWithVErr(dEnv, root), usage)
}
// alterColumnType returns a copy of col with its TypeInfo replaced by the
// type named in typ (case-insensitive). Only VARCHAR(N) and the MySQL
// integer types (signed and unsigned) are currently supported.
//
// It panics on an unsupported or malformed type string: this backs a hidden
// maintenance command, so a panic with a clear message is the intended
// error handling here.
func alterColumnType(typ string, col schema.Column) schema.Column {
	nc := col
	typ = strings.ToLower(typ)

	switch {
	// TODO: support for other types, nullability
	case strings.HasPrefix(typ, "varchar"):
		// Validate the exact form "varchar(N)" before slicing, so malformed
		// input like "varchar" yields a useful message instead of a raw
		// slice-bounds panic.
		if !strings.HasPrefix(typ, "varchar(") || !strings.HasSuffix(typ, ")") {
			panic(fmt.Sprintf("malformed type %s, expected varchar(N)", typ))
		}
		lengthStr := typ[len("varchar(") : len(typ)-1]
		length, err := strconv.ParseInt(lengthStr, 10, 64)
		if err != nil {
			panic(err)
		}
		// Preserve the existing collation; the current column must already
		// be a string type for that to make sense.
		ti, ok := col.TypeInfo.ToSqlType().(sql.StringType)
		if !ok {
			panic(fmt.Sprintf("column %s is not a string type", col.Name))
		}
		sqlType := sql.MustCreateString(sqltypes.VarChar, length, ti.Collation())
		nc.TypeInfo, err = typeinfo.FromSqlType(sqlType)
		if err != nil {
			panic(err)
		}
	case typ == "tinyint":
		nc.TypeInfo = typeinfo.Int8Type
	case typ == "smallint":
		nc.TypeInfo = typeinfo.Int16Type
	case typ == "mediumint":
		nc.TypeInfo = typeinfo.Int24Type
	case typ == "int", typ == "integer":
		nc.TypeInfo = typeinfo.Int32Type
	case typ == "bigint":
		nc.TypeInfo = typeinfo.Int64Type
	case typ == "tinyint unsigned":
		nc.TypeInfo = typeinfo.Uint8Type
	case typ == "smallint unsigned":
		nc.TypeInfo = typeinfo.Uint16Type
	case typ == "mediumint unsigned":
		nc.TypeInfo = typeinfo.Uint24Type
	case typ == "int unsigned", typ == "integer unsigned":
		nc.TypeInfo = typeinfo.Uint32Type
	case typ == "bigint unsigned":
		nc.TypeInfo = typeinfo.Uint64Type
	default:
		panic(fmt.Sprintf("unsupported type %s", typ))
	}

	return nc
}
// createArgParser builds the argument parser for change-type, registering
// help text for each of the three positional arguments.
func (cmd ChangeTypeCmd) createArgParser() *argparser.ArgParser {
	ap := argparser.NewArgParser()
	for _, help := range [][2]string{
		{"table", "table(s) whose schema is being changed"},
		{"column", "column whose type is being changed"},
		{"type", "new column type as a SQL type string"},
	} {
		ap.ArgListHelp = append(ap.ArgListHelp, help)
	}
	return ap
}
// CreateMarkdown implements cli.Command. It is a no-op: as a hidden
// command, change-type produces no markdown documentation file.
func (cmd ChangeTypeCmd) CreateMarkdown(fs filesys.Filesys, path, commandStr string) error {
	return nil
}

// Compile-time assertion that ChangeTypeCmd satisfies cli.Command.
var _ cli.Command = ChangeTypeCmd{}
+25 -22
View File
@@ -413,10 +413,13 @@ func CombineColCollections(ctx context.Context, root *doltdb.RootValue, inferred
return nil, errhand.BuildDError("failed to generate new schema").AddCause(err).Build()
}
combined, err := oldCols.AppendColl(newCols)
combined := oldCols.AppendColl(newCols)
err = schema.ValidateForInsert(combined)
if err != nil {
return nil, errhand.BuildDError("failed to generate new schema").AddCause(err).Build()
return nil, errhand.BuildDError("invalid schema").AddCause(err).Build()
}
sch, err := schema.SchemaFromCols(combined)
if err != nil {
return nil, errhand.BuildDError("failed to get schema from cols").AddCause(err).Build()
@@ -426,9 +429,9 @@ func CombineColCollections(ctx context.Context, root *doltdb.RootValue, inferred
func columnsForSchemaCreate(inferredCols *schema.ColCollection, pkNames []string) (newCols *schema.ColCollection) {
pks := set.NewStrSet(pkNames)
newCols, _ = schema.MapColCollection(inferredCols, func(col schema.Column) (schema.Column, error) {
newCols = schema.MapColCollection(inferredCols, func(col schema.Column) schema.Column {
col.IsPartOfPK = pks.Contains(col.Name)
return col, nil
return col
})
return newCols
}
@@ -453,15 +456,15 @@ func columnsForSchemaUpdate(existingCols, inferredCols *schema.ColCollection, ke
if keepTypes {
oldCols = existingCols
newCols, _ = schema.FilterColCollection(inferredCols, func(col schema.Column) (bool, error) {
return right.Contains(col.Name), nil
newCols = schema.FilterColCollection(inferredCols, func(col schema.Column) bool {
return right.Contains(col.Name)
})
} else {
oldCols, _ = schema.FilterColCollection(existingCols, func(col schema.Column) (bool, error) {
return left.Contains(col.Name) || sameType.Contains(col.Name), nil
oldCols = schema.FilterColCollection(existingCols, func(col schema.Column) bool {
return left.Contains(col.Name) || sameType.Contains(col.Name)
})
newCols, _ = schema.FilterColCollection(inferredCols, func(col schema.Column) (bool, error) {
return !sameType.Contains(col.Name), nil
newCols = schema.FilterColCollection(inferredCols, func(col schema.Column) bool {
return !sameType.Contains(col.Name)
})
}
@@ -492,18 +495,18 @@ func columnsForSchemaReplace(existingCols, inferredCols *schema.ColCollection, k
})
if keepTypes {
oldCols, _ = schema.FilterColCollection(existingCols, func(col schema.Column) (bool, error) {
return inter.Contains(col.Name), nil
oldCols = schema.FilterColCollection(existingCols, func(col schema.Column) bool {
return inter.Contains(col.Name)
})
newCols, _ = schema.FilterColCollection(inferredCols, func(col schema.Column) (bool, error) {
return right.Contains(col.Name), nil
newCols = schema.FilterColCollection(inferredCols, func(col schema.Column) bool {
return right.Contains(col.Name)
})
} else {
oldCols, _ = schema.FilterColCollection(existingCols, func(col schema.Column) (bool, error) {
return sameType.Contains(col.Name), nil
oldCols = schema.FilterColCollection(existingCols, func(col schema.Column) bool {
return sameType.Contains(col.Name)
})
newCols, _ = schema.FilterColCollection(inferredCols, func(col schema.Column) (bool, error) {
return !sameType.Contains(col.Name), nil
newCols = schema.FilterColCollection(inferredCols, func(col schema.Column) bool {
return !sameType.Contains(col.Name)
})
}
@@ -526,11 +529,11 @@ func verifyPKsUnchanged(existingCols, oldCols, newCols *schema.ColCollection) er
return errhand.VerboseErrorFromError(err)
}
existingPKs, _ := schema.FilterColCollection(existingCols, func(col schema.Column) (b bool, err error) {
return col.IsPartOfPK, nil
existingPKs := schema.FilterColCollection(existingCols, func(col schema.Column) bool {
return col.IsPartOfPK
})
newPKs, _ := schema.FilterColCollection(oldCols, func(col schema.Column) (b bool, err error) {
return col.IsPartOfPK, nil
newPKs := schema.FilterColCollection(oldCols, func(col schema.Column) bool {
return col.IsPartOfPK
})
if !schema.ColCollsAreEqual(existingPKs, newPKs) {
return errhand.BuildDError("input primary keys do not match primary keys of existing table").Build()
+1
View File
@@ -23,6 +23,7 @@ import (
var Commands = cli.NewSubCommandHandler("schema", "Commands for showing and importing table schemas.", []cli.Command{
ExportCmd{},
ImportCmd{},
ChangeTypeCmd{},
ShowCmd{},
TagsCmd{},
})
+1 -1
View File
@@ -134,7 +134,7 @@ func CreateTestTable(vrw types.ValueReadWriter, tSchema schema.Schema, rowData t
}
func createTestDocsSchema() schema.Schema {
typedColColl, _ := schema.NewColCollection(
typedColColl := schema.NewColCollection(
schema.NewColumn(doltdb.DocPkColumnName, schema.DocNameTag, types.StringKind, true, schema.NotNullConstraint{}),
schema.NewColumn(doltdb.DocTextColumnName, schema.DocTextTag, types.StringKind, false),
)
@@ -43,8 +43,8 @@ func TestDiffSchemas(t *testing.T) {
schema.NewColumn("added", 6, types.StringKind, false),
}
oldColColl, _ := schema.NewColCollection(oldCols...)
newColColl, _ := schema.NewColCollection(newCols...)
oldColColl := schema.NewColCollection(oldCols...)
newColColl := schema.NewColCollection(newCols...)
oldSch, err := schema.SchemaFromCols(oldColColl)
require.NoError(t, err)
+1 -1
View File
@@ -52,7 +52,7 @@ var id2, _ = uuid.NewRandom()
var id3, _ = uuid.NewRandom()
func createTestSchema(t *testing.T) schema.Schema {
colColl, _ := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("id", idTag, types.UUIDKind, true, schema.NotNullConstraint{}),
schema.NewColumn("first", firstTag, types.StringKind, false, schema.NotNullConstraint{}),
schema.NewColumn("last", lastTag, types.StringKind, false, schema.NotNullConstraint{}),
+3 -3
View File
@@ -213,11 +213,11 @@ func (root *RootValue) GenerateTagsForNewColColl(ctx context.Context, tableName
}
idx := 0
return schema.MapColCollection(cc, func(col schema.Column) (column schema.Column, err error) {
return schema.MapColCollection(cc, func(col schema.Column) schema.Column {
col.Tag = newTags[idx]
idx++
return col, nil
})
return col
}), nil
}
// GenerateTagsForNewColumns deterministically generates a slice of new tags that are unique within the history of this root. The names and NomsKinds of
+2 -2
View File
@@ -31,11 +31,11 @@ var ErrDocsUpdate = errors.New("error updating local docs")
var ErrEmptyDocsTable = errors.New("error: All docs removed. Removing Docs Table")
var ErrMarshallingSchema = errors.New("error marshalling schema")
var doltDocsColumns, _ = schema.NewColCollection(
var doltDocsColumns = schema.NewColCollection(
schema.NewColumn(doltdb.DocPkColumnName, schema.DocNameTag, types.StringKind, true, schema.NotNullConstraint{}),
schema.NewColumn(doltdb.DocTextColumnName, schema.DocTextTag, types.StringKind, false),
)
var DoltDocsSchema = schema.MustSchemaFromCols(doltDocsColumns)
var Schema = schema.MustSchemaFromCols(doltDocsColumns)
type Doc struct {
Text []byte
+3 -3
View File
@@ -79,7 +79,7 @@ func updateDocsTable(ctx context.Context, docTbl *doltdb.Table, docs Docs) (*dol
// createDocsTable creates a new in memory table that stores the given doc details.
func createDocsTable(ctx context.Context, vrw types.ValueReadWriter, docs Docs) (*doltdb.Table, error) {
imt := table.NewInMemTable(DoltDocsSchema)
imt := table.NewInMemTable(Schema)
// Determines if the table needs to be created at all and initializes a schema if it does.
createTable := false
@@ -91,7 +91,7 @@ func createDocsTable(ctx context.Context, vrw types.ValueReadWriter, docs Docs)
schema.DocTextTag: types.String(doc.Text),
}
docRow, err := row.New(types.Format_7_18, DoltDocsSchema, docTaggedVals)
docRow, err := row.New(types.Format_7_18, Schema, docTaggedVals)
if err != nil {
return nil, err
}
@@ -104,7 +104,7 @@ func createDocsTable(ctx context.Context, vrw types.ValueReadWriter, docs Docs)
if createTable {
rd := table.NewInMemTableReader(imt)
wr := noms.NewNomsMapCreator(context.Background(), vrw, DoltDocsSchema)
wr := noms.NewNomsMapCreator(context.Background(), vrw, Schema)
_, _, err := table.PipeRows(context.Background(), rd, wr, false)
if err != nil {
+1 -1
View File
@@ -147,7 +147,7 @@ func CreateTestTable(vrw types.ValueReadWriter, tSchema schema.Schema, rowData t
}
func createTestDocsSchema() schema.Schema {
typedColColl, _ := schema.NewColCollection(
typedColColl := schema.NewColCollection(
schema.NewColumn(doltdb.DocPkColumnName, schema.DocNameTag, types.StringKind, true, schema.NotNullConstraint{}),
schema.NewColumn(doltdb.DocTextColumnName, schema.DocTextTag, types.StringKind, false),
)
+3 -3
View File
@@ -51,7 +51,7 @@ const (
IndexName = "idx_name"
)
var typedColColl, _ = schema.NewColCollection(
var typedColColl = schema.NewColCollection(
schema.NewColumn("id", IdTag, types.UUIDKind, true, schema.NotNullConstraint{}),
schema.NewColumn("name", NameTag, types.StringKind, false, schema.NotNullConstraint{}),
schema.NewColumn("age", AgeTag, types.UintKind, false, schema.NotNullConstraint{}),
@@ -163,11 +163,11 @@ func AddColToRows(t *testing.T, rs []row.Row, tag uint64, val types.Value) []row
return rs
}
colColl, err := schema.NewColCollection(schema.NewColumn("unused", tag, val.Kind(), false))
require.NoError(t, err)
colColl := schema.NewColCollection(schema.NewColumn("unused", tag, val.Kind(), false))
fakeSch := schema.UnkeyedSchemaFromCols(colColl)
newRows := make([]row.Row, len(rs))
var err error
for i, r := range rs {
newRows[i], err = r.SetColVal(tag, val, fakeSch)
require.NoError(t, err)
+4 -14
View File
@@ -35,7 +35,7 @@ import (
// CreateSchema returns a schema from the columns given, panicking on any errors.
func CreateSchema(columns ...schema.Column) schema.Schema {
colColl, _ := schema.NewColCollection(columns...)
colColl := schema.NewColCollection(columns...)
return schema.MustSchemaFromCols(colColl)
}
@@ -58,10 +58,7 @@ func NewRow(sch schema.Schema, values ...types.Value) row.Row {
// schema, e.g. tag collision.
func AddColumnToSchema(sch schema.Schema, col schema.Column) schema.Schema {
columns := sch.GetAllCols()
columns, err := columns.Append(col)
if err != nil {
panic(err)
}
columns = columns.Append(col)
return schema.MustSchemaFromCols(columns)
}
@@ -80,10 +77,7 @@ func RemoveColumnFromSchema(sch schema.Schema, tagToRemove uint64) schema.Schema
panic(err)
}
columns, err := schema.NewColCollection(newCols...)
if err != nil {
panic(err)
}
columns := schema.NewColCollection(newCols...)
return schema.MustSchemaFromCols(columns)
}
@@ -145,11 +139,7 @@ func MustSchema(cols ...schema.Column) schema.Schema {
}
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
panic(err)
}
colColl := schema.NewColCollection(cols...)
if !hasPKCols {
return schema.UnkeyedSchemaFromCols(colColl)
+1 -1
View File
@@ -130,7 +130,7 @@ func (inf *inferrer) inferColumnTypes(ctx context.Context, root *doltdb.RootValu
return false, nil
})
return schema.NewColCollection(cols...)
return schema.NewColCollection(cols...), nil
}
func (inf *inferrer) sinkRow(p *pipeline.Pipeline, ch <-chan pipeline.RowWithProps, badRowChan chan<- *pipeline.TransformRowFailure) {
@@ -509,9 +509,3 @@ func TestInferSchema(t *testing.T) {
})
}
}
func mustColColl(t *testing.T, cols ...schema.Column) *schema.ColCollection {
cc, err := schema.NewColCollection(cols...)
require.NoError(t, err)
return cc
}
@@ -74,11 +74,7 @@ var createPeopleTable = `
);`
func columnCollection(cols ...schema.Column) *schema.ColCollection {
pcc, err := schema.NewColCollection(cols...)
if err != nil {
panic(err)
}
return pcc
return schema.NewColCollection(cols...)
}
func newRow(vals row.TaggedValues, cc *schema.ColCollection) row.Row {
+10 -13
View File
@@ -192,10 +192,7 @@ func ForeignKeysMerge(ctx context.Context, mergedRoot, ourRoot, theirRoot, ancRo
func mergeColumns(ourCC, theirCC, ancCC *schema.ColCollection) (merged *schema.ColCollection, conflicts []ColConflict, err error) {
var common *schema.ColCollection
common, conflicts, err = columnsInCommon(ourCC, theirCC, ancCC)
if err != nil {
return nil, nil, err
}
common, conflicts = columnsInCommon(ourCC, theirCC, ancCC)
ourNewCols := schema.ColCollectionSetDifference(ourCC, ancCC)
theirNewCols := schema.ColCollectionSetDifference(theirCC, ancCC)
@@ -228,8 +225,8 @@ func mergeColumns(ourCC, theirCC, ancCC *schema.ColCollection) (merged *schema.C
return merged, conflicts, nil
}
func columnsInCommon(ourCC, theirCC, ancCC *schema.ColCollection) (common *schema.ColCollection, conflicts []ColConflict, err error) {
common, _ = schema.NewColCollection()
func columnsInCommon(ourCC, theirCC, ancCC *schema.ColCollection) (common *schema.ColCollection, conflicts []ColConflict) {
common = schema.NewColCollection()
_ = ourCC.Iter(func(tag uint64, ourCol schema.Column) (stop bool, err error) {
theirCol, ok := theirCC.GetByTag(ourCol.Tag)
if !ok {
@@ -237,8 +234,8 @@ func columnsInCommon(ourCC, theirCC, ancCC *schema.ColCollection) (common *schem
}
if ourCol.Equals(theirCol) {
common, err = common.Append(ourCol)
return false, err
common = common.Append(ourCol)
return false, nil
}
ancCol, ok := ancCC.GetByTag(ourCol.Tag)
@@ -262,9 +259,9 @@ func columnsInCommon(ourCC, theirCC, ancCC *schema.ColCollection) (common *schem
Theirs: col,
})
} else {
common, err = common.Append(ourCol)
common = common.Append(ourCol)
}
return false, err
return false, nil
}
if ancCol.Equals(ourCol) {
@@ -277,9 +274,9 @@ func columnsInCommon(ourCC, theirCC, ancCC *schema.ColCollection) (common *schem
Theirs: theirCol,
})
} else {
common, err = common.Append(theirCol)
common = common.Append(theirCol)
}
return false, err
return false, nil
}
// col modified on our branch and their branch with different def
@@ -291,7 +288,7 @@ func columnsInCommon(ourCC, theirCC, ancCC *schema.ColCollection) (common *schem
return false, nil
})
return common, conflicts, err
return common, conflicts
}
// assumes indexes are unique over their column sets
+2 -2
View File
@@ -111,7 +111,7 @@ func createRowMergeStruct(name string, vals, mergeVals, ancVals, expected []type
cols[tag] = schema.NewColumn(strconv.FormatInt(int64(tag), 10), uint64(tag), val.Kind(), false)
}
colColl, _ := schema.NewColCollection(cols...)
colColl := schema.NewColCollection(cols...)
sch := schema.MustSchemaFromCols(colColl)
tpl := valsToTestTupleWithPks(vals)
@@ -225,7 +225,7 @@ const (
titleTag = 1
)
var colColl, _ = schema.NewColCollection(
var colColl = schema.NewColCollection(
schema.NewColumn("id", idTag, types.UUIDKind, true, schema.NotNullConstraint{}),
schema.NewColumn("name", nameTag, types.StringKind, false, schema.NotNullConstraint{}),
schema.NewColumn("title", titleTag, types.StringKind, false),
@@ -421,11 +421,7 @@ var mergeForeignKeyTests = []mergeForeignKeyTest{
}
func colCollection(cols ...schema.Column) *schema.ColCollection {
pcc, err := schema.NewColCollection(cols...)
if err != nil {
panic(err)
}
return pcc
return schema.NewColCollection(cols...)
}
// SchemaFromColsAndIdxs creates a Schema from a ColCollection and an IndexCollection.
@@ -95,7 +95,7 @@ func TestBasics(t *testing.T) {
}
}
var fakeFields, _ = schema.NewColCollection(
var fakeFields = schema.NewColCollection(
schema.NewColumn("a", 0, types.StringKind, true, schema.NotNullConstraint{}),
schema.NewColumn("b", 1, types.StringKind, false),
)
+7 -2
View File
@@ -259,7 +259,7 @@ func InferSchema(ctx context.Context, root *doltdb.RootValue, rd table.TableRead
}
pkSet := set.NewStrSet(pks)
newCols, _ := schema.MapColCollection(infCols, func(col schema.Column) (schema.Column, error) {
newCols := schema.MapColCollection(infCols, func(col schema.Column) schema.Column {
col.IsPartOfPK = pkSet.Contains(col.Name)
if col.IsPartOfPK {
hasNotNull := false
@@ -273,7 +273,7 @@ func InferSchema(ctx context.Context, root *doltdb.RootValue, rd table.TableRead
col.Constraints = append(col.Constraints, schema.NotNullConstraint{})
}
}
return col, nil
return col
})
// check that all provided primary keys are being used
@@ -289,5 +289,10 @@ func InferSchema(ctx context.Context, root *doltdb.RootValue, rd table.TableRead
return nil, errhand.BuildDError("failed to generate new schema").AddCause(err).Build()
}
err = schema.ValidateForInsert(newCols)
if err != nil {
return nil, errhand.BuildDError("invalid schema").AddCause(err).Build()
}
return schema.SchemaFromCols(newCols)
}
+2 -5
View File
@@ -255,15 +255,12 @@ func replayCommitWithNewTag(ctx context.Context, root, parentRoot, rebasedParent
parentTblName := tblName
// schema rebase
schCC, _ := schema.NewColCollection()
schCC := schema.NewColCollection()
err = sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
if newTag, found := tableMapping[tag]; found {
col.Tag = newTag
}
schCC, err = schCC.Append(col)
if err != nil {
return true, err
}
schCC = schCC.Append(col)
return false, nil
})
+2 -2
View File
@@ -60,8 +60,8 @@ var testCols = []schema.Column{
{Name: titleColName, Tag: titleColTag, Kind: types.StringKind, IsPartOfPK: false, TypeInfo: typeinfo.StringDefaultType, Constraints: nil},
{Name: reservedColName, Tag: reservedColTag, Kind: types.StringKind, IsPartOfPK: false, TypeInfo: typeinfo.StringDefaultType, Constraints: nil},
}
var testKeyColColl, _ = schema.NewColCollection(testKeyCols...)
var testNonKeyColColl, _ = schema.NewColCollection(testCols...)
var testKeyColColl = schema.NewColCollection(testKeyCols...)
var testNonKeyColColl = schema.NewColCollection(testCols...)
var sch, _ = schema.SchemaFromPKAndNonPKCols(testKeyColColl, testNonKeyColColl)
var index schema.Index
+1 -1
View File
@@ -101,7 +101,7 @@ func TestIsValid(t *testing.T) {
nonPkCols := []schema.Column{
{Name: addrColName, Tag: addrColTag, Kind: types.BoolKind, IsPartOfPK: false, Constraints: nil},
}
nonKeyColColl, _ := schema.NewColCollection(nonPkCols...)
nonKeyColColl := schema.NewColCollection(nonPkCols...)
newSch, err := schema.SchemaFromPKAndNonPKCols(testKeyColColl, nonKeyColColl)
require.NoError(t, err)
@@ -23,24 +23,24 @@ import (
"github.com/dolthub/dolt/go/store/types"
)
var fieldsA, _ = schema.NewColCollection(
var fieldsA = schema.NewColCollection(
schema.NewColumn("a", 0, types.StringKind, true),
schema.NewColumn("b", 1, types.StringKind, false),
schema.NewColumn("c", 2, types.StringKind, false))
var fieldsB, _ = schema.NewColCollection(
var fieldsB = schema.NewColCollection(
schema.NewColumn("a", 0, types.StringKind, true),
schema.NewColumn("b", 1, types.StringKind, false))
var fieldsC, _ = schema.NewColCollection(
var fieldsC = schema.NewColCollection(
schema.NewColumn("key", 3, types.UUIDKind, true),
schema.NewColumn("value", 4, types.StringKind, false))
var fieldsCNoPK, _ = schema.NewColCollection(
var fieldsCNoPK = schema.NewColCollection(
schema.NewColumn("key", 3, types.UUIDKind, true),
schema.NewColumn("value", 4, types.StringKind, false))
var fieldsD, _ = schema.NewColCollection(
var fieldsD = schema.NewColCollection(
schema.NewColumn("key", 3, types.StringKind, true),
schema.NewColumn("value", 4, types.StringKind, false))
+1 -5
View File
@@ -90,11 +90,7 @@ func NewJoiner(namedSchemas []NamedSchema, namers map[string]ColNamingFunc) (*Jo
return nil, err
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
return nil, err
}
colColl := schema.NewColCollection(cols...)
joined := schema.UnkeyedSchemaFromCols(colColl)
+1 -1
View File
@@ -31,7 +31,7 @@ const (
cityTag
)
var peopleCols, _ = schema.NewColCollection(
var peopleCols = schema.NewColCollection(
schema.NewColumn("last", lastTag, types.StringKind, true),
schema.NewColumn("first", firstTag, types.StringKind, true),
schema.NewColumn("age", ageTag, types.IntKind, false),
@@ -30,7 +30,7 @@ import (
"github.com/dolthub/dolt/go/store/types"
)
var srcCols, _ = schema.NewColCollection(
var srcCols = schema.NewColCollection(
schema.NewColumn("uuidtostr", 0, types.UUIDKind, true),
schema.NewColumn("floattostr", 1, types.FloatKind, false),
schema.NewColumn("uinttostr", 2, types.UintKind, false),
@@ -108,7 +108,7 @@ func TestSpecialBoolHandling(t *testing.T) {
require.NoError(t, err)
col2, err := schema.NewColumnWithTypeInfo("v", 1, typeinfo.PseudoBoolType, false, "", false, "")
require.NoError(t, err)
colColl, _ := schema.NewColCollection(col1, col2)
colColl := schema.NewColCollection(col1, col2)
sch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
untypedSch, err := untyped.UntypeSchema(sch)
@@ -17,6 +17,7 @@ package alterschema
import (
"context"
"fmt"
"strings"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/row"
@@ -155,10 +156,13 @@ func addColumnToSchema(sch schema.Schema, tag uint64, newColName string, typeInf
newCols = append(newCols, newCol)
}
collection, err := schema.NewColCollection(newCols...)
collection := schema.NewColCollection(newCols...)
err = schema.ValidateForInsert(collection)
if err != nil {
return nil, err
}
newSch, err := schema.SchemaFromCols(collection)
if err != nil {
return nil, err
@@ -192,8 +196,7 @@ func validateNewColumn(ctx context.Context, root *doltdb.RootValue, tbl *doltdb.
err = cols.Iter(func(currColTag uint64, currCol schema.Column) (stop bool, err error) {
if currColTag == tag {
return false, schema.ErrTagPrevUsed(tag, newColName, tblName)
} else if currCol.Name == newColName {
} else if strings.ToLower(currCol.Name) == strings.ToLower(newColName) {
return true, fmt.Errorf("A column with the name %s already exists in table %s.", newColName, tblName)
}
@@ -88,11 +88,7 @@ func DropColumn(ctx context.Context, tbl *doltdb.Table, colName string, foreignK
return nil, err
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
return nil, err
}
colColl := schema.NewColCollection(cols...)
newSch, err := schema.SchemaFromCols(colColl)
if err != nil {
return nil, err
@@ -18,6 +18,7 @@ import (
"context"
"errors"
"fmt"
"strings"
"github.com/dolthub/dolt/go/libraries/doltcore/table/editor"
@@ -94,7 +95,7 @@ func validateModifyColumn(ctx context.Context, tbl *doltdb.Table, existingCol sc
err = cols.Iter(func(currColTag uint64, currCol schema.Column) (stop bool, err error) {
if currColTag == modifiedCol.Tag {
return false, nil
} else if currCol.Name == modifiedCol.Name {
} else if strings.ToLower(currCol.Name) == strings.ToLower(modifiedCol.Name) {
return true, fmt.Errorf("A column with the name %s already exists.", modifiedCol.Name)
}
@@ -192,7 +193,9 @@ func replaceColumnInSchema(sch schema.Schema, oldCol schema.Column, newCol schem
return false, nil
})
collection, err := schema.NewColCollection(newCols...)
collection := schema.NewColCollection(newCols...)
err := schema.ValidateForInsert(collection)
if err != nil {
return nil, err
}
@@ -30,7 +30,7 @@ import (
func TestRenameTable(t *testing.T) {
otherTable := "other"
cc, _ := schema.NewColCollection(
cc := schema.NewColCollection(
schema.NewColumn("id", uint64(100), types.UUIDKind, true, schema.NotNullConstraint{}),
)
otherSch, err := schema.SchemaFromCols(cc)
+51 -55
View File
@@ -63,11 +63,12 @@ type ColCollection struct {
TagToIdx map[uint64]int
}
// NewColCollection creates a new collection from a list of columns. All columns must have unique tags or this method
// returns an error. If any columns have the same name, by-name lookups from this collection will not function
// correctly, and this column collection cannot be used to create a schema. If any columns have the same case-
// insensitive name, case-insensitive lookups will be unable to return the correct column in all cases.
func NewColCollection(cols ...Column) (*ColCollection, error) {
// NewColCollection creates a new collection from a list of columns. If any columns have the same tag, by-tag lookups in
// this collection will not function correctly. If any columns have the same name, by-name lookups from this collection
// will not function correctly. If any columns have the same case-insensitive name, case-insensitive lookups will be
// unable to return the correct column in all cases.
// For this collection to be used as a Dolt schema, it must pass schema.ValidateForInsert.
func NewColCollection(cols ...Column) *ColCollection {
var tags []uint64
var sortedTags []uint64
@@ -76,37 +77,34 @@ func NewColCollection(cols ...Column) (*ColCollection, error) {
lowerNameToCol := make(map[string]Column, len(cols))
tagToIdx := make(map[uint64]int, len(cols))
var uniqueCols []Column
for i, col := range cols {
if val, ok := tagToCol[col.Tag]; !ok {
uniqueCols = append(uniqueCols, col)
tagToCol[col.Tag] = col
tagToIdx[col.Tag] = i
tags = append(tags, col.Tag)
sortedTags = append(sortedTags, col.Tag)
nameToCol[col.Name] = cols[i]
// If multiple columns have the same tag, the last one is used for tag lookups.
// Columns must have unique tags to pass schema.ValidateForInsert.
tagToCol[col.Tag] = col
tagToIdx[col.Tag] = i
tags = append(tags, col.Tag)
sortedTags = append(sortedTags, col.Tag)
nameToCol[col.Name] = cols[i]
// If multiple columns have the same lower case name, the first one is used for case-insensitive matching.
lowerCaseName := strings.ToLower(col.Name)
if _, ok := lowerNameToCol[lowerCaseName]; !ok {
lowerNameToCol[lowerCaseName] = cols[i]
}
} else if !val.Equals(col) {
return nil, ErrColTagCollision
// If multiple columns have the same lower case name, the first one is used for case-insensitive matching.
// Column names must all be case-insensitive different to pass schema.ValidateForInsert.
lowerCaseName := strings.ToLower(col.Name)
if _, ok := lowerNameToCol[lowerCaseName]; !ok {
lowerNameToCol[lowerCaseName] = cols[i]
}
}
sort.Slice(sortedTags, func(i, j int) bool { return sortedTags[i] < sortedTags[j] })
return &ColCollection{
cols: uniqueCols,
cols: cols,
Tags: tags,
SortedTags: sortedTags,
TagToCol: tagToCol,
NameToCol: nameToCol,
LowerNameToCol: lowerNameToCol,
TagToIdx: tagToIdx,
}, nil
}
}
// GetColumns returns the underlying list of columns. The list returned is a copy.
@@ -130,12 +128,12 @@ func (cc *ColCollection) GetColumnNames() []string {
}
// AppendColl returns a new collection with the additional ColCollection's columns appended
func (cc *ColCollection) AppendColl(colColl *ColCollection) (*ColCollection, error) {
func (cc *ColCollection) AppendColl(colColl *ColCollection) *ColCollection {
return cc.Append(colColl.cols...)
}
// Append returns a new collection with the additional columns appended
func (cc *ColCollection) Append(cols ...Column) (*ColCollection, error) {
func (cc *ColCollection) Append(cols ...Column) *ColCollection {
allCols := make([]Column, 0, len(cols)+len(cc.cols))
allCols = append(allCols, cc.cols...)
allCols = append(allCols, cols...)
@@ -143,21 +141,6 @@ func (cc *ColCollection) Append(cols ...Column) (*ColCollection, error) {
return NewColCollection(allCols...)
}
// Replace will replace one column of the schema with another.
func (cc *ColCollection) Replace(old, new Column) (*ColCollection, error) {
allCols := make([]Column, 0, len(cc.cols))
for _, curr := range cc.cols {
if curr.Tag == old.Tag {
allCols = append(allCols, new)
} else {
allCols = append(allCols, curr)
}
}
return NewColCollection(allCols...)
}
// Iter iterates over all the columns in the supplied ordering
func (cc *ColCollection) Iter(cb func(tag uint64, col Column) (stop bool, err error)) error {
for _, col := range cc.cols {
@@ -249,29 +232,43 @@ func ColCollsAreEqual(cc1, cc2 *ColCollection) bool {
return areEqual
}
// ColCollsAreCompatible determines whether two ColCollections are compatible with each other. Compatible columns have
// the same tags and storage types, but may have different names, constraints or SQL type parameters.
func ColCollsAreCompatible(cc1, cc2 *ColCollection) bool {
if cc1.Size() != cc2.Size() {
return false
}
areCompatible := true
_ = cc1.Iter(func(tag uint64, col1 Column) (stop bool, err error) {
col2, ok := cc2.GetByTag(tag)
if !ok || !col1.Compatible(col2) {
areCompatible = false
return true, nil
}
return false, nil
})
return areCompatible
}
// MapColCollection applies a function to each column in a ColCollection and creates a new ColCollection from the results.
func MapColCollection(cc *ColCollection, cb func(col Column) (Column, error)) (*ColCollection, error) {
func MapColCollection(cc *ColCollection, cb func(col Column) Column) *ColCollection {
mapped := make([]Column, cc.Size())
for i, c := range cc.cols {
mc, err := cb(c)
if err != nil {
return nil, err
}
mapped[i] = mc
mapped[i] = cb(c)
}
return NewColCollection(mapped...)
}
// FilterColCollection applies a boolean function to column in a ColCollection, it creates a new ColCollection from the
// set of columns for which the function returned true.
func FilterColCollection(cc *ColCollection, cb func(col Column) (bool, error)) (*ColCollection, error) {
func FilterColCollection(cc *ColCollection, cb func(col Column) bool) *ColCollection {
filtered := make([]Column, 0, cc.Size())
for _, c := range cc.cols {
keep, err := cb(c)
if err != nil {
return nil, err
}
if keep {
if cb(c) {
filtered = append(filtered, c)
}
}
@@ -280,7 +277,6 @@ func FilterColCollection(cc *ColCollection, cb func(col Column) (bool, error)) (
func ColCollUnion(colColls ...*ColCollection) (*ColCollection, error) {
var allCols []Column
// TODO: error on tag collision
for _, sch := range colColls {
err := sch.Iter(func(tag uint64, col Column) (stop bool, err error) {
allCols = append(allCols, col)
@@ -292,14 +288,14 @@ func ColCollUnion(colColls ...*ColCollection) (*ColCollection, error) {
}
}
return NewColCollection(allCols...)
return NewColCollection(allCols...), nil
}
// ColCollectionSetDifference returns the set difference leftCC - rightCC.
func ColCollectionSetDifference(leftCC, rightCC *ColCollection) (d *ColCollection) {
d, _ = FilterColCollection(leftCC, func(col Column) (b bool, err error) {
d = FilterColCollection(leftCC, func(col Column) bool {
_, ok := rightCC.GetByTag(col.Tag)
return !ok, nil
return !ok
})
return d
}
+4 -29
View File
@@ -20,7 +20,6 @@ import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/libraries/doltcore/schema/typeinfo"
"github.com/dolthub/dolt/go/store/types"
@@ -33,8 +32,7 @@ var lastNameCapsCol = Column{"LAST", 3, types.StringKind, false, typeinfo.String
func TestGetByNameAndTag(t *testing.T) {
cols := []Column{firstNameCol, lastNameCol, firstNameCapsCol, lastNameCapsCol}
colColl, err := NewColCollection(cols...)
require.NoError(t, err)
colColl := NewColCollection(cols...)
tests := []struct {
name string
@@ -73,8 +71,7 @@ func TestGetByNameAndTag(t *testing.T) {
func TestGetByNameCaseInsensitive(t *testing.T) {
cols := []Column{firstNameCol, lastNameCol, firstNameCapsCol, lastNameCapsCol}
colColl, err := NewColCollection(cols...)
require.NoError(t, err)
colColl := NewColCollection(cols...)
tests := []struct {
name string
@@ -103,28 +100,6 @@ func TestGetByNameCaseInsensitive(t *testing.T) {
}
}
func TestNewColCollectionErrorHandling(t *testing.T) {
tests := []struct {
name string
cols []Column
expectedErr error
}{
{
name: "tag collision",
cols: []Column{firstNameCol, lastNameCol, {"collision", 0, types.StringKind, false, typeinfo.StringDefaultType, "", false, "", nil}},
expectedErr: ErrColTagCollision,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
_, err := NewColCollection(test.cols...)
assert.Error(t, err)
assert.Equal(t, err, test.expectedErr)
})
}
}
func TestAppendAndItrInSortOrder(t *testing.T) {
cols := []Column{
{"0", 0, types.StringKind, false, typeinfo.StringDefaultType, "", false, "", nil},
@@ -141,9 +116,9 @@ func TestAppendAndItrInSortOrder(t *testing.T) {
{"6", 6, types.StringKind, false, typeinfo.StringDefaultType, "", false, "", nil},
}
colColl, _ := NewColCollection(cols...)
colColl := NewColCollection(cols...)
validateIter(len(cols), colColl, t)
colColl2, _ := colColl.Append(cols2...)
colColl2 := colColl.Append(cols2...)
validateIter(len(cols), colColl, t) //validate immutability
validateIter(len(cols)+len(cols2), colColl2, t)
}
+8
View File
@@ -145,6 +145,14 @@ func (c Column) Equals(other Column) bool {
ColConstraintsAreEqual(c.Constraints, other.Constraints)
}
// Compatible tests compatibility between two columns. Compatible columns have the same tag and can store the same
// kinds of values at the storage layer, but may have different constraints or type parameters.
func (c Column) Compatible(other Column) bool {
return c.Tag == other.Tag &&
c.Kind == other.Kind &&
c.IsPartOfPK == other.IsPartOfPK
}
// KindString returns the string representation of the NomsKind stored in the column.
func (c Column) KindString() string {
return KindToLwrStr[c.Kind]
@@ -193,11 +193,7 @@ func (sd schemaData) decodeSchema() (schema.Schema, error) {
}
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
return nil, err
}
colColl := schema.NewColCollection(cols...)
sch, err := schema.SchemaFromCols(colColl)
if err != nil {
@@ -224,6 +220,13 @@ func (sd schemaData) decodeSchema() (schema.Schema, error) {
// MarshalSchemaAsNomsValue takes a Schema and converts it to a types.Value
func MarshalSchemaAsNomsValue(ctx context.Context, vrw types.ValueReadWriter, sch schema.Schema) (types.Value, error) {
// Anyone calling this is going to serialize this to disk, so it's our last line of defense against defective schemas.
// Business logic should catch errors before this point, but this is a failsafe.
err := schema.ValidateForInsert(sch.GetAllCols())
if err != nil {
return nil, err
}
sd, err := toSchemaData(sch)
if err != nil {
@@ -292,11 +295,7 @@ func (ssd superSchemaData) decodeSuperSchema() (*schema.SuperSchema, error) {
cols[i] = c
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
return nil, err
}
colColl := schema.NewColCollection(cols...)
if ssd.TagNames == nil {
ssd.TagNames = make(map[uint64][]string)
@@ -42,7 +42,7 @@ func createTestSchema() schema.Schema {
schema.NewColumn("age", 3, types.UintKind, false),
}
colColl, _ := schema.NewColCollection(columns...)
colColl := schema.NewColCollection(columns...)
sch := schema.MustSchemaFromCols(colColl)
_, _ = sch.Indexes().AddIndexByColTags("idx_age", []uint64{3}, schema.IndexProperties{IsUnique: false, Comment: ""})
return sch
@@ -148,8 +148,7 @@ func TestTypeInfoMarshalling(t *testing.T) {
require.NoError(t, err)
col, err := schema.NewColumnWithTypeInfo("pk", 1, ti, true, "", false, "")
require.NoError(t, err)
colColl, err := schema.NewColCollection(col)
require.NoError(t, err)
colColl := schema.NewColCollection(col)
originalSch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
@@ -257,11 +256,7 @@ func (tsd testSchemaData) decodeSchema() (schema.Schema, error) {
}
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
return nil, err
}
colColl := schema.NewColCollection(cols...)
sch, err := schema.SchemaFromCols(colColl)
if err != nil {
+2 -2
View File
@@ -164,8 +164,8 @@ func (ix *indexImpl) Schema() Schema {
Constraints: nil,
}
}
allCols, _ := NewColCollection(cols...)
nonPkCols, _ := NewColCollection()
allCols := NewColCollection(cols...)
nonPkCols := NewColCollection()
return &schemaImpl{
pkCols: allCols,
nonPKCols: nonPkCols,
+7 -13
View File
@@ -24,14 +24,13 @@ import (
)
func TestIndexCollectionAddIndex(t *testing.T) {
colColl, err := NewColCollection(
colColl := NewColCollection(
NewColumn("pk1", 1, types.IntKind, true, NotNullConstraint{}),
NewColumn("pk2", 2, types.IntKind, true, NotNullConstraint{}),
NewColumn("v1", 3, types.IntKind, false),
NewColumn("v2", 4, types.UintKind, false),
NewColumn("v3", 5, types.StringKind, false),
)
require.NoError(t, err)
indexColl := NewIndexCollection(colColl).(*indexCollectionImpl)
testIndexes := []*indexImpl{
@@ -138,14 +137,13 @@ func TestIndexCollectionAddIndex(t *testing.T) {
}
func TestIndexCollectionAddIndexByColNames(t *testing.T) {
colColl, err := NewColCollection(
colColl := NewColCollection(
NewColumn("pk1", 1, types.IntKind, true, NotNullConstraint{}),
NewColumn("pk2", 2, types.IntKind, true, NotNullConstraint{}),
NewColumn("v1", 3, types.IntKind, false),
NewColumn("v2", 4, types.UintKind, false),
NewColumn("v3", 5, types.StringKind, false),
)
require.NoError(t, err)
indexColl := NewIndexCollection(colColl).(*indexCollectionImpl)
testIndexes := []struct {
@@ -237,14 +235,13 @@ func TestIndexCollectionAddIndexByColNames(t *testing.T) {
}
func TestIndexCollectionAddIndexByColTags(t *testing.T) {
colColl, err := NewColCollection(
colColl := NewColCollection(
NewColumn("pk1", 1, types.IntKind, true, NotNullConstraint{}),
NewColumn("pk2", 2, types.IntKind, true, NotNullConstraint{}),
NewColumn("v1", 3, types.IntKind, false),
NewColumn("v2", 4, types.UintKind, false),
NewColumn("v3", 5, types.StringKind, false),
)
require.NoError(t, err)
indexColl := NewIndexCollection(colColl).(*indexCollectionImpl)
testIndexes := []*indexImpl{
@@ -321,21 +318,20 @@ func TestIndexCollectionAddIndexByColTags(t *testing.T) {
}
func TestIndexCollectionAllIndexes(t *testing.T) {
colColl, err := NewColCollection(
colColl := NewColCollection(
NewColumn("pk1", 1, types.IntKind, true, NotNullConstraint{}),
NewColumn("pk2", 2, types.IntKind, true, NotNullConstraint{}),
NewColumn("v1", 3, types.IntKind, false),
NewColumn("v2", 4, types.UintKind, false),
NewColumn("v3", 5, types.StringKind, false),
)
require.NoError(t, err)
indexColl := NewIndexCollection(colColl).(*indexCollectionImpl)
indexColl.AddIndex(&indexImpl{
name: "idx_z",
tags: []uint64{3},
})
_, err = indexColl.AddIndexByColNames("idx_a", []string{"v2"}, IndexProperties{IsUnique: false, Comment: ""})
_, err := indexColl.AddIndexByColNames("idx_a", []string{"v2"}, IndexProperties{IsUnique: false, Comment: ""})
require.NoError(t, err)
_, err = indexColl.AddIndexByColTags("idx_n", []uint64{5}, IndexProperties{IsUnique: false, Comment: "hello there"})
require.NoError(t, err)
@@ -369,14 +365,13 @@ func TestIndexCollectionAllIndexes(t *testing.T) {
}
func TestIndexCollectionRemoveIndex(t *testing.T) {
colColl, err := NewColCollection(
colColl := NewColCollection(
NewColumn("pk1", 1, types.IntKind, true, NotNullConstraint{}),
NewColumn("pk2", 2, types.IntKind, true, NotNullConstraint{}),
NewColumn("v1", 3, types.IntKind, false),
NewColumn("v2", 4, types.UintKind, false),
NewColumn("v3", 5, types.StringKind, false),
)
require.NoError(t, err)
indexColl := NewIndexCollection(colColl).(*indexCollectionImpl)
testIndexes := []Index{
@@ -423,14 +418,13 @@ func TestIndexCollectionRemoveIndex(t *testing.T) {
}
func TestIndexCollectionRenameIndex(t *testing.T) {
colColl, err := NewColCollection(
colColl := NewColCollection(
NewColumn("pk1", 1, types.IntKind, true, NotNullConstraint{}),
NewColumn("pk2", 2, types.IntKind, true, NotNullConstraint{}),
NewColumn("v1", 3, types.IntKind, false),
NewColumn("v2", 4, types.UintKind, false),
NewColumn("v3", 5, types.StringKind, false),
)
require.NoError(t, err)
indexColl := NewIndexCollection(colColl).(*indexCollectionImpl)
index := &indexImpl{
name: "idx_a",
+6 -10
View File
@@ -51,8 +51,8 @@ func SchemaFromCols(allCols *ColCollection) (Schema, error) {
return nil, ErrNoPrimaryKeyColumns
}
pkColColl, _ := NewColCollection(pkCols...)
nonPKColColl, _ := NewColCollection(nonPKCols...)
pkColColl := NewColCollection(pkCols...)
nonPKColColl := NewColCollection(nonPKCols...)
return &schemaImpl{
pkCols: pkColColl,
@@ -93,7 +93,7 @@ func ValidateForInsert(allCols *ColCollection) error {
}
colTags[tag] = true
if _, ok := colNames[col.Name]; ok {
if _, ok := colNames[strings.ToLower(col.Name)]; ok {
return true, ErrColNameCollision
}
colNames[col.Name] = true
@@ -115,8 +115,8 @@ func UnkeyedSchemaFromCols(allCols *ColCollection) Schema {
nonPKCols = append(nonPKCols, c)
}
pkColColl, _ := NewColCollection()
nonPKColColl, _ := NewColCollection(nonPKCols...)
pkColColl := NewColCollection()
nonPKColColl := NewColCollection(nonPKCols...)
return &schemaImpl{
pkCols: pkColColl,
@@ -149,11 +149,7 @@ func SchemaFromPKAndNonPKCols(pkCols, nonPKCols *ColCollection) (Schema, error)
i++
}
allColColl, err := NewColCollection(allCols...)
if err != nil {
return nil, err
}
allColColl := NewColCollection(allCols...)
return &schemaImpl{
pkCols: pkCols,
+29 -17
View File
@@ -16,6 +16,7 @@ package schema
import (
"reflect"
"strings"
"testing"
"github.com/stretchr/testify/assert"
@@ -60,15 +61,14 @@ var nonPkCols = []Column{
var allCols = append(append([]Column(nil), pkCols...), nonPkCols...)
func TestSchema(t *testing.T) {
colColl, err := NewColCollection(allCols...)
require.NoError(t, err)
colColl := NewColCollection(allCols...)
schFromCols, err := SchemaFromCols(colColl)
require.NoError(t, err)
testSchema("SchemaFromCols", schFromCols, t)
testKeyColColl, _ := NewColCollection(pkCols...)
testNonKeyColsColl, _ := NewColCollection(nonPkCols...)
testKeyColColl := NewColCollection(pkCols...)
testNonKeyColsColl := NewColCollection(nonPkCols...)
schFromPKAndNonPKCols, _ := SchemaFromPKAndNonPKCols(testKeyColColl, testNonKeyColsColl)
testSchema("SchemaFromPKAndNonPKCols", schFromPKAndNonPKCols, t)
@@ -79,10 +79,8 @@ func TestSchema(t *testing.T) {
}
func TestSchemaWithNoPKs(t *testing.T) {
colColl, err := NewColCollection(nonPkCols...)
require.NoError(t, err)
_, err = SchemaFromCols(colColl)
colColl := NewColCollection(nonPkCols...)
_, _ = SchemaFromCols(colColl)
assert.NotPanics(t, func() {
UnkeyedSchemaFromCols(colColl)
@@ -90,16 +88,14 @@ func TestSchemaWithNoPKs(t *testing.T) {
}
func TestIsKeyless(t *testing.T) {
cc, err := NewColCollection(allCols...)
require.NoError(t, err)
cc := NewColCollection(allCols...)
pkSch, err := SchemaFromCols(cc)
require.NoError(t, err)
ok := IsKeyless(pkSch)
assert.False(t, ok)
cc, err = NewColCollection(nonPkCols...)
require.NoError(t, err)
cc = NewColCollection(nonPkCols...)
keylessSch, err := SchemaFromCols(cc)
assert.NoError(t, err)
@@ -110,20 +106,36 @@ func TestIsKeyless(t *testing.T) {
func TestValidateForInsert(t *testing.T) {
t.Run("Validate good", func(t *testing.T) {
colColl, err := NewColCollection(allCols...)
require.NoError(t, err)
colColl := NewColCollection(allCols...)
assert.NoError(t, ValidateForInsert(colColl))
})
t.Run("Name collision", func(t *testing.T) {
cols := append(allCols, Column{titleColName, 100, types.StringKind, false, typeinfo.StringDefaultType, "", false, "", nil})
colColl, err := NewColCollection(cols...)
require.NoError(t, err)
colColl := NewColCollection(cols...)
err = ValidateForInsert(colColl)
err := ValidateForInsert(colColl)
assert.Error(t, err)
assert.Equal(t, err, ErrColNameCollision)
})
t.Run("Case insensitive collision", func(t *testing.T) {
cols := append(allCols, Column{strings.ToUpper(titleColName), 100, types.StringKind, false, typeinfo.StringDefaultType, "", false, "", nil})
colColl := NewColCollection(cols...)
err := ValidateForInsert(colColl)
assert.Error(t, err)
assert.Equal(t, err, ErrColNameCollision)
})
t.Run("Tag collision", func(t *testing.T) {
cols := append(allCols, Column{"newCol", lnColTag, types.StringKind, false, typeinfo.StringDefaultType, "", false, "", nil})
colColl := NewColCollection(cols...)
err := ValidateForInsert(colColl)
assert.Error(t, err)
assert.Equal(t, err, ErrColTagCollision)
})
}
func testSchema(method string, sch Schema, t *testing.T) {
+8 -14
View File
@@ -37,7 +37,7 @@ type SuperSchema struct {
// NewSuperSchema creates a SuperSchema from the columns of schemas.
func NewSuperSchema(schemas ...Schema) (*SuperSchema, error) {
cc, _ := NewColCollection()
cc := NewColCollection()
tn := make(map[uint64][]string)
ss := SuperSchema{cc, tn}
@@ -62,9 +62,7 @@ func (ss *SuperSchema) AddColumn(col Column) (err error) {
ac := ss.allCols
existingCol, found := ac.GetByTag(ct)
if found {
if col.IsPartOfPK != existingCol.IsPartOfPK ||
col.Kind != existingCol.Kind ||
!col.TypeInfo.Equals(existingCol.TypeInfo) {
if !existingCol.Compatible(col) {
ecName := ss.tagNames[col.Tag][0]
return fmt.Errorf("tag collision for columns %s and %s, different definitions (tag: %d)",
ecName, col.Name, col.Tag)
@@ -85,7 +83,7 @@ func (ss *SuperSchema) AddColumn(col Column) (err error) {
// we haven't seen this column before
ss.tagNames[col.Tag] = append(names, col.Name)
ss.allCols, err = ss.allCols.Append(simpleColumn(col))
ss.allCols = ss.allCols.Append(simpleColumn(col))
return err
}
@@ -204,10 +202,10 @@ func (ss *SuperSchema) nameColumns() map[uint64]string {
// Each column is assigned its latest name from its name history.
func (ss *SuperSchema) GenerateColCollection() (*ColCollection, error) {
uniqNames := ss.nameColumns()
cc, _ := NewColCollection()
cc := NewColCollection()
err := ss.Iter(func(tag uint64, col Column) (stop bool, err error) {
col.Name = uniqNames[tag]
cc, err = cc.Append(col)
cc = cc.Append(col)
stop = err != nil
return stop, err
})
@@ -270,18 +268,14 @@ func (ss *SuperSchema) RebaseTag(tagMapping map[uint64]uint64) (*SuperSchema, er
return nil, err
}
ac, err := NewColCollection(cc...)
if err != nil {
return nil, err
}
ac := NewColCollection(cc...)
return &SuperSchema{ac, tn}, nil
}
// SuperSchemaUnion combines multiple SuperSchemas.
func SuperSchemaUnion(superSchemas ...*SuperSchema) (*SuperSchema, error) {
cc, _ := NewColCollection()
cc := NewColCollection()
tagNameSets := make(map[uint64]*set.StrSet)
latestNames := make(map[uint64]string)
for _, ss := range superSchemas {
@@ -290,7 +284,7 @@ func SuperSchemaUnion(superSchemas ...*SuperSchema) (*SuperSchema, error) {
if !found {
tagNameSets[tag] = set.NewStrSet(ss.AllColumnNames(tag))
cc, err = cc.Append(simpleColumn(col))
cc = cc.Append(simpleColumn(col))
} else {
tagNameSets[tag].Add(ss.AllColumnNames(tag)...)
}
@@ -230,11 +230,7 @@ func mustSchema(cols []Column) Schema {
}
func mustColColl(cols []Column) *ColCollection {
cc, err := NewColCollection(cols...)
if err != nil {
panic(err)
}
return cc
return NewColCollection(cols...)
}
func strCol(name string, tag uint64, isPK bool) Column {
@@ -86,7 +86,7 @@ var untypedAppearacesSch, _ = untyped.UntypeUnkeySchema(AppearancesTestSchema)
var AppearancesTableName = "appearances"
func createPeopleTestSchema() schema.Schema {
colColl, _ := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("id", IdTag, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("first_name", FirstNameTag, types.StringKind, false, schema.NotNullConstraint{}),
schema.NewColumn("last_name", LastNameTag, types.StringKind, false, schema.NotNullConstraint{}),
@@ -100,7 +100,7 @@ func createPeopleTestSchema() schema.Schema {
}
func createEpisodesTestSchema() schema.Schema {
colColl, _ := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("id", EpisodeIdTag, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("name", EpNameTag, types.StringKind, false, schema.NotNullConstraint{}),
newColumnWithTypeInfo("air_date", EpAirDateTag, typeinfo.DatetimeType, false),
@@ -110,7 +110,7 @@ func createEpisodesTestSchema() schema.Schema {
}
func createAppearancesTestSchema() schema.Schema {
colColl, _ := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("character_id", AppCharacterTag, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("episode_id", AppEpTag, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("comments", AppCommentsTag, types.StringKind, false),
+1 -1
View File
@@ -123,7 +123,7 @@ func SubsetSchema(sch schema.Schema, colNames ...string) schema.Schema {
cols = append(cols, col)
}
}
colColl, _ := schema.NewColCollection(cols...)
colColl := schema.NewColCollection(cols...)
return schema.UnkeyedSchemaFromCols(colColl)
}
@@ -388,17 +388,13 @@ func selectFuncForFilters(nbf *types.NomsBinFormat, filters []sql.Expression) (p
fromCommitDateTag
)
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn(toCommit, toCommitTag, types.StringKind, false),
schema.NewColumn(fromCommit, fromCommitTag, types.StringKind, false),
schema.NewColumn(toCommitDate, toCommitDateTag, types.TimestampKind, false),
schema.NewColumn(fromCommitDate, fromCommitDateTag, types.TimestampKind, false),
)
if err != nil {
return nil, err
}
expFunc, err := expreval.ExpressionFuncFromSQLExpressions(nbf, schema.UnkeyedSchemaFromCols(colColl), filters)
if err != nil {
@@ -39,14 +39,14 @@ const (
c1Tag = 2
)
var oneIntPKSch = schema.MustSchemaFromCols(mustColColl(schema.NewColCollection(
var oneIntPKSch = schema.MustSchemaFromCols(schema.NewColCollection(
schema.NewColumn(pk0Name, pk0Tag, types.IntKind, true),
schema.NewColumn(c1Name, c1Tag, types.IntKind, false))))
schema.NewColumn(c1Name, c1Tag, types.IntKind, false)))
var twoIntPKSch = schema.MustSchemaFromCols(mustColColl(schema.NewColCollection(
var twoIntPKSch = schema.MustSchemaFromCols(schema.NewColCollection(
schema.NewColumn(pk0Name, pk0Tag, types.IntKind, true),
schema.NewColumn(pk1Name, pk1Tag, types.IntKind, true),
schema.NewColumn(c1Name, c1Tag, types.IntKind, false))))
schema.NewColumn(c1Name, c1Tag, types.IntKind, false)))
func int64Range(start, end, stride int64) []int64 {
vals := make([]int64, 0, end-start)
@@ -100,14 +100,6 @@ func genTwoIntPKRows(pks ...[2]int64) []row.Row {
return rows
}
func mustColColl(coll *schema.ColCollection, err error) *schema.ColCollection {
if err != nil {
panic(err)
}
return coll
}
func mapFromRows(ctx context.Context, vrw types.ValueReadWriter, sch schema.Schema, rows ...row.Row) (types.Map, error) {
resMap, err := types.NewMap(ctx, vrw)
@@ -26,7 +26,7 @@ import (
"github.com/dolthub/dolt/go/store/types"
)
var queryCatalogCols, _ = schema.NewColCollection(
var queryCatalogCols = schema.NewColCollection(
// QueryCatalogIdCol is the name of the primary key column of the query catalog table
schema.NewColumn(doltdb.QueryCatalogIdCol, schema.QueryCatalogIdTag, types.StringKind, true, schema.NotNullConstraint{}),
// QueryCatalogOrderCol is the column containing the order of the queries in the catalog
@@ -203,7 +203,7 @@ func TestNewAndAndOrFuncs(t *testing.T) {
}
func TestNewComparisonFunc(t *testing.T) {
colColl, _ := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("col0", 0, types.IntKind, true),
schema.NewColumn("col1", 1, types.IntKind, false),
schema.NewColumn("date", 2, types.TimestampKind, false),
+1 -4
View File
@@ -38,15 +38,12 @@ func SchemasTableSqlSchema() sql.Schema {
// The fixed dolt schema for the `dolt_schemas` table.
func SchemasTableSchema() schema.Schema {
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn(doltdb.SchemasTablesTypeCol, schema.DoltSchemasTypeTag, types.StringKind, false),
schema.NewColumn(doltdb.SchemasTablesNameCol, schema.DoltSchemasNameTag, types.StringKind, false),
schema.NewColumn(doltdb.SchemasTablesFragmentCol, schema.DoltSchemasFragmentTag, types.StringKind, false),
schema.NewColumn(doltdb.SchemasTablesIdCol, schema.DoltSchemasIdTag, types.IntKind, true, schema.NotNullConstraint{}),
)
if err != nil {
panic(err) // should never happen
}
return schema.MustSchemaFromCols(colColl)
}
+7 -33
View File
@@ -39,10 +39,7 @@ func NewResultSetSchema(colNamesAndTypes ...interface{}) schema.Schema {
cols[i/2] = schema.NewColumn(name, uint64(i/2), nomsKind, false)
}
collection, err := schema.NewColCollection(cols...)
if err != nil {
panic("unexpected error " + err.Error())
}
collection := schema.NewColCollection(cols...)
return schema.UnkeyedSchemaFromCols(collection)
}
@@ -56,10 +53,7 @@ func NewResultSetRow(colVals ...types.Value) row.Row {
cols[i] = schema.NewColumn(fmt.Sprintf("%v", i), uint64(i), nomsKind, false)
}
collection, err := schema.NewColCollection(cols...)
if err != nil {
panic("unexpected error " + err.Error())
}
collection := schema.NewColCollection(cols...)
sch := schema.UnkeyedSchemaFromCols(collection)
r, err := row.New(types.Format_7_18, sch, taggedVals)
@@ -97,11 +91,7 @@ func NewRowWithPks(pkColVals []types.Value, nonPkVals ...types.Value) row.Row {
tag++
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
panic(err.Error())
}
colColl := schema.NewColCollection(cols...)
sch := schema.MustSchemaFromCols(colColl)
r, err := row.New(types.Format_7_18, sch, taggedVals)
@@ -165,11 +155,7 @@ func NewSchemaForTable(tableName string, colNamesAndTypes ...interface{}) schema
cols[i/2] = schema.NewColumn(name, tag, nomsKind, isPk, constraints...)
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
panic(err.Error())
}
colColl := schema.NewColCollection(cols...)
return schema.MustSchemaFromCols(colColl)
}
@@ -200,11 +186,7 @@ func ConcatRows(schemasAndRows ...interface{}) row.Row {
})
}
colCol, err := schema.NewColCollection(cols...)
if err != nil {
panic(err.Error())
}
colCol := schema.NewColCollection(cols...)
r, err := row.New(types.Format_7_18, schema.UnkeyedSchemaFromCols(colCol), taggedVals)
if err != nil {
@@ -277,11 +259,7 @@ func CompressSchema(sch schema.Schema, colNames ...string) schema.Schema {
})
}
colCol, err := schema.NewColCollection(cols...)
if err != nil {
panic(err.Error())
}
colCol := schema.NewColCollection(cols...)
return schema.UnkeyedSchemaFromCols(colCol)
}
@@ -300,10 +278,6 @@ func CompressSchemas(schs ...schema.Schema) schema.Schema {
})
}
colCol, err := schema.NewColCollection(cols...)
if err != nil {
panic(err.Error())
}
colCol := schema.NewColCollection(cols...)
return schema.UnkeyedSchemaFromCols(colCol)
}
+1 -1
View File
@@ -983,7 +983,7 @@ func TestAlterSystemTables(t *testing.T) {
})
dtestutils.CreateTestTable(t, dEnv, "dolt_docs",
doltdocs.DoltDocsSchema,
doltdocs.Schema,
NewRow(types.String("LICENSE.md"), types.String("A license")))
dtestutils.CreateTestTable(t, dEnv, doltdb.DoltQueryCatalogTableName,
dtables.DoltQueryCatalogSchema,
+1 -1
View File
@@ -194,7 +194,7 @@ var systemTableDeleteTests = []DeleteTest{
{
Name: "delete dolt_docs",
AdditionalSetup: CreateTableFn("dolt_docs",
doltdocs.DoltDocsSchema,
doltdocs.Schema,
NewRow(types.String("LICENSE.md"), types.String("A license"))),
DeleteQuery: "delete from dolt_docs",
ExpectedErr: "cannot delete from table",
+1 -1
View File
@@ -401,7 +401,7 @@ var systemTableInsertTests = []InsertTest{
{
Name: "insert into dolt_docs",
AdditionalSetup: CreateTableFn("dolt_docs",
doltdocs.DoltDocsSchema,
doltdocs.Schema,
NewRow(types.String("LICENSE.md"), types.String("A license"))),
InsertQuery: "insert into dolt_docs (doc_name, doc_text) values ('README.md', 'Some text')",
ExpectedErr: "cannot insert into table",
@@ -256,7 +256,7 @@ var systemTableReplaceTests = []ReplaceTest{
{
Name: "replace into dolt_docs",
AdditionalSetup: CreateTableFn("dolt_docs",
doltdocs.DoltDocsSchema,
doltdocs.Schema,
NewRow(types.String("LICENSE.md"), types.String("A license"))),
ReplaceQuery: "replace into dolt_docs (doc_name, doc_text) values ('README.md', 'Some text')",
ExpectedErr: "cannot insert into table",
+4 -4
View File
@@ -1465,15 +1465,15 @@ var systemTableSelectTests = []SelectTest{
{
Name: "select from dolt_docs",
AdditionalSetup: CreateTableFn("dolt_docs",
doltdocs.DoltDocsSchema,
NewRowWithSchema(doltdocs.DoltDocsSchema,
doltdocs.Schema,
NewRowWithSchema(doltdocs.Schema,
types.String("LICENSE.md"),
types.String("A license")),
),
Query: "select * from dolt_docs",
ExpectedRows: ToSqlRows(CompressSchema(doltdocs.DoltDocsSchema),
ExpectedRows: ToSqlRows(CompressSchema(doltdocs.Schema),
NewRow(types.String("LICENSE.md"), types.String("A license"))),
ExpectedSchema: CompressSchema(doltdocs.DoltDocsSchema),
ExpectedSchema: CompressSchema(doltdocs.Schema),
},
{
Name: "select from dolt_query_catalog",
+1 -1
View File
@@ -378,7 +378,7 @@ var systemTableUpdateTests = []UpdateTest{
{
Name: "update dolt_docs",
AdditionalSetup: CreateTableFn("dolt_docs",
doltdocs.DoltDocsSchema,
doltdocs.Schema,
NewRow(types.String("LICENSE.md"), types.String("A license"))),
UpdateQuery: "update dolt_docs set doc_text = 'Some text')",
ExpectedErr: "cannot insert into table",
@@ -39,11 +39,7 @@ func ToDoltResultSchema(sqlSchema sql.Schema) (schema.Schema, error) {
cols = append(cols, convertedCol)
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
return nil, err
}
colColl := schema.NewColCollection(cols...)
return schema.UnkeyedSchemaFromCols(colColl), nil
}
@@ -113,10 +109,7 @@ func ToDoltSchema(ctx context.Context, root *doltdb.RootValue, tableName string,
cols = append(cols, convertedCol)
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
return nil, err
}
colColl := schema.NewColCollection(cols...)
err = schema.ValidateForInsert(colColl)
if err != nil {
@@ -35,11 +35,10 @@ func TestCompositeTableReader(t *testing.T) {
ctx := context.Background()
coll, err := schema.NewColCollection(
coll := schema.NewColCollection(
schema.NewColumn("id", 0, types.UintKind, true, schema.NotNullConstraint{}),
schema.NewColumn("val", 1, types.IntKind, false),
)
require.NoError(t, err)
sch, err := schema.SchemaFromCols(coll)
require.NoError(t, err)
@@ -58,11 +58,10 @@ func TestIndexEditorConcurrency(t *testing.T) {
format := types.Format_7_18
db, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
schema.NewColumn("v1", 1, types.IntKind, false),
schema.NewColumn("v2", 2, types.IntKind, false))
require.NoError(t, err)
tableSch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
index, err := tableSch.Indexes().AddIndexByColNames("idx_concurrency", []string{"v1"}, schema.IndexProperties{IsUnique: false, Comment: ""})
@@ -146,11 +145,10 @@ func TestIndexEditorConcurrencyPostInsert(t *testing.T) {
format := types.Format_7_18
db, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
schema.NewColumn("v1", 1, types.IntKind, false),
schema.NewColumn("v2", 2, types.IntKind, false))
require.NoError(t, err)
tableSch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
index, err := tableSch.Indexes().AddIndexByColNames("idx_concurrency", []string{"v1"}, schema.IndexProperties{IsUnique: false, Comment: ""})
@@ -231,11 +229,10 @@ func TestIndexEditorConcurrencyUnique(t *testing.T) {
format := types.Format_7_18
db, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
schema.NewColumn("v1", 1, types.IntKind, false),
schema.NewColumn("v2", 2, types.IntKind, false))
require.NoError(t, err)
tableSch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
index, err := tableSch.Indexes().AddIndexByColNames("idx_concurrency", []string{"v1"}, schema.IndexProperties{IsUnique: true, Comment: ""})
@@ -319,10 +316,9 @@ func TestIndexEditorUniqueMultipleNil(t *testing.T) {
format := types.Format_7_18
db, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
schema.NewColumn("v1", 1, types.IntKind, false))
require.NoError(t, err)
tableSch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
index, err := tableSch.Indexes().AddIndexByColNames("idx_unique", []string{"v1"}, schema.IndexProperties{IsUnique: true, Comment: ""})
@@ -362,11 +358,10 @@ func TestIndexEditorWriteAfterFlush(t *testing.T) {
format := types.Format_7_18
db, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
schema.NewColumn("v1", 1, types.IntKind, false),
schema.NewColumn("v2", 2, types.IntKind, false))
require.NoError(t, err)
tableSch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
index, err := tableSch.Indexes().AddIndexByColNames("idx_concurrency", []string{"v1"}, schema.IndexProperties{IsUnique: false, Comment: ""})
@@ -425,10 +420,9 @@ func TestIndexEditorFlushClearsUniqueError(t *testing.T) {
format := types.Format_7_18
db, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
schema.NewColumn("v1", 1, types.IntKind, false))
require.NoError(t, err)
tableSch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
index, err := tableSch.Indexes().AddIndexByColNames("idx_unq", []string{"v1"}, schema.IndexProperties{IsUnique: true, Comment: ""})
@@ -659,7 +653,7 @@ func TestIndexRebuildingWithTwoIndexes(t *testing.T) {
func TestIndexRebuildingUniqueSuccessOneCol(t *testing.T) {
db, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_7_18, nil, nil)
colColl, _ := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk1", 1, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("v1", 2, types.IntKind, false),
schema.NewColumn("v2", 3, types.IntKind, false),
@@ -690,7 +684,7 @@ func TestIndexRebuildingUniqueSuccessOneCol(t *testing.T) {
func TestIndexRebuildingUniqueSuccessTwoCol(t *testing.T) {
db, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_7_18, nil, nil)
colColl, _ := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk1", 1, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("v1", 2, types.IntKind, false),
schema.NewColumn("v2", 3, types.IntKind, false),
@@ -721,7 +715,7 @@ func TestIndexRebuildingUniqueSuccessTwoCol(t *testing.T) {
func TestIndexRebuildingUniqueFailOneCol(t *testing.T) {
db, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_7_18, nil, nil)
colColl, _ := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk1", 1, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("v1", 2, types.IntKind, false),
schema.NewColumn("v2", 3, types.IntKind, false),
@@ -752,7 +746,7 @@ func TestIndexRebuildingUniqueFailOneCol(t *testing.T) {
func TestIndexRebuildingUniqueFailTwoCol(t *testing.T) {
db, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_7_18, nil, nil)
colColl, _ := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk1", 1, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("v1", 2, types.IntKind, false),
schema.NewColumn("v2", 3, types.IntKind, false),
@@ -830,7 +824,7 @@ func createTestRowDataFromTaggedValues(t *testing.T, vrw types.ValueReadWriter,
}
func createTestSchema(t *testing.T) schema.Schema {
colColl, _ := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("id", idTag, types.UUIDKind, true, schema.NotNullConstraint{}),
schema.NewColumn("first", firstTag, types.StringKind, false, schema.NotNullConstraint{}),
schema.NewColumn("last", lastTag, types.StringKind, false, schema.NotNullConstraint{}),
@@ -45,11 +45,10 @@ func TestTableEditorConcurrency(t *testing.T) {
format := types.Format_7_18
db, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
schema.NewColumn("v1", 1, types.IntKind, false),
schema.NewColumn("v2", 2, types.IntKind, false))
require.NoError(t, err)
tableSch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
tableSchVal, err := encoding.MarshalSchemaAsNomsValue(context.Background(), db, tableSch)
@@ -142,11 +141,10 @@ func TestTableEditorConcurrencyPostInsert(t *testing.T) {
format := types.Format_7_18
db, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
schema.NewColumn("v1", 1, types.IntKind, false),
schema.NewColumn("v2", 2, types.IntKind, false))
require.NoError(t, err)
tableSch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
tableSchVal, err := encoding.MarshalSchemaAsNomsValue(context.Background(), db, tableSch)
@@ -237,11 +235,10 @@ func TestTableEditorWriteAfterFlush(t *testing.T) {
format := types.Format_7_18
db, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
schema.NewColumn("v1", 1, types.IntKind, false),
schema.NewColumn("v2", 2, types.IntKind, false))
require.NoError(t, err)
tableSch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
tableSchVal, err := encoding.MarshalSchemaAsNomsValue(context.Background(), db, tableSch)
@@ -309,11 +306,10 @@ func TestTableEditorDuplicateKeyHandling(t *testing.T) {
format := types.Format_7_18
db, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
schema.NewColumn("v1", 1, types.IntKind, false),
schema.NewColumn("v2", 2, types.IntKind, false))
require.NoError(t, err)
tableSch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
tableSchVal, err := encoding.MarshalSchemaAsNomsValue(context.Background(), db, tableSch)
+1 -1
View File
@@ -25,7 +25,7 @@ import (
)
func TestBadRow(t *testing.T) {
cols, _ := schema.NewColCollection(schema.NewColumn("id", 0, types.IntKind, true))
cols := schema.NewColCollection(schema.NewColumn("id", 0, types.IntKind, true))
sch, err := schema.SchemaFromCols(cols)
assert.NoError(t, err)
@@ -32,7 +32,7 @@ const (
greatTag
)
var fields, _ = schema.NewColCollection(
var fields = schema.NewColCollection(
schema.Column{Name: "name", Tag: nameTag, Kind: types.StringKind, IsPartOfPK: true, TypeInfo: typeinfo.StringDefaultType, Constraints: nil},
schema.Column{Name: "age", Tag: ageTag, Kind: types.UintKind, IsPartOfPK: true, TypeInfo: typeinfo.Uint64Type, Constraints: nil},
schema.Column{Name: "title", Tag: titleTag, Kind: types.StringKind, IsPartOfPK: true, TypeInfo: typeinfo.StringDefaultType, Constraints: nil},
-16
View File
@@ -1,16 +0,0 @@
// Copyright 2019 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package typed provides helper functions and utility classes for working with typed table data.
package typed
@@ -48,7 +48,7 @@ func TestReader(t *testing.T) {
fs := filesys.EmptyInMemFS("/")
require.NoError(t, fs.WriteFile("file.json", []byte(testJSON)))
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.Column{
Name: "id",
Tag: 0,
@@ -71,7 +71,6 @@ func TestReader(t *testing.T) {
TypeInfo: typeinfo.StringDefaultType,
},
)
require.NoError(t, err)
sch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
@@ -129,7 +128,7 @@ func TestReaderBadJson(t *testing.T) {
fs := filesys.EmptyInMemFS("/")
require.NoError(t, fs.WriteFile("file.json", []byte(testJSON)))
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.Column{
Name: "id",
Tag: 0,
@@ -152,7 +151,6 @@ func TestReaderBadJson(t *testing.T) {
TypeInfo: typeinfo.StringDefaultType,
},
)
require.NoError(t, err)
sch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
@@ -100,10 +100,9 @@ func mustTuple(id int64) types.Tuple {
func TestRangeReader(t *testing.T) {
ctx := context.Background()
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("id", pkTag, types.IntKind, true),
schema.NewColumn("val", valTag, types.IntKind, false))
require.NoError(t, err)
sch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
@@ -158,10 +157,9 @@ func TestRangeReader(t *testing.T) {
func TestRangeReaderOnEmptyMap(t *testing.T) {
ctx := context.Background()
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("id", pkTag, types.IntKind, true),
schema.NewColumn("val", valTag, types.IntKind, false))
require.NoError(t, err)
sch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
@@ -39,7 +39,7 @@ const (
titleColTag = 1
)
var colColl, _ = schema.NewColCollection(
var colColl = schema.NewColCollection(
schema.NewColumn(idCol, idColTag, types.UUIDKind, true, schema.NotNullConstraint{}),
schema.NewColumn(nameCol, nameColTag, types.StringKind, false),
schema.NewColumn(ageCol, ageColTag, types.UintKind, false),
@@ -36,10 +36,9 @@ const (
func TestReaderForKeys(t *testing.T) {
ctx := context.Background()
colColl, err := schema.NewColCollection(
colColl := schema.NewColCollection(
schema.NewColumn("id", pkTag, types.IntKind, true),
schema.NewColumn("val", valTag, types.IntKind, false))
require.NoError(t, err)
sch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
@@ -1,42 +0,0 @@
// Copyright 2019 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package typed
import (
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
)
func TypedSchemaUnion(schemas ...schema.Schema) (schema.Schema, error) {
var allCols []schema.Column
for _, sch := range schemas {
err := sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
allCols = append(allCols, col)
return false, nil
})
if err != nil {
return nil, err
}
}
allColColl, err := schema.NewColCollection(allCols...)
if err != nil {
return nil, err
}
return schema.SchemaFromCols(allColColl)
}
@@ -42,7 +42,7 @@ func getSampleRows() (rows []row.Row) {
{Name: ageColName, Tag: ageColTag, Kind: types.UintKind, IsPartOfPK: false, Constraints: nil},
{Name: titleColName, Tag: titleColTag, Kind: types.StringKind, IsPartOfPK: false, Constraints: nil},
}
colColl, _ := schema.NewColCollection(inCols...)
colColl := schema.NewColCollection(inCols...)
rowSch := schema.MustSchemaFromCols(colColl)
return []row.Row{
mustRow(row.New(types.Format_7_18, rowSch, row.TaggedValues{
@@ -95,7 +95,7 @@ func TestHandleRow(t *testing.T) {
func testSchema() schema.Schema {
col1 := schema.NewColumn("col1", 0, types.StringKind, false)
col2 := schema.NewColumn("col2", 1, types.StringKind, false)
colColl, _ := schema.NewColCollection(col1, col2)
colColl := schema.NewColCollection(col1, col2)
return schema.UnkeyedSchemaFromCols(colColl)
}
@@ -51,7 +51,7 @@ func TestWriter(t *testing.T) {
{Name: ageColName, Tag: ageColTag, Kind: types.StringKind, IsPartOfPK: false, Constraints: nil},
{Name: titleColName, Tag: titleColTag, Kind: types.StringKind, IsPartOfPK: false, Constraints: nil},
}
colColl, _ := schema.NewColCollection(inCols...)
colColl := schema.NewColCollection(inCols...)
rowSch := schema.UnkeyedSchemaFromCols(colColl)
// Simulate fixed-width string values that the table writer needs to function.
@@ -215,7 +215,7 @@ func TestEastAsianLanguages(t *testing.T) {
{Name: ageColName, Tag: ageColTag, Kind: types.StringKind, IsPartOfPK: false, Constraints: nil},
{Name: titleColName, Tag: titleColTag, Kind: types.StringKind, IsPartOfPK: false, Constraints: nil},
}
colColl, _ := schema.NewColCollection(inCols...)
colColl := schema.NewColCollection(inCols...)
rowSch := schema.UnkeyedSchemaFromCols(colColl)
// Simulate fixed-width string values that the table writer needs to function.
@@ -18,7 +18,6 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/row"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/schema/typeinfo"
"github.com/dolthub/dolt/go/libraries/doltcore/table/typed"
"github.com/dolthub/dolt/go/store/types"
)
@@ -41,7 +40,7 @@ func NewUntypedSchemaWithFirstTag(firstTag uint64, colNames ...string) (map[stri
nameToTag[name] = tag
}
colColl, _ := schema.NewColCollection(cols...)
colColl := schema.NewColCollection(cols...)
sch := schema.MustSchemaFromCols(colColl)
return nameToTag, sch
@@ -92,11 +91,7 @@ func UntypeSchema(sch schema.Schema) (schema.Schema, error) {
return nil, err
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
return nil, err
}
colColl := schema.NewColCollection(cols...)
return schema.SchemaFromCols(colColl)
}
@@ -116,11 +111,7 @@ func UnkeySchema(sch schema.Schema) (schema.Schema, error) {
return nil, err
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
return nil, err
}
colColl := schema.NewColCollection(cols...)
return schema.UnkeyedSchemaFromCols(colColl), nil
}
@@ -142,25 +133,40 @@ func UntypeUnkeySchema(sch schema.Schema) (schema.Schema, error) {
return nil, err
}
colColl, err := schema.NewColCollection(cols...)
if err != nil {
return nil, err
}
colColl := schema.NewColCollection(cols...)
return schema.UnkeyedSchemaFromCols(colColl), nil
}
// UntypedSchemaUnion takes an arbitrary number of schemas and provides the union of all of their key and non-key columns.
// The columns will all be of type types.StringKind and and IsPartOfPK will be false for every column, and all of the
// columns will be in the schemas non-key ColumnCollection.
// columns will be in the schemas non-key ColumnCollection. Columns that share tags must have compatible types.
func UntypedSchemaUnion(schemas ...schema.Schema) (schema.Schema, error) {
// todo: use colcoll union
unionSch, err := typed.TypedSchemaUnion(schemas...)
var allCols []schema.Column
tags := make(map[uint64]schema.Column)
for _, sch := range schemas {
err := sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
if existingCol, ok := tags[tag]; !ok {
tags[tag] = col
allCols = append(allCols, col)
} else if !existingCol.Compatible(col) {
return true, schema.ErrColTagCollision
}
return false, nil
})
if err != nil {
return nil, err
}
}
allColColl := schema.NewColCollection(allCols...)
sch, err := schema.SchemaFromCols(allColColl)
if err != nil {
return nil, err
}
return UntypeSchema(unionSch)
return UntypeSchema(sch)
}
@@ -20,6 +20,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/schema/typeinfo"
"github.com/dolthub/dolt/go/store/types"
)
@@ -105,7 +106,7 @@ func TestUntypedSchemaUnion(t *testing.T) {
schema.NewColumn("e", 4, types.BoolKind, false),
}
untypedColColl, _ := schema.NewColCollection(
untypedColColl := schema.NewColCollection(
schema.NewColumn("a", 0, types.StringKind, true, schema.NotNullConstraint{}),
schema.NewColumn("b", 1, types.StringKind, true),
schema.NewColumn("c", 2, types.StringKind, true),
@@ -113,7 +114,9 @@ func TestUntypedSchemaUnion(t *testing.T) {
schema.NewColumn("e", 4, types.StringKind, false))
unequalColCollumn := cols[1]
unequalColCollumn.Name = "bad"
unequalColCollumn.Name = "incompatible_type"
unequalColCollumn.TypeInfo = typeinfo.DatetimeType
unequalColCollumn.Kind = types.TimestampKind
untypedSch := schema.MustSchemaFromCols(untypedColColl)
@@ -128,8 +131,8 @@ func TestUntypedSchemaUnion(t *testing.T) {
}
for i, test := range tests {
colCollA, _ := schema.NewColCollection(test.colsA...)
colCollB, _ := schema.NewColCollection(test.colsB...)
colCollA := schema.NewColCollection(test.colsA...)
colCollB := schema.NewColCollection(test.colsB...)
schA := schema.MustSchemaFromCols(colCollA)
schB := schema.MustSchemaFromCols(colCollB)
-2
View File
@@ -292,5 +292,3 @@ func BenchmarkMapItr(b *testing.B) {
require.NoError(b, err)
}
}*/