dolt/go/libraries/doltcore/row: Thread Format() through Row.

This commit is contained in:
Aaron Son
2019-07-09 11:33:16 -07:00
committed by Brian Hendriks
parent b27c57c6da
commit 1f6241f7dc
56 changed files with 205 additions and 166 deletions
+1 -1
View File
@@ -25,7 +25,7 @@ type DataWindow struct {
func appendRow(drs []*DimRow, toUntyped, toTyped *rowconv.RowConverter, k, v types.Value) ([]*DimRow, bool) {
if !types.IsNull(k) && !types.IsNull(v) {
r := row.FromNoms(toUntyped.SrcSch, k.(types.Tuple), v.(types.Tuple))
r := row.FromNoms(types.Format_7_18, toUntyped.SrcSch, k.(types.Tuple), v.(types.Tuple))
dr, err := NewDimRow(r, toUntyped, toTyped)
if err != nil {
+4 -4
View File
@@ -18,7 +18,7 @@ type DimRow struct {
}
func NewDimRow(r row.Row, toUntyped, toTyped *rowconv.RowConverter) (*DimRow, error) {
key := r.NomsMapKey(types.Format_7_18, toUntyped.SrcSch).Value(context.Background())
key := r.NomsMapKey(toUntyped.SrcSch).Value(context.Background())
untyped, err := toUntyped.Convert(r)
if err != nil {
@@ -29,7 +29,7 @@ func NewDimRow(r row.Row, toUntyped, toTyped *rowconv.RowConverter) (*DimRow, er
}
func (dr *DimRow) StoreValue(me *types.MapEditor) *types.MapEditor {
r := row.New(dr.toTyped.SrcSch, dr.currentVals)
r := row.New(types.Format_7_18, dr.toTyped.SrcSch, dr.currentVals)
typed, err := dr.toTyped.Convert(r)
if err != nil {
@@ -37,7 +37,7 @@ func (dr *DimRow) StoreValue(me *types.MapEditor) *types.MapEditor {
}
typedSch := dr.toTyped.DestSch
key := typed.NomsMapKey(me.Format(), typedSch).Value(context.Background())
key := typed.NomsMapKey(typedSch).Value(context.Background())
if !dr.key.Equals(key) {
me = me.Remove(dr.key)
@@ -47,7 +47,7 @@ func (dr *DimRow) StoreValue(me *types.MapEditor) *types.MapEditor {
dr.dbVals = dr.currentVals
log.Println("stored vals")
return me.Set(key, typed.NomsMapValue(me.Format(), typedSch))
return me.Set(key, typed.NomsMapValue(typedSch))
}
func (dr *DimRow) UpdateVal(tag uint64, str string) error {
+2 -2
View File
@@ -113,7 +113,7 @@ func PutRow(commandStr string, args []string, dEnv *env.DoltEnv) int {
if verr == nil {
me := tbl.GetRowData(context.TODO()).Edit()
updated := me.Set(row.NomsMapKey(types.Format_7_18, sch), row.NomsMapValue(types.Format_7_18, sch)).Map(context.TODO())
updated := me.Set(row.NomsMapKey(sch), row.NomsMapValue(sch)).Map(context.TODO())
tbl = tbl.UpdateRows(context.Background(), updated)
root = root.PutTable(context.Background(), dEnv.DoltDB, prArgs.TableName, tbl)
@@ -159,7 +159,7 @@ func createRow(sch schema.Schema, prArgs *putRowArgs) (row.Row, errhand.VerboseE
return nil, errhand.BuildDError("failed to create row converter").AddCause(err).Build()
}
untypedRow := row.New(untypedSch, untypedTaggedVals)
untypedRow := row.New(types.Format_7_18, untypedSch, untypedTaggedVals)
typedRow, err := rconv.Convert(untypedRow)
if err != nil {
+1 -1
View File
@@ -315,7 +315,7 @@ var noConfLabel = types.String(" ")
func CnfTransformer(inSch, outSch schema.Schema, conflicts types.Map) func(inRow row.Row, props pipeline.ReadableMap) (rowData []*pipeline.TransformedRowResult, badRowDetails string) {
return func(inRow row.Row, props pipeline.ReadableMap) ([]*pipeline.TransformedRowResult, string) {
ctx := context.TODO()
key := inRow.NomsMapKey(types.Format_7_18, inSch)
key := inRow.NomsMapKey(inSch)
var err error
if conflicts.Has(ctx, key.Value(ctx)) {
+1 -1
View File
@@ -119,7 +119,7 @@ func (cds *ColorDiffSink) ProcRowWithProps(r row.Row, props pipeline.ReadableMap
return false
})
r = row.New(cds.sch, taggedVals)
r = row.New(types.Format_7_18, cds.sch, taggedVals)
return cds.ttw.WriteRow(context.TODO(), r)
}
+2 -2
View File
@@ -101,12 +101,12 @@ func (rdRd *RowDiffSource) NextDiff() (row.Row, pipeline.ImmutableProperties, er
}
if d.OldValue != nil {
oldRow := row.FromNoms(originalOldSch, d.KeyValue.(types.Tuple), d.OldValue.(types.Tuple))
oldRow := row.FromNoms(types.Format_7_18, originalOldSch, d.KeyValue.(types.Tuple), d.OldValue.(types.Tuple))
mappedOld, _ = rdRd.oldConv.Convert(oldRow)
}
if d.NewValue != nil {
newRow := row.FromNoms(originalNewSch, d.KeyValue.(types.Tuple), d.NewValue.(types.Tuple))
newRow := row.FromNoms(types.Format_7_18, originalNewSch, d.KeyValue.(types.Tuple), d.NewValue.(types.Tuple))
mappedNew, _ = rdRd.newConv.Convert(newRow)
}
+2 -2
View File
@@ -201,7 +201,7 @@ func (t *Table) GetRow(ctx context.Context, pk types.Tuple, sch schema.Schema) (
return nil, false
}
return row.FromNoms(sch, pk, fieldsVal.(types.Tuple)), true
return row.FromNoms(types.Format_7_18, sch, pk, fieldsVal.(types.Tuple)), true
}
// GetRows takes in a PKItr which will supply a stream of primary keys to be pulled from the table. Each key is
@@ -225,7 +225,7 @@ func (t *Table) GetRows(ctx context.Context, pkItr PKItr, numPKs int, sch schema
if fieldsVal == nil {
missing = append(missing, pk)
} else {
r := row.FromNoms(sch, pk, fieldsVal.(types.Tuple))
r := row.FromNoms(types.Format_7_18, sch, pk, fieldsVal.(types.Tuple))
rows = append(rows, r)
}
}
+5 -5
View File
@@ -18,18 +18,18 @@ var id3, _ = uuid.NewRandom()
func createTestRowData(vrw types.ValueReadWriter, sch schema.Schema) (types.Map, []row.Row) {
rows := make([]row.Row, 4)
rows[0] = row.New(sch, row.TaggedValues{
rows[0] = row.New(types.Format_7_18, sch, row.TaggedValues{
idTag: types.UUID(id0), firstTag: types.String("bill"), lastTag: types.String("billerson"), ageTag: types.Uint(53)})
rows[1] = row.New(sch, row.TaggedValues{
rows[1] = row.New(types.Format_7_18, sch, row.TaggedValues{
idTag: types.UUID(id1), firstTag: types.String("eric"), lastTag: types.String("ericson"), isMarriedTag: types.Bool(true), ageTag: types.Uint(21)})
rows[2] = row.New(sch, row.TaggedValues{
rows[2] = row.New(types.Format_7_18, sch, row.TaggedValues{
idTag: types.UUID(id2), firstTag: types.String("john"), lastTag: types.String("johnson"), isMarriedTag: types.Bool(false), ageTag: types.Uint(53)})
rows[3] = row.New(sch, row.TaggedValues{
rows[3] = row.New(types.Format_7_18, sch, row.TaggedValues{
idTag: types.UUID(id3), firstTag: types.String("robert"), lastTag: types.String("robertson"), ageTag: types.Uint(36)})
ed := types.NewMap(context.Background(), vrw).Edit()
for _, r := range rows {
ed = ed.Set(r.NomsMapKey(types.Format_7_18, sch), r.NomsMapValue(types.Format_7_18, sch))
ed = ed.Set(r.NomsMapKey(sch), r.NomsMapValue(sch))
}
return ed.Map(context.Background()), rows
+7 -6
View File
@@ -1,6 +1,9 @@
package dtestutils
import (
"strconv"
"testing"
"github.com/google/uuid"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/schema"
@@ -8,8 +11,6 @@ import (
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table/untyped"
"github.com/liquidata-inc/ld/dolt/go/store/types"
"github.com/stretchr/testify/require"
"strconv"
"testing"
)
var UUIDS = []uuid.UUID{
@@ -59,7 +60,7 @@ func init() {
marriedStr = "false"
}
TypedRows = append(TypedRows, row.New(TypedSchema, taggedVals))
TypedRows = append(TypedRows, row.New(types.Format_7_18, TypedSchema, taggedVals))
taggedVals = row.TaggedValues{
IdTag: types.String(UUIDS[i].String()),
@@ -69,7 +70,7 @@ func init() {
IsMarriedTag: types.String(marriedStr),
}
UntypedRows = append(UntypedRows, row.New(UntypedSchema, taggedVals))
UntypedRows = append(UntypedRows, row.New(types.Format_7_18, UntypedSchema, taggedVals))
}
}
@@ -87,7 +88,7 @@ func NewTypedRow(id uuid.UUID, name string, age uint, isMarried bool, title *str
TitleTag: titleVal,
}
return row.New(TypedSchema, taggedVals)
return row.New(types.Format_7_18, TypedSchema, taggedVals)
}
func CreateTestDataTable(typed bool) (*table.InMemTable, schema.Schema) {
@@ -140,7 +141,7 @@ func ConvertToSchema(sch schema.Schema, rs ...row.Row) []row.Row {
}
return false
})
newRows[i] = row.New(sch, taggedVals)
newRows[i] = row.New(types.Format_7_18, sch, taggedVals)
}
return newRows
}
+4 -3
View File
@@ -2,6 +2,9 @@ package dtestutils
import (
"context"
"math"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
@@ -10,8 +13,6 @@ import (
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table/typed/noms"
"github.com/liquidata-inc/ld/dolt/go/store/types"
"github.com/stretchr/testify/require"
"math"
"testing"
)
// CreateSchema returns a schema from the columns given, panicking on any errors.
@@ -26,7 +27,7 @@ func NewRow(sch schema.Schema, values ...types.Value) row.Row {
for i := range values {
taggedVals[uint64(i)] = values[i]
}
return row.New(sch, taggedVals)
return row.New(types.Format_7_18, sch, taggedVals)
}
// AddColumnToSchema returns a new schema by adding the given column to the given schema. Will panic on an invalid
+3 -2
View File
@@ -4,6 +4,8 @@ import (
"context"
"errors"
"fmt"
"io"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/diff"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
@@ -12,7 +14,6 @@ import (
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table/untyped"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table/untyped/tabular"
"github.com/liquidata-inc/ld/dolt/go/store/types"
"io"
)
var WriteBufSize = 256 * 1024
@@ -104,7 +105,7 @@ func (cs *ConflictSink) ProcRowWithProps(r row.Row, props pipeline.ReadableMap)
return false
})
r = row.New(cs.sch, taggedVals)
r = row.New(types.Format_7_18, cs.sch, taggedVals)
return cs.ttw.WriteRow(context.TODO(), r)
}
@@ -2,6 +2,8 @@ package merge
import (
"context"
"io"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/rowconv"
@@ -9,7 +11,6 @@ import (
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table/pipeline"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table/untyped"
"github.com/liquidata-inc/ld/dolt/go/store/types"
"io"
)
const (
@@ -156,7 +157,7 @@ func createRow(key types.Tuple, nonKey types.Value, rowConv *rowconv.RowConverte
return nil
}
srcData := row.FromNoms(rowConv.SrcSch, key, nonKey.(types.Tuple))
srcData := row.FromNoms(types.Format_7_18, rowConv.SrcSch, key, nonKey.(types.Tuple))
row, err := rowConv.Convert(srcData)
if err != nil {
+1 -1
View File
@@ -55,7 +55,7 @@ func ResolveTable(ctx context.Context, vrw types.ValueReadWriter, tbl *doltdb.Ta
if types.IsNull(updated) {
rowEditor.Remove(key)
} else {
r := row.FromNoms(tblSch, key.(types.Tuple), updated.(types.Tuple))
r := row.FromNoms(types.Format_7_18, tblSch, key.(types.Tuple), updated.(types.Tuple))
if !row.IsValid(r, tblSch) {
itrErr = table.NewBadRow(r)
@@ -115,9 +115,9 @@ func init() {
fakeSchema = schema.SchemaFromCols(fakeFields)
imtRows = []row.Row{
row.New(fakeSchema, row.TaggedValues{0: types.String("a"), 1: types.String("1")}),
row.New(fakeSchema, row.TaggedValues{0: types.String("b"), 1: types.String("2")}),
row.New(fakeSchema, row.TaggedValues{0: types.String("c"), 1: types.String("3")}),
row.New(types.Format_7_18, fakeSchema, row.TaggedValues{0: types.String("a"), 1: types.String("1")}),
row.New(types.Format_7_18, fakeSchema, row.TaggedValues{0: types.String("b"), 1: types.String("2")}),
row.New(types.Format_7_18, fakeSchema, row.TaggedValues{0: types.String("c"), 1: types.String("3")}),
}
imt = table.NewInMemTableWithData(fakeSchema, imtRows)
+18 -13
View File
@@ -6,8 +6,9 @@ import (
)
type nomsRow struct {
key TaggedValues
value TaggedValues
key TaggedValues
value TaggedValues
format *types.Format
}
func (nr nomsRow) IterSchema(sch schema.Schema, cb func(tag uint64, val types.Value) (stop bool)) bool {
@@ -55,13 +56,17 @@ func (nr nomsRow) SetColVal(tag uint64, val types.Value, sch schema.Schema) (Row
rowVal = nr.value.Set(tag, val)
}
return nomsRow{rowKey, rowVal}, nil
return nomsRow{rowKey, rowVal, nr.format}, nil
}
panic("can't set a column whose tag isn't in the schema. verify before calling this function.")
}
func New(sch schema.Schema, colVals TaggedValues) Row {
func (nr nomsRow) Format() *types.Format {
return nr.format
}
func New(format *types.Format, sch schema.Schema, colVals TaggedValues) Row {
allCols := sch.GetAllCols()
keyVals := make(TaggedValues)
@@ -80,14 +85,14 @@ func New(sch schema.Schema, colVals TaggedValues) Row {
return false
})
return fromTaggedVals(sch, keyVals, nonKeyVals)
return fromTaggedVals(format, sch, keyVals, nonKeyVals)
}
// fromTaggedVals will take a schema, a map of tag to value for the key, and a map of tag to value for non key values,
// and generates a row. When a schema adds or removes columns from the non-key portion of the row, the schema will be
// updated, but the rows will not be touched. So the non-key portion of the row may contain values that are not in the
// schema (The keys must match the schema though).
func fromTaggedVals(sch schema.Schema, keyVals, nonKeyVals TaggedValues) Row {
func fromTaggedVals(format *types.Format, sch schema.Schema, keyVals, nonKeyVals TaggedValues) Row {
allCols := sch.GetAllCols()
keyVals.Iter(func(tag uint64, val types.Value) (stop bool) {
@@ -122,20 +127,20 @@ func fromTaggedVals(sch schema.Schema, keyVals, nonKeyVals TaggedValues) Row {
return false
})
return nomsRow{keyVals, filteredVals}
return nomsRow{keyVals, filteredVals, format}
}
func FromNoms(sch schema.Schema, nomsKey, nomsVal types.Tuple) Row {
func FromNoms(format *types.Format, sch schema.Schema, nomsKey, nomsVal types.Tuple) Row {
key := ParseTaggedValues(nomsKey)
val := ParseTaggedValues(nomsVal)
return fromTaggedVals(sch, key, val)
return fromTaggedVals(format, sch, key, val)
}
func (nr nomsRow) NomsMapKey(format *types.Format, sch schema.Schema) types.LesserValuable {
return nr.key.NomsTupleForTags(format, sch.GetPKCols().Tags, true)
func (nr nomsRow) NomsMapKey(sch schema.Schema) types.LesserValuable {
return nr.key.NomsTupleForTags(nr.format, sch.GetPKCols().Tags, true)
}
func (nr nomsRow) NomsMapValue(format *types.Format, sch schema.Schema) types.Valuable {
return nr.value.NomsTupleForTags(format, sch.GetNonPKCols().SortedTags, false)
func (nr nomsRow) NomsMapValue(sch schema.Schema) types.Valuable {
return nr.value.NomsTupleForTags(nr.format, sch.GetNonPKCols().SortedTags, false)
}
+19 -19
View File
@@ -54,7 +54,7 @@ func newTestRow() Row {
titleColTag: titleVal,
}
return New(sch, vals)
return New(types.Format_7_18, sch, vals)
}
func TestItrRowCols(t *testing.T) {
@@ -78,7 +78,7 @@ func TestItrRowCols(t *testing.T) {
func TestFromNoms(t *testing.T) {
// New() will faithfully return null values in the row, but such columns won't ever be set when loaded from Noms.
// So we use a row here with no null values set to avoid this inconsistency.
expectedRow := New(sch, TaggedValues{
expectedRow := New(types.Format_7_18, sch, TaggedValues{
fnColTag: fnVal,
lnColTag: lnVal,
addrColTag: addrVal,
@@ -96,7 +96,7 @@ func TestFromNoms(t *testing.T) {
types.Uint(titleColTag), titleVal,
)
r := FromNoms(sch, keys, vals)
r := FromNoms(types.Format_7_18, sch, keys, vals)
assert.Equal(t, expectedRow, r)
})
@@ -107,11 +107,11 @@ func TestFromNoms(t *testing.T) {
)
vals := types.NewTuple(types.Format_7_18)
expectedRow := New(sch, TaggedValues{
expectedRow := New(types.Format_7_18, sch, TaggedValues{
fnColTag: fnVal,
lnColTag: lnVal,
})
r := FromNoms(sch, keys, vals)
r := FromNoms(types.Format_7_18, sch, keys, vals)
assert.Equal(t, expectedRow, r)
})
@@ -127,7 +127,7 @@ func TestFromNoms(t *testing.T) {
types.Uint(unusedTag), fnVal,
)
r := FromNoms(sch, keys, vals)
r := FromNoms(types.Format_7_18, sch, keys, vals)
assert.Equal(t, expectedRow, r)
})
@@ -142,7 +142,7 @@ func TestFromNoms(t *testing.T) {
)
assert.Panics(t, func() {
FromNoms(sch, keys, vals)
FromNoms(types.Format_7_18, sch, keys, vals)
})
})
@@ -157,7 +157,7 @@ func TestFromNoms(t *testing.T) {
)
assert.Panics(t, func() {
FromNoms(sch, keys, vals)
FromNoms(types.Format_7_18, sch, keys, vals)
})
})
@@ -174,7 +174,7 @@ func TestFromNoms(t *testing.T) {
)
assert.Panics(t, func() {
FromNoms(sch, keys, vals)
FromNoms(types.Format_7_18, sch, keys, vals)
})
})
@@ -190,7 +190,7 @@ func TestFromNoms(t *testing.T) {
)
assert.Panics(t, func() {
FromNoms(sch, keys, vals)
FromNoms(types.Format_7_18, sch, keys, vals)
})
})
}
@@ -207,21 +207,21 @@ func TestSetColVal(t *testing.T) {
updatedVal := types.String("sanchez")
r := newTestRow()
assert.Equal(t, r, New(sch, expected))
assert.Equal(t, r, New(types.Format_7_18, sch, expected))
updated, err := r.SetColVal(lnColTag, updatedVal, sch)
assert.NoError(t, err)
// validate calling set does not mutate the original row
assert.Equal(t, r, New(sch, expected))
assert.Equal(t, r, New(types.Format_7_18, sch, expected))
expected[lnColTag] = updatedVal
assert.Equal(t, updated, New(sch, expected))
assert.Equal(t, updated, New(types.Format_7_18, sch, expected))
// set to a nil value
updated, err = updated.SetColVal(titleColTag, nil, sch)
assert.NoError(t, err)
delete(expected, titleColTag)
assert.Equal(t, updated, New(sch, expected))
assert.Equal(t, updated, New(types.Format_7_18, sch, expected))
})
t.Run("invalid update", func(t *testing.T) {
@@ -234,7 +234,7 @@ func TestSetColVal(t *testing.T) {
r := newTestRow()
assert.Equal(t, r, New(sch, expected))
assert.Equal(t, r, New(types.Format_7_18, sch, expected))
// SetColVal allows an incorrect type to be set for a column
updatedRow, err := r.SetColVal(lnColTag, types.Bool(true), sch)
@@ -246,7 +246,7 @@ func TestSetColVal(t *testing.T) {
assert.Equal(t, uint64(lnColTag), invalidCol.Tag)
// validate calling set does not mutate the original row
assert.Equal(t, r, New(sch, expected))
assert.Equal(t, r, New(types.Format_7_18, sch, expected))
})
}
@@ -255,10 +255,10 @@ func TestConvToAndFromTuple(t *testing.T) {
r := newTestRow()
keyTpl := r.NomsMapKey(types.Format_7_18, sch).(TupleVals)
valTpl := r.NomsMapValue(types.Format_7_18, sch).(TupleVals)
keyTpl := r.NomsMapKey(sch).(TupleVals)
valTpl := r.NomsMapValue(sch).(TupleVals)
r2 := FromNoms(sch, keyTpl.Value(ctx).(types.Tuple), valTpl.Value(ctx).(types.Tuple))
r2 := FromNoms(types.Format_7_18, sch, keyTpl.Value(ctx).(types.Tuple), valTpl.Value(ctx).(types.Tuple))
fmt.Println(Fmt(context.Background(), r, sch))
fmt.Println(Fmt(context.Background(), r2, sch))
+4 -2
View File
@@ -12,10 +12,10 @@ var ErrRowNotValid = errors.New("invalid row for current schema")
type Row interface {
// Returns the noms map key for this row, using the schema provided.
NomsMapKey(format *types.Format, sch schema.Schema) types.LesserValuable
NomsMapKey(sch schema.Schema) types.LesserValuable
// Returns the noms map value for this row, using the schema provided.
NomsMapValue(format *types.Format, sch schema.Schema) types.Valuable
NomsMapValue(sch schema.Schema) types.Valuable
// Iterates over all the columns in the row. Columns that have no value set will not be visited.
IterCols(cb func(tag uint64, val types.Value) (stop bool)) bool
@@ -30,6 +30,8 @@ type Row interface {
// Sets a value for the column with the tag given, returning a new row with the update.
SetColVal(tag uint64, val types.Value, sch schema.Schema) (Row, error)
Format() *types.Format
}
func GetFieldByName(colName string, r Row, sch schema.Schema) (types.Value, bool) {
@@ -2,6 +2,7 @@ package rowconv
import (
"fmt"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/schema"
@@ -85,7 +86,7 @@ func (rc *RowConverter) Convert(inRow row.Row) (row.Row, error) {
return nil, err
}
outRow := row.New(rc.DestSch, outTaggedVals)
outRow := row.New(types.Format_7_18, rc.DestSch, outTaggedVals)
return outRow, nil
}
@@ -32,7 +32,7 @@ func TestRowConverter(t *testing.T) {
}
id, _ := uuid.NewRandom()
inRow := row.New(srcSch, row.TaggedValues{
inRow := row.New(types.Format_7_18, srcSch, row.TaggedValues{
0: types.UUID(id),
1: types.Float(1.25),
2: types.Uint(12345678),
@@ -46,7 +46,7 @@ func TestRowConverter(t *testing.T) {
outData := results[0].RowData
destSch := mapping.DestSch
expected := row.New(destSch, row.TaggedValues{
expected := row.New(types.Format_7_18, destSch, row.TaggedValues{
0: types.String(id.String()),
1: types.String("1.25"),
2: types.String("12345678"),
@@ -63,7 +63,7 @@ func updateTableWithNewSchema(ctx context.Context, db *doltdb.DoltDB, tbl *doltd
return true
}
me.Set(newRow.NomsMapKey(vrw.Format(), newSchema), newRow.NomsMapValue(vrw.Format(), newSchema))
me.Set(newRow.NomsMapKey(newSchema), newRow.NomsMapValue(newSchema))
return false
})
if updateErr != nil {
@@ -240,7 +240,7 @@ func TestAddColumnToTable(t *testing.T) {
rowData := updatedTable.GetRowData(ctx)
var foundRows []row.Row
rowData.Iter(ctx, func(key, value types.Value) (stop bool) {
foundRows = append(foundRows, row.FromNoms(tt.expectedSchema, key.(types.Tuple), value.(types.Tuple)))
foundRows = append(foundRows, row.FromNoms(types.Format_7_18, tt.expectedSchema, key.(types.Tuple), value.(types.Tuple)))
return false
})
@@ -66,7 +66,7 @@ func TestDropColumn(t *testing.T) {
rowData := updatedTable.GetRowData(ctx)
var foundRows []row.Row
rowData.Iter(ctx, func(key, value types.Value) (stop bool) {
foundRows = append(foundRows, row.FromNoms(dtestutils.TypedSchema, key.(types.Tuple), value.(types.Tuple)))
foundRows = append(foundRows, row.FromNoms(types.Format_7_18, dtestutils.TypedSchema, key.(types.Tuple), value.(types.Tuple)))
return false
})
@@ -82,7 +82,7 @@ func TestRenameColumn(t *testing.T) {
rowData := updatedTable.GetRowData(ctx)
var foundRows []row.Row
rowData.Iter(ctx, func(key, value types.Value) (stop bool) {
foundRows = append(foundRows, row.FromNoms(tt.expectedSchema, key.(types.Tuple), value.(types.Tuple)))
foundRows = append(foundRows, row.FromNoms(types.Format_7_18, tt.expectedSchema, key.(types.Tuple), value.(types.Tuple)))
return false
})
@@ -72,7 +72,7 @@ func TestRenameTable(t *testing.T) {
rowData := newTable.GetRowData(ctx)
var foundRows []row.Row
rowData.Iter(ctx, func(key, value types.Value) (stop bool) {
foundRows = append(foundRows, row.FromNoms(tt.expectedSchema, key.(types.Tuple), value.(types.Tuple)))
foundRows = append(foundRows, row.FromNoms(types.Format_7_18, tt.expectedSchema, key.(types.Tuple), value.(types.Tuple)))
return false
})
+4 -4
View File
@@ -411,7 +411,7 @@ func TestAddColumn(t *testing.T) {
rowData := updatedTable.GetRowData(ctx)
var foundRows []row.Row
rowData.Iter(ctx, func(key, value types.Value) (stop bool) {
foundRows = append(foundRows, row.FromNoms(tt.expectedSchema, key.(types.Tuple), value.(types.Tuple)))
foundRows = append(foundRows, row.FromNoms(types.Format_7_18, tt.expectedSchema, key.(types.Tuple), value.(types.Tuple)))
return false
})
@@ -546,7 +546,7 @@ func TestDropColumn(t *testing.T) {
rowData := updatedTable.GetRowData(ctx)
var foundRows []row.Row
rowData.Iter(ctx, func(key, value types.Value) (stop bool) {
foundRows = append(foundRows, row.FromNoms(updatedTable.GetSchema(ctx), key.(types.Tuple), value.(types.Tuple)))
foundRows = append(foundRows, row.FromNoms(types.Format_7_18, updatedTable.GetSchema(ctx), key.(types.Tuple), value.(types.Tuple)))
return false
})
@@ -672,7 +672,7 @@ func TestRenameColumn(t *testing.T) {
rowData := updatedTable.GetRowData(ctx)
var foundRows []row.Row
rowData.Iter(ctx, func(key, value types.Value) (stop bool) {
foundRows = append(foundRows, row.FromNoms(updatedTable.GetSchema(ctx), key.(types.Tuple), value.(types.Tuple)))
foundRows = append(foundRows, row.FromNoms(types.Format_7_18, updatedTable.GetSchema(ctx), key.(types.Tuple), value.(types.Tuple)))
return false
})
@@ -767,7 +767,7 @@ func TestRenameTable(t *testing.T) {
rowData := newTable.GetRowData(ctx)
var foundRows []row.Row
rowData.Iter(ctx, func(key, value types.Value) (stop bool) {
foundRows = append(foundRows, row.FromNoms(tt.expectedSchema, key.(types.Tuple), value.(types.Tuple)))
foundRows = append(foundRows, row.FromNoms(types.Format_7_18, tt.expectedSchema, key.(types.Tuple), value.(types.Tuple)))
return false
})
+1 -2
View File
@@ -10,7 +10,6 @@ import (
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table/typed/noms"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table/untyped/resultset"
"github.com/liquidata-inc/ld/dolt/go/store/types"
"vitess.io/vitess/go/vt/sqlparser"
)
@@ -81,7 +80,7 @@ func ExecuteDelete(ctx context.Context, db *doltdb.DoltDB, root *doltdb.RootValu
}
result.NumRowsDeleted++
me.Remove(r.NomsMapKey(types.Format_7_18, tableSch))
me.Remove(r.NomsMapKey(tableSch))
}
table = table.UpdateRows(ctx, me.Map(ctx))
+1 -1
View File
@@ -131,7 +131,7 @@ func TestExecuteDelete(t *testing.T) {
for _, r := range AllPeopleRows {
deletedIdx := FindRowIndex(r, tt.deletedRows)
key := r.NomsMapKey(types.Format_7_18, PeopleTestSchema)
key := r.NomsMapKey(PeopleTestSchema)
_, ok := table.GetRow(ctx, key.Value(ctx).(types.Tuple), PeopleTestSchema)
if deletedIdx >= 0 {
assert.False(t, ok, "Row not deleted: %v", r)
+3 -3
View File
@@ -95,7 +95,7 @@ func ExecuteInsert(ctx context.Context, db *doltdb.DoltDB, root *doltdb.RootValu
}
}
key := r.NomsMapKey(types.Format_7_18, tableSch).Value(ctx)
key := r.NomsMapKey(tableSch).Value(ctx)
rowExists := rowData.Get(ctx, key) != nil
// TODO(binformat)
@@ -111,7 +111,7 @@ func ExecuteInsert(ctx context.Context, db *doltdb.DoltDB, root *doltdb.RootValu
return errInsert("Duplicate primary key: '%v'", getPrimaryKeyString(r, tableSch))
}
}
me.Set(key, r.NomsMapValue(types.Format_7_18, tableSch))
me.Set(key, r.NomsMapValue(tableSch))
// TODO(binformat)
insertedPKHashes[key.Hash(types.Format_7_18)] = struct{}{}
@@ -264,7 +264,7 @@ func makeRow(columns []schema.Column, tableSch schema.Schema, tuple sqlparser.Va
}
}
return row.New(tableSch, taggedVals), nil
return row.New(types.Format_7_18, tableSch, taggedVals), nil
}
// Returns an error result with return type to match ExecuteInsert
+25 -6
View File
@@ -55,7 +55,7 @@ func TestExecuteInsert(t *testing.T) {
name: "insert one row, null values",
query: `insert into people (id, first, last, is_married, age, rating) values
(7, "Maggie", "Simpson", null, null, null)`,
insertedValues: []row.Row{row.New(PeopleTestSchema, row.TaggedValues{IdTag: types.Int(7), FirstTag: types.String("Maggie"), LastTag: types.String("Simpson")})},
insertedValues: []row.Row{row.New(types.Format_7_18, PeopleTestSchema, row.TaggedValues{IdTag: types.Int(7), FirstTag: types.String("Maggie"), LastTag: types.String("Simpson")})},
expectedResult: InsertResult{NumRowsInserted: 1},
},
{
@@ -253,8 +253,8 @@ func TestExecuteInsert(t *testing.T) {
(7, "Maggie", "Simpson"),
(8, "Milhouse", "Van Houten")`,
insertedValues: []row.Row{
row.New(PeopleTestSchema, row.TaggedValues{IdTag: types.Int(7), FirstTag: types.String("Maggie"), LastTag: types.String("Simpson")}),
row.New(PeopleTestSchema, row.TaggedValues{IdTag: types.Int(8), FirstTag: types.String("Milhouse"), LastTag: types.String("Van Houten")}),
row.New(types.Format_7_18, PeopleTestSchema, row.TaggedValues{IdTag: types.Int(7), FirstTag: types.String("Maggie"), LastTag: types.String("Simpson")}),
row.New(types.Format_7_18, PeopleTestSchema, row.TaggedValues{IdTag: types.Int(8), FirstTag: types.String("Milhouse"), LastTag: types.String("Van Houten")}),
},
expectedResult: InsertResult{NumRowsInserted: 2},
},
@@ -318,11 +318,30 @@ func TestExecuteInsert(t *testing.T) {
assert.True(t, ok)
for _, expectedRow := range tt.insertedValues {
foundRow, ok := table.GetRow(ctx, expectedRow.NomsMapKey(types.Format_7_18, PeopleTestSchema).Value(ctx).(types.Tuple), PeopleTestSchema)
foundRow, ok := table.GetRow(ctx, expectedRow.NomsMapKey(PeopleTestSchema).Value(ctx).(types.Tuple), PeopleTestSchema)
assert.True(t, ok, "Row not found: %v", expectedRow)
opts := cmp.Options{cmp.AllowUnexported(expectedRow), dtestutils.FloatComparer}
assert.True(t, cmp.Equal(expectedRow, foundRow, opts), "Rows not equals, found diff %v", cmp.Diff(expectedRow, foundRow, opts))
eq, diff := rowsEqual(expectedRow, foundRow)
assert.True(t, eq, "Rows not equals, found diff %v", diff)
}
})
}
}
func rowsEqual(expected, actual row.Row) (bool, string) {
er, ar := make(map[uint64]types.Value), make(map[uint64]types.Value)
expected.IterCols(func (t uint64, v types.Value) bool {
er[t] = v
return false
})
actual.IterCols(func (t uint64, v types.Value) bool {
ar[t] = v
return false
})
opts := cmp.Options{cmp.AllowUnexported(), dtestutils.FloatComparer}
eq := cmp.Equal(er, ar, opts)
var diff string
if !eq {
diff = cmp.Diff(er, ar, opts)
}
return eq, diff
}
+4 -3
View File
@@ -4,6 +4,9 @@ import (
"context"
"errors"
"fmt"
"strconv"
"time"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/schema"
@@ -11,8 +14,6 @@ import (
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table/typed/noms"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table/untyped/resultset"
"github.com/liquidata-inc/ld/dolt/go/store/types"
"strconv"
"time"
"vitess.io/vitess/go/vt/sqlparser"
)
@@ -647,7 +648,7 @@ func createOutputSchemaMappingTransform(selectStmt *SelectStatement) pipeline.Na
taggedVals[uint64(i)] = val
}
}
r := row.New(selectStmt.ResultSetSchema, taggedVals)
r := row.New(types.Format_7_18, selectStmt.ResultSetSchema, taggedVals)
return []*pipeline.TransformedRowResult{{r, nil}}, ""
}
+3 -2
View File
@@ -4,6 +4,7 @@ import (
"context"
"errors"
"fmt"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/schema"
@@ -153,7 +154,7 @@ func describeColumn(col schema.Column) row.Row {
4: types.String("NULL"), // TODO: when schemas store defaults, use them here
5: types.String(""), // Extra column reserved for future use
}
return row.New(showColumnsSchema(), taggedVals)
return row.New(types.Format_7_18, showColumnsSchema(), taggedVals)
}
// Takes a single-dimensional array of strings and transposes it to a 2D array, with a single element per row.
@@ -173,7 +174,7 @@ func toRows(ss [][]string, sch schema.Schema) []row.Row {
for tag, col := range r {
taggedVals[uint64(tag)] = types.String(col)
}
rows[i] = row.New(sch, taggedVals)
rows[i] = row.New(types.Format_7_18, sch, taggedVals)
}
return rows
}
@@ -108,7 +108,7 @@ func NewPeopleRow(id int, first, last string, isMarried bool, age int, rating fl
RatingTag: types.Float(rating),
}
return row.New(PeopleTestSchema, vals)
return row.New(types.Format_7_18, PeopleTestSchema, vals)
}
func newEpsRow(id int, name string, airdate int, rating float32) row.Row {
@@ -119,7 +119,7 @@ func newEpsRow(id int, name string, airdate int, rating float32) row.Row {
epRatingTag: types.Float(rating),
}
return row.New(EpisodesTestSchema, vals)
return row.New(types.Format_7_18, EpisodesTestSchema, vals)
}
func newAppsRow(charId, epId int, comment string) row.Row {
@@ -129,7 +129,7 @@ func newAppsRow(charId, epId int, comment string) row.Row {
appCommentsTag: types.String(comment),
}
return row.New(AppearancesTestSchema, vals)
return row.New(types.Format_7_18, AppearancesTestSchema, vals)
}
// Most rows don't have these optional fields set, as they aren't needed for basic testing
@@ -145,7 +145,7 @@ func NewPeopleRowWithOptionalFields(id int, first, last string, isMarried bool,
NumEpisodesTag: types.Uint(numEpisodes),
}
return row.New(PeopleTestSchema, vals)
return row.New(types.Format_7_18, PeopleTestSchema, vals)
}
// 6 characters
@@ -2,10 +2,11 @@ package sqltestutil
import (
"fmt"
"strconv"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/ld/dolt/go/store/types"
"strconv"
)
// Creates a new schema for a result set specified by the given pairs of column names and types. Column names are
@@ -45,7 +46,7 @@ func NewResultSetRow(colVals ...types.Value) row.Row {
}
sch := schema.UnkeyedSchemaFromCols(collection)
return row.New(sch, taggedVals)
return row.New(types.Format_7_18, sch, taggedVals)
}
// Creates a new row with the values given, using ascending tag numbers starting at 0.
@@ -73,7 +74,7 @@ func NewRow(colVals ...types.Value) row.Row {
sch := schema.SchemaFromCols(colColl)
return row.New(sch, taggedVals)
return row.New(types.Format_7_18, sch, taggedVals)
}
// Creates a new schema with the pairs of column names and types given, using ascending tag numbers starting at 0.
@@ -136,7 +137,7 @@ func ConcatRows(schemasAndRows ...interface{}) row.Row {
panic(err.Error())
}
return row.New(schema.UnkeyedSchemaFromCols(colCol), taggedVals)
return row.New(types.Format_7_18, schema.UnkeyedSchemaFromCols(colCol), taggedVals)
}
// Rewrites the tag numbers for the row given to begin at zero and be contiguous, just like result set schemas. We don't
@@ -155,11 +156,11 @@ func CompressRow(sch schema.Schema, r row.Row) row.Row {
})
// call to compress schema is a no-op in most cases
return row.New(CompressSchema(sch), compressedRow)
return row.New(types.Format_7_18, CompressSchema(sch), compressedRow)
}
// Compresses each of the rows given ala compressRow
func CompressRows(sch schema.Schema, rs ...row.Row, ) []row.Row {
func CompressRows(sch schema.Schema, rs ...row.Row) []row.Row {
compressed := make([]row.Row, len(rs))
for i := range rs {
compressed[i] = CompressRow(sch, rs[i])
@@ -224,4 +225,4 @@ func CompressSchemas(schs ...schema.Schema) schema.Schema {
}
return schema.UnkeyedSchemaFromCols(colCol)
}
}
+2 -3
View File
@@ -11,7 +11,6 @@ import (
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table/typed/noms"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table/untyped/resultset"
"github.com/liquidata-inc/ld/dolt/go/store/types"
"vitess.io/vitess/go/vt/sqlparser"
)
@@ -144,7 +143,7 @@ func ExecuteUpdate(ctx context.Context, db *doltdb.DoltDB, root *doltdb.RootValu
return nil, errFmt(ConstraintFailedFmt, col.Name, constraint)
}
tvs := r.NomsMapKey(types.Format_7_18, tableSch).(row.TupleVals)
tvs := r.NomsMapKey(tableSch).(row.TupleVals)
key := tvs.Value(ctx)
if anyColChanged {
@@ -153,7 +152,7 @@ func ExecuteUpdate(ctx context.Context, db *doltdb.DoltDB, root *doltdb.RootValu
result.NumRowsUnchanged += 1
}
me.Set(key, r.NomsMapValue(types.Format_7_18, tableSch))
me.Set(key, r.NomsMapValue(tableSch))
}
table = table.UpdateRows(ctx, me.Map(ctx))
+3 -4
View File
@@ -2,7 +2,6 @@ package sql
import (
"context"
"github.com/google/go-cmp/cmp"
"github.com/google/uuid"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/dtestutils"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
@@ -347,10 +346,10 @@ func TestExecuteUpdate(t *testing.T) {
expectedRow = tt.updatedRows[updatedIdx]
}
foundRow, ok := table.GetRow(ctx, expectedRow.NomsMapKey(types.Format_7_18, PeopleTestSchema).Value(ctx).(types.Tuple), PeopleTestSchema)
foundRow, ok := table.GetRow(ctx, expectedRow.NomsMapKey(PeopleTestSchema).Value(ctx).(types.Tuple), PeopleTestSchema)
assert.True(t, ok, "Row not found: %v", expectedRow)
opts := cmp.Options{cmp.AllowUnexported(expectedRow), dtestutils.FloatComparer}
assert.True(t, cmp.Equal(expectedRow, foundRow, opts), "Rows not equals, found diff %v", cmp.Diff(expectedRow, foundRow, opts))
eq, diff := rowsEqual(expectedRow, foundRow)
assert.True(t, eq, "Rows not equals, found diff %v", diff)
}
})
}
+4 -4
View File
@@ -1,12 +1,12 @@
package sqle
import (
"io"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/ld/dolt/go/store/types"
"github.com/src-d/go-mysql-server/sql"
"io"
)
// An iterator over the rows of a table.
@@ -32,7 +32,7 @@ func (itr *doltTableRowIter) Next() (sql.Row, error) {
return nil, io.EOF
}
doltRow := row.FromNoms(itr.table.sch, key.(types.Tuple), val.(types.Tuple))
doltRow := row.FromNoms(types.Format_7_18, itr.table.sch, key.(types.Tuple), val.(types.Tuple))
return doltRowToSqlRow(doltRow, itr.table.sch), nil
}
@@ -47,7 +47,7 @@ func doltRowToSqlRow(doltRow row.Row, sch schema.Schema) sql.Row {
i := 0
sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool) {
value, _:= doltRow.GetColVal(tag)
value, _ := doltRow.GetColVal(tag)
colVals[i] = doltColValToSqlColVal(value)
i++
return false
@@ -65,7 +65,7 @@ func SqlRowToDoltRow(r sql.Row, doltSchema schema.Schema) row.Row {
}
}
return row.New(doltSchema, taggedVals)
return row.New(types.Format_7_18, doltSchema, taggedVals)
}
// Returns the column value for a SQL column
+1 -1
View File
@@ -10,7 +10,7 @@ import (
func TestBadRow(t *testing.T) {
cols, _ := schema.NewColCollection(schema.NewColumn("id", 0, types.IntKind, true))
sch := schema.SchemaFromCols(cols)
emptyRow := row.New(sch, row.TaggedValues{})
emptyRow := row.New(types.Format_7_18, sch, row.TaggedValues{})
err := NewBadRow(emptyRow, "details")
+1 -1
View File
@@ -49,7 +49,7 @@ func (imt *InMemTable) AppendRow(r row.Row) error {
sort.Slice(imt.rows, func(i, j int) bool {
iRow := imt.rows[i]
jRow := imt.rows[j]
return iRow.NomsMapKey(types.Format_7_18, imt.sch).Less(types.Format_7_18, jRow.NomsMapKey(types.Format_7_18, imt.sch))
return iRow.NomsMapKey(imt.sch).Less(types.Format_7_18, jRow.NomsMapKey(imt.sch))
})
return nil
@@ -28,19 +28,19 @@ var rowSch = schema.SchemaFromCols(fields)
// These are in noms-key-sorted order, since InMemoryTable.AppendRow sorts its rows. This should probably be done
// programmatically instead of hard-coded.
var rows = []row.Row{
row.New(rowSch, row.TaggedValues{
row.New(types.Format_7_18, rowSch, row.TaggedValues{
nameTag: types.String("Bill Billerson"),
ageTag: types.Uint(32),
titleTag: types.String("Senior Dufus"),
greatTag: types.Bool(true),
}),
row.New(rowSch, row.TaggedValues{
row.New(types.Format_7_18, rowSch, row.TaggedValues{
nameTag: types.String("John Johnson"),
ageTag: types.Uint(21),
titleTag: types.String("Intern Dufus"),
greatTag: types.Bool(true),
}),
row.New(rowSch, row.TaggedValues{
row.New(types.Format_7_18, rowSch, row.TaggedValues{
nameTag: types.String("Rob Robertson"),
ageTag: types.Uint(25),
titleTag: types.String("Dufus"),
@@ -72,7 +72,7 @@ func TableFromJSON(ctx context.Context, fp string, vrw types.ValueReadWriter, sc
var rowMap types.Map
me := types.NewMap(ctx, vrw).Edit()
for _, row := range tblRows {
me = me.Set(row.NomsMapKey(vrw.Format(), sch), row.NomsMapValue(vrw.Format(), sch))
me = me.Set(row.NomsMapKey(sch), row.NomsMapValue(sch))
}
rowMap = me.Map(ctx)
@@ -117,5 +117,5 @@ func convToRow(sch schema.Schema, rowMap map[string]interface{}) (row.Row, error
}
}
return row.New(sch, taggedVals), nil
return row.New(types.Format_7_18, sch, taggedVals), nil
}
@@ -52,8 +52,8 @@ func (nmc *NomsMapCreator) WriteRow(ctx context.Context, r row.Row) error {
}
}()
pk := r.NomsMapKey(nmc.vrw.Format(), nmc.sch)
fieldVals := r.NomsMapValue(nmc.vrw.Format(), nmc.sch)
pk := r.NomsMapKey(nmc.sch)
fieldVals := r.NomsMapValue(nmc.sch)
if nmc.lastPK == nil || nmc.lastPK.Less(nmc.vrw.Format(), pk) {
pkVal := pk.Value(ctx)
@@ -81,8 +81,8 @@ func (nmu *NomsMapUpdater) WriteRow(ctx context.Context, r row.Row) error {
}
}()
pk := r.NomsMapKey(nmu.vrw.Format(), nmu.sch)
fieldVals := r.NomsMapValue(nmu.vrw.Format(), nmu.sch)
pk := r.NomsMapKey(nmu.sch)
fieldVals := r.NomsMapValue(nmu.sch)
nmu.acc.AddEdit(pk, fieldVals)
nmu.count++
@@ -56,7 +56,7 @@ func createRows(onlyUpdated, updatedAge bool) []row.Row {
ageColTag: types.Uint(age),
titleColTag: types.String(titles[i]),
}
rows = append(rows, row.New(sch, rowVals))
rows = append(rows, row.New(types.Format_7_18, sch, rowVals))
}
}
@@ -13,15 +13,16 @@ import (
// NomsMapReader is a TableReader that reads rows from a noms table which is stored in a types.Map where the key is
// a types.Value and the value is a types.Tuple of field values.
type NomsMapReader struct {
sch schema.Schema
itr types.MapIterator
sch schema.Schema
itr types.MapIterator
format *types.Format
}
// NewNomsMapReader creates a NomsMapReader for a given noms types.Map
func NewNomsMapReader(ctx context.Context, m types.Map, sch schema.Schema) *NomsMapReader {
itr := m.Iterator(ctx)
return &NomsMapReader{sch, itr}
return &NomsMapReader{sch, itr, m.Format()}
}
// GetSchema gets the schema of the rows that this reader will return
@@ -45,7 +46,7 @@ func (nmr *NomsMapReader) ReadRow(ctx context.Context) (row.Row, error) {
return nil, io.EOF
}
return row.FromNoms(nmr.sch, key.(types.Tuple), val.(types.Tuple)), nil
return row.FromNoms(nmr.format, nmr.sch, key.(types.Tuple), val.(types.Tuple)), nil
}
// Close should release resources being held
@@ -5,6 +5,9 @@ import (
"context"
"errors"
"fmt"
"io"
"strings"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/table"
@@ -12,8 +15,6 @@ import (
"github.com/liquidata-inc/ld/dolt/go/libraries/utils/filesys"
"github.com/liquidata-inc/ld/dolt/go/libraries/utils/iohelp"
"github.com/liquidata-inc/ld/dolt/go/store/types"
"io"
"strings"
)
// ReadBufSize is the size of the buffer used when reading the csv file. It is set at the package level and all
@@ -145,6 +146,6 @@ func (csvr *CSVReader) parseRow(line string) (row.Row, error) {
}
}
r := row.New(sch, taggedVals)
r := row.New(types.Format_7_18, sch, taggedVals)
return r, nil
}
@@ -42,15 +42,15 @@ John Johnson,21,Intern Dufus
colColl, _ := schema.NewColCollection(inCols...)
rowSch := schema.SchemaFromCols(colColl)
rows := []row.Row{
row.New(rowSch, row.TaggedValues{
row.New(types.Format_7_18, rowSch, row.TaggedValues{
nameColTag: types.String("Bill Billerson"),
ageColTag: types.Uint(32),
titleColTag: types.String("Senior Dufus")}),
row.New(rowSch, row.TaggedValues{
row.New(types.Format_7_18, rowSch, row.TaggedValues{
nameColTag: types.String("Rob Robertson"),
ageColTag: types.Uint(25),
titleColTag: types.String("Dufus")}),
row.New(rowSch, row.TaggedValues{
row.New(types.Format_7_18, rowSch, row.TaggedValues{
nameColTag: types.String("John Johnson"),
ageColTag: types.Uint(21),
titleColTag: types.String("Intern Dufus")}),
@@ -86,7 +86,7 @@ func testSchema() schema.Schema {
func testRow(col1, col2 string) pipeline.RowWithProps {
taggedVals := row.TaggedValues{0: types.String(col1), 1: types.String(col2)}
return pipeline.RowWithProps{row.New(testSchema(), taggedVals), pipeline.NoProps}
return pipeline.RowWithProps{row.New(types.Format_7_18, testSchema(), taggedVals), pipeline.NoProps}
}
func rs(rs ...pipeline.RowWithProps) []pipeline.RowWithProps {
@@ -93,6 +93,6 @@ func (fwtTr *FWTTransformer) Transform(r row.Row, props pipeline.ReadableMap) ([
destFields[tag] = types.String(buf)
}
r = row.New(sch, destFields)
r = row.New(types.Format_7_18, sch, destFields)
return []*pipeline.TransformedRowResult{{RowData: r}}, ""
}
@@ -36,5 +36,5 @@ func (np *NullPrinter) ProcessRow(inRow row.Row, props pipeline.ReadableMap) (ro
return false
})
return []*pipeline.TransformedRowResult{{RowData: row.New(np.Sch, taggedVals)}}, ""
return []*pipeline.TransformedRowResult{{RowData: row.New(types.Format_7_18, np.Sch, taggedVals)}}, ""
}
@@ -3,6 +3,7 @@ package resultset
import (
"errors"
"fmt"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/rowconv"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/schema"
@@ -239,7 +240,7 @@ func (rss *ResultSetSchema) CrossProduct(tables []*TableResult, cb CrossProductR
return
}
emptyRow := RowWithSchema{row.New(rss.destSch, row.TaggedValues{}), rss.destSch}
emptyRow := RowWithSchema{row.New(types.Format_7_18, rss.destSch, row.TaggedValues{}), rss.destSch}
rss.cph(emptyRow, tables, cb)
}
@@ -414,7 +414,7 @@ func newResultSetRow(colVals ...types.Value) row.Row {
}
sch := schema.UnkeyedSchemaFromCols(collection)
return row.New(sch, taggedVals)
return row.New(types.Format_7_18, sch, taggedVals)
}
// Creates a new schema for a result set specified by the given pairs of column names and types. Column names are
@@ -485,12 +485,12 @@ func TestCombineRows(t *testing.T) {
rss, err := newFromSourceSchemas(peopleTestSchema, episodesTestSchema, appearancesTestSchema)
assert.Nil(t, err)
r := RowWithSchema{row.New(rss.destSch, nil), rss.destSch}
r := RowWithSchema{row.New(types.Format_7_18, rss.destSch, nil), rss.destSch}
r = rss.combineRows(r, RowWithSchema{homer, peopleTestSchema})
r = rss.combineRows(r, RowWithSchema{ep1, episodesTestSchema})
r = rss.combineRows(r, RowWithSchema{app1, appearancesTestSchema})
expectedRow := row.New(rss.destSch, row.TaggedValues{
expectedRow := row.New(types.Format_7_18, rss.destSch, row.TaggedValues{
0: mustGetColVal(homer, idTag),
1: mustGetColVal(homer, firstTag),
2: mustGetColVal(homer, lastTag),
@@ -516,12 +516,12 @@ func TestCombineRows(t *testing.T) {
assert.Nil(t, err)
// combine the rows in the opposite order that their schemas were declared
r := RowWithSchema{row.New(rss.destSch, nil), rss.destSch}
r := RowWithSchema{row.New(types.Format_7_18, rss.destSch, nil), rss.destSch}
r = rss.combineRows(r, RowWithSchema{app1, appearancesTestSchema})
r = rss.combineRows(r, RowWithSchema{ep1, episodesTestSchema})
r = rss.combineRows(r, RowWithSchema{homer, peopleTestSchema})
expectedRow := row.New(rss.destSch, row.TaggedValues{
expectedRow := row.New(types.Format_7_18, rss.destSch, row.TaggedValues{
0: mustGetColVal(homer, idTag),
1: mustGetColVal(homer, firstTag),
2: mustGetColVal(homer, lastTag),
@@ -546,10 +546,10 @@ func TestCombineRows(t *testing.T) {
rss, err := newFromSourceSchemas(peopleTestSchema)
assert.Nil(t, err)
r := RowWithSchema{row.New(rss.destSch, nil), rss.destSch}
r := RowWithSchema{row.New(types.Format_7_18, rss.destSch, nil), rss.destSch}
r = rss.combineRows(r, RowWithSchema{homer, peopleTestSchema})
expectedRow := row.New(rss.destSch, row.TaggedValues{
expectedRow := row.New(types.Format_7_18, rss.destSch, row.TaggedValues{
0: mustGetColVal(homer, idTag),
1: mustGetColVal(homer, firstTag),
2: mustGetColVal(homer, lastTag),
@@ -573,7 +573,7 @@ func TestCrossProduct(t *testing.T) {
newTableResultForTest(rs(app1, app2), appearancesTestSchema),
}
resultRow := RowWithSchema{Schema: rss.destSch, Row: row.New(rss.destSch, nil)}
resultRow := RowWithSchema{Schema: rss.destSch, Row: row.New(types.Format_7_18, rss.destSch, nil)}
expectedResult := rs(
rss.combineAllRows(resultRow.Copy(), RowWithSchema{homer, peopleTestSchema}, RowWithSchema{ep1, episodesTestSchema}, RowWithSchema{app1, appearancesTestSchema}).Row,
rss.combineAllRows(resultRow.Copy(), RowWithSchema{homer, peopleTestSchema}, RowWithSchema{ep1, episodesTestSchema}, RowWithSchema{app2, appearancesTestSchema}).Row,
@@ -599,7 +599,7 @@ func TestCrossProduct(t *testing.T) {
newTableResultForTest(rs(app1), appearancesTestSchema),
}
resultRow := RowWithSchema{Schema: rss.destSch, Row: row.New(rss.destSch, nil)}
resultRow := RowWithSchema{Schema: rss.destSch, Row: row.New(types.Format_7_18, rss.destSch, nil)}
expectedResult := rs(
rss.combineAllRows(resultRow.Copy(), RowWithSchema{homer, peopleTestSchema}, RowWithSchema{ep1, episodesTestSchema}, RowWithSchema{app1, appearancesTestSchema}).Row,
)
@@ -617,7 +617,7 @@ func TestCrossProduct(t *testing.T) {
newTableResultForTest(rs(ep1, ep2), episodesTestSchema),
}
resultRow := RowWithSchema{Schema: rss.destSch, Row: row.New(rss.destSch, nil)}
resultRow := RowWithSchema{Schema: rss.destSch, Row: row.New(types.Format_7_18, rss.destSch, nil)}
expectedResult := rs(
rss.combineAllRows(resultRow.Copy(), RowWithSchema{homer, peopleTestSchema}, RowWithSchema{ep1, episodesTestSchema}).Row,
rss.combineAllRows(resultRow.Copy(), RowWithSchema{homer, peopleTestSchema}, RowWithSchema{ep2, episodesTestSchema}).Row,
@@ -637,7 +637,7 @@ func TestCrossProduct(t *testing.T) {
newTableResultForTest(rs(homer, marge, bart), peopleTestSchema),
}
resultRow := RowWithSchema{Schema: rss.destSch, Row: row.New(rss.destSch, nil)}
resultRow := RowWithSchema{Schema: rss.destSch, Row: row.New(types.Format_7_18, rss.destSch, nil)}
expectedResult := rs(
rss.combineAllRows(resultRow.Copy(), RowWithSchema{homer, peopleTestSchema}).Row,
rss.combineAllRows(resultRow.Copy(), RowWithSchema{marge, peopleTestSchema}).Row,
@@ -775,7 +775,7 @@ func newPeopleRow(id int, first, last string, isMarried bool, age int, rating fl
ratingTag: types.Float(rating),
}
return row.New(peopleTestSchema, vals)
return row.New(types.Format_7_18, peopleTestSchema, vals)
}
func newEpsRow(id int, name string, airdate int, rating float32) row.Row {
@@ -786,7 +786,7 @@ func newEpsRow(id int, name string, airdate int, rating float32) row.Row {
epRatingTag: types.Float(rating),
}
return row.New(episodesTestSchema, vals)
return row.New(types.Format_7_18, episodesTestSchema, vals)
}
func newAppsRow(charId, epId int, comment string) row.Row {
@@ -796,7 +796,7 @@ func newAppsRow(charId, epId int, comment string) row.Row {
appCommentsTag: types.String(comment),
}
return row.New(appearancesTestSchema, vals)
return row.New(types.Format_7_18, appearancesTestSchema, vals)
}
// 6 characters
@@ -65,7 +65,7 @@ func TestWriter(t *testing.T) {
rows := make([]row.Row, len(ages))
for i := range ages {
rows[i] = row.New(rowSch, row.TaggedValues{
rows[i] = row.New(types.Format_7_18, rowSch, row.TaggedValues{
nameColTag: types.String(names[i]),
ageColTag: types.String(ages[i]),
titleColTag: types.String(titles[i]),
@@ -216,7 +216,7 @@ func TestEastAsianLanguages(t *testing.T) {
rows := make([]row.Row, len(ages))
for i := range ages {
rows[i] = row.New(rowSch, row.TaggedValues{
rows[i] = row.New(types.Format_7_18, rowSch, row.TaggedValues{
nameColTag: types.String(names[i]),
ageColTag: types.String(ages[i]),
titleColTag: types.String(titles[i]),
@@ -48,7 +48,7 @@ func NewRowFromStrings(sch schema.Schema, valStrs []string) row.Row {
taggedVals[tag] = types.String(valStr)
}
return row.New(sch, taggedVals)
return row.New(types.Format_7_18, sch, taggedVals)
}
// NewRowFromTaggedStrings takes an untyped schema and a map of column tag to string value and returns a row
@@ -58,7 +58,7 @@ func NewRowFromTaggedStrings(sch schema.Schema, taggedStrs map[uint64]string) ro
taggedVals[tag] = types.String(valStr)
}
return row.New(sch, taggedVals)
return row.New(types.Format_7_18, sch, taggedVals)
}
// UntypeSchema takes a schema and returns a schema with the same columns, but with the types of each of those columns
@@ -7,6 +7,7 @@ import (
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/row"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltcore/schema"
"github.com/liquidata-inc/ld/dolt/go/store/types"
"github.com/tealeg/xlsx"
)
@@ -51,7 +52,7 @@ func decodeXLSXRows(xlData [][][]string, sch schema.Schema) ([]row.Row, error) {
return nil, err
}
}
rows = append(rows, row.New(sch, taggedVals))
rows = append(rows, row.New(types.Format_7_18, sch, taggedVals))
fmt.Println(rows)
}
@@ -33,7 +33,7 @@ func TestDecodeXLSXRows(t *testing.T) {
taggedVals[uint64(2)], _ = doltcore.StringToValue("otori", types.StringKind)
taggedVals[uint64(3)], _ = doltcore.StringToValue("24", types.StringKind)
newRow := row.New(sch, taggedVals)
newRow := row.New(types.Format_7_18, sch, taggedVals)
if !reflect.DeepEqual(decoded[0], newRow) {
t.Log("error!")
+4
View File
@@ -136,6 +136,10 @@ func (m Map) firstOrLast(ctx context.Context, last bool) (Value, Value) {
return entry.key, entry.value
}
// Format returns the serialization *Format associated with this Map,
// delegating to the unexported format method. Exported so callers outside
// the package (e.g. row/map-editor code) can thread the format through.
func (m Map) Format() *Format {
return m.format()
}
func (m Map) First(ctx context.Context) (Value, Value) {
return m.firstOrLast(ctx, false)
}