Reverts to some changes in PR

Neil Macneale IV
2024-08-20 15:40:24 -07:00
parent fed30c181d
commit 49e0cb67e7
36 changed files with 90 additions and 90 deletions

go/Godeps/LICENSES (generated)

@@ -309,7 +309,7 @@ Isaac Dunham
Jaydeep Patil
Jens Gustedt
Jeremy Huntwork
-Jo-Philipp Which
+Jo-Philipp Wich
Joakim Sindholt
John Spencer
Julien Ramseier
@@ -322,7 +322,7 @@ Luca Barbato
Luka Perkov
M Farkas-Dyck (Strake)
Mahesh Bodapati
-Markus Whichmann
+Markus Wichmann
Masanori Ogino
Michael Clark
Michael Forney
@@ -409,7 +409,7 @@ under the standard MIT terms.
All other files which have no copyright comments are original works
produced specifically for use as part of this library, written either
by Rich Felker, the main author of the library, or by one or more
-contributors listed above. Details on authorship of individual files
+contibutors listed above. Details on authorship of individual files
can be found in the git version control history of the project. The
omission of copyright and license comments in each file is in the
interest of source tree size.
@@ -1749,7 +1749,7 @@ Codegen template in t_html_generator.h
---------------------------------------------------
For t_cl_generator.cc
-* Copyright (c) 2008- Patrick Collision <patrick@collision.ie>
+* Copyright (c) 2008- Patrick Collison <patrick@collison.ie>
* Copyright (c) 2006- Facebook
---------------------------------------------------
@@ -9921,7 +9921,7 @@ For more information, please refer to <http://unlicense.org/>
= github.com/zeebo/xxh3 licensed under: =
xxHash Library
-Copyright (c) 2012-2014, Yann Collect
+Copyright (c) 2012-2014, Yann Collet
Copyright (c) 2019, Jeff Wendling
All rights reserved.


@@ -176,7 +176,7 @@ func getBranches(sqlCtx *sql.Context, queryEngine cli.Queryist, remote bool) ([]
return nil, err
}
if len(row) != 2 {
return nil, fmt.Errorf("unexpectedededly received multiple columns in '%s': %s", command, row)
return nil, fmt.Errorf("unexpectedly received multiple columns in '%s': %s", command, row)
}
rowStrings, err := sqlfmt.SqlRowAsStrings(row, schema)


@@ -69,7 +69,7 @@ func newDiffWriter(diffOutput diffOutput) (diffWriter, error) {
case JsonDiffOutput:
return newJsonDiffWriter(iohelp.NopWrCloser(cli.CliOut))
default:
panic(fmt.Sprintf("unexpectededed diff output: %v", diffOutput))
panic(fmt.Sprintf("unexpected diff output: %v", diffOutput))
}
}


@@ -310,7 +310,7 @@ func ConfigureServices(
primaryController := sqlEngine.GetUnderlyingEngine().Analyzer.Catalog.BinlogPrimaryController
doltBinlogPrimaryController, ok := primaryController.(*binlogreplication.DoltBinlogPrimaryController)
if !ok {
return fmt.Errorf("unexpectededed type of binlog controller: %T", primaryController)
return fmt.Errorf("unexpected type of binlog controller: %T", primaryController)
}
_, logBinValue, ok := sql.SystemVariables.GetGlobal("log_bin")
@@ -319,7 +319,7 @@ func ConfigureServices(
}
logBin, ok := logBinValue.(int8)
if !ok {
return fmt.Errorf("unexpectededed type for @@log_bin system variable: %T", logBinValue)
return fmt.Errorf("unexpected type for @@log_bin system variable: %T", logBinValue)
}
_, logBinBranchValue, ok := sql.SystemVariables.GetGlobal("log_bin_branch")
@@ -328,7 +328,7 @@ func ConfigureServices(
}
logBinBranch, ok := logBinBranchValue.(string)
if !ok {
return fmt.Errorf("unexpectededed type for @@log_bin_branch system variable: %T", logBinBranchValue)
return fmt.Errorf("unexpected type for @@log_bin_branch system variable: %T", logBinBranchValue)
}
if logBinBranch != "" {
// If an invalid branch has been configured, let the server start up so that it's


@@ -140,7 +140,7 @@ func TestServerBadArgs(t *testing.T) {
tests := [][]string{
{"-H", "127.0.0.0.1"},
{"-H", "localahost"},
{"-H", "loclahost"},
{"-P", "300"},
{"-P", "90000"},
{"-l", "everything"},


@@ -454,7 +454,7 @@ func (t *Table) HashOf() (hash.Hash, error) {
// UpdateNomsRows replaces the current row data and returns and updated Table.
// Calls to UpdateNomsRows will not be written to the database. The root must
-// be updated with the updated table, and the root must be comitted or written.
+// be updated with the updated table, and the root must be committed or written.
// Deprecated: use Table.UpdateRows() instead.
func (t *Table) UpdateNomsRows(ctx context.Context, updatedRows types.Map) (*Table, error) {
table, err := t.table.SetTableRows(ctx, durable.IndexFromNomsMap(updatedRows, t.ValueReadWriter(), t.NodeStore()))
@@ -466,7 +466,7 @@ func (t *Table) UpdateNomsRows(ctx context.Context, updatedRows types.Map) (*Tab
// UpdateRows replaces the current row data and returns and updated Table.
// Calls to UpdateRows will not be written to the database. The root must
-// be updated with the updated table, and the root must be comitted or written.
+// be updated with the updated table, and the root must be committed or written.
func (t *Table) UpdateRows(ctx context.Context, updatedRows durable.Index) (*Table, error) {
table, err := t.table.SetTableRows(ctx, updatedRows)
if err != nil {


@@ -635,7 +635,7 @@ func (uv uniqValidator) validateDiff(ctx *sql.Context, diff tree.ThreeWayDiff) (
// deleteArtifact deletes the unique constraint violation artifact for the row identified by |key| and returns a
// boolean that indicates if an artifact was deleted, as well as an error that indicates if there were any
-// unexpectededed errors encountered.
+// unexpected errors encountered.
func (uv uniqValidator) deleteArtifact(ctx context.Context, key val.Tuple) (bool, error) {
artifactKey := uv.edits.BuildArtifactKey(ctx, key, uv.srcHash, prolly.ArtifactTypeUniqueKeyViol)
@@ -1121,7 +1121,7 @@ func (m *primaryMerger) merge(ctx *sql.Context, diff tree.ThreeWayDiff, sourceSc
// WARNING: In theory, we should only have to call MutableMap::Delete if the key is actually being deleted
// from the left branch. However, because of https://github.com/dolthub/dolt/issues/7192,
// if the left side of the merge is an empty table and we don't attempt to modify the map,
-// the table will have an unexpectededed root hash.
+// the table will have an unexpected root hash.
return m.mut.Delete(ctx, diff.Key)
case tree.DiffOpDivergentModifyResolved:
// any generated columns need to be re-resolved because their computed values may have changed as a result of
@@ -1177,7 +1177,7 @@ func (m *primaryMerger) merge(ctx *sql.Context, diff tree.ThreeWayDiff, sourceSc
}
return m.mut.Put(ctx, diff.Key, newTupleValue)
default:
return fmt.Errorf("unexpectededed diffOp for editing primary index: %s", diff.Op)
return fmt.Errorf("unexpected diffOp for editing primary index: %s", diff.Op)
}
}
@@ -1540,7 +1540,7 @@ func writeTupleExpression(
// instance that describes how the table is being merged, |from| is the field position in the value tuple from the
// previous schema, and |rightSide| indicates whether the previous type info can be found on the right side of the merge
// or the left side. If the previous type info is the same as the current type info for the merged schema, then this
-// function is a no-op and simply returns |value|. The converted value along with any unexpectededed error encountered is
+// function is a no-op and simply returns |value|. The converted value along with any unexpected error encountered is
// returned.
func convertValueToNewType(value interface{}, newTypeInfo typeinfo.TypeInfo, tm *TableMerger, from int, rightSide bool) (interface{}, error) {
var previousTypeInfo typeinfo.TypeInfo


@@ -276,7 +276,7 @@ func nomsKindsFromQueryTypes(qt query.Type) []types.NomsKind {
return []types.NomsKind{types.JSONKind}
default:
panic(fmt.Sprintf("unexpectededed query.Type %s", qt.String()))
panic(fmt.Sprintf("unexpect query.Type %s", qt.String()))
}
}


@@ -96,7 +96,7 @@ func (fh filehandler) ServeHTTP(respWr http.ResponseWriter, req *http.Request) {
}
_, ok := hash.MaybeParse(path[i+1:])
if !ok {
-logger.WithField("last_path_component", path[i+1:]).Warn("bad request with unparsable last path component")
+logger.WithField("last_path_component", path[i+1:]).Warn("bad request with unparseable last path component")
respWr.WriteHeader(http.StatusBadRequest)
return
}


@@ -77,9 +77,9 @@ var DoltRebaseSystemTableSchema = []*sql.Column{
},
}
-// ErrRebaseUncomittedChanges is used when a rebase is started, but there are uncomitted (and not
+// ErrRebaseUncommittedChanges is used when a rebase is started, but there are uncommitted (and not
// ignored) changes in the working set.
-var ErrRebaseUncomittedChanges = fmt.Errorf("cannot start a rebase with uncomitted changes")
+var ErrRebaseUncommittedChanges = fmt.Errorf("cannot start a rebase with uncommitted changes")
// ErrRebaseConflict is used when a merge conflict is detected while rebasing a commit.
var ErrRebaseConflict = goerrors.NewKind(
@@ -319,7 +319,7 @@ func validateWorkingSetCanStartRebase(ctx *sql.Context) error {
return fmt.Errorf("unable to start rebase while another rebase is in progress abort the current rebase before proceeding")
}
-// Make sure the working set doesn't contain any uncomitted changes
+// Make sure the working set doesn't contain any uncommitted changes
roots, ok := doltSession.GetRoots(ctx, ctx.GetCurrentDatabase())
if !ok {
return fmt.Errorf("unable to get roots for database %s", ctx.GetCurrentDatabase())
@@ -329,7 +329,7 @@ func validateWorkingSetCanStartRebase(ctx *sql.Context) error {
return err
}
if !wsOnlyHasIgnoredTables {
-return ErrRebaseUncomittedChanges
+return ErrRebaseUncommittedChanges
}
return nil
@@ -473,7 +473,7 @@ func continueRebase(ctx *sql.Context) (string, error) {
func processRebasePlanStep(ctx *sql.Context, planStep *rebase.RebasePlanStep) error {
// Make sure we have a transaction opened for the session
-// NOTE: After our first call to cherry-pick, the tx is comitted, so a new tx needs to be started
+// NOTE: After our first call to cherry-pick, the tx is committed, so a new tx needs to be started
// as we process additional rebase actions.
doltSession := dsess.DSessFromSess(ctx.Session)
if doltSession.GetTransaction() == nil {


@@ -3587,7 +3587,7 @@ var DoltBranchScripts = []queries.ScriptTest{
var DoltResetTestScripts = []queries.ScriptTest{
{
Name: "CALL DOLT_RESET('--hard') should reset the merge state after uncomitted merge",
Name: "CALL DOLT_RESET('--hard') should reset the merge state after uncommitted merge",
SetUpScript: []string{
"CREATE TABLE test1 (pk int NOT NULL, c1 int, c2 int, PRIMARY KEY (pk));",
"CALL DOLT_ADD('.')",
@@ -5703,7 +5703,7 @@ var DoltCherryPickTests = []queries.ScriptTest{
Assertions: []queries.ScriptTestAssertion{
{
Query: "CALL Dolt_Cherry_Pick(@commit1);",
-ExpectedErrStr: "cannot cherry-pick with uncomitted changes",
+ExpectedErrStr: "cannot cherry-pick with uncommitted changes",
},
{
Query: "call dolt_add('t');",
@@ -5711,7 +5711,7 @@ var DoltCherryPickTests = []queries.ScriptTest{
},
{
Query: "CALL Dolt_Cherry_Pick(@commit1);",
-ExpectedErrStr: "cannot cherry-pick with uncomitted changes",
+ExpectedErrStr: "cannot cherry-pick with uncommitted changes",
},
},
},


@@ -581,7 +581,7 @@ var MergeScripts = []queries.ScriptTest{
// TODO: These tests are skipped, because we have temporarily disabled dolt_conflicts_resolve
// when there are schema conflicts, since schema conflicts prevent table data from being
// merged, and resolving the schema changes, but not completing the data merge will likely
-// give customers unexpectededed results.
+// give customers unexpected results.
// https://github.com/dolthub/dolt/issues/6616
Name: "CALL DOLT_MERGE with schema conflicts can be correctly resolved using dolt_conflicts_resolve when autocommit is off",
SetUpScript: []string{


@@ -11629,13 +11629,13 @@ INSERT INTO symbols VALUES ('KRNY','Kearny Financial','Finance',NULL);
INSERT INTO symbols VALUES ('KRO','Kronos Worldwide Inc','Basic Industries',NULL);
INSERT INTO symbols VALUES ('KRP','Kimbell Royalty Partners','Energy',2017);
INSERT INTO symbols VALUES ('KRYS','Krystal Biotech, Inc.','Health Care',2017);
-INSERT INTO symbols VALUES ('KSM','Scudder Strategic Municipal Income Trust',NULL,1989);
+INSERT INTO symbols VALUES ('KSM','Scudder Strategic Municiple Income Trust',NULL,1989);
INSERT INTO symbols VALUES ('KSS','Kohl&#39;s Corporation','Consumer Services',1992);
INSERT INTO symbols VALUES ('KSU','Kansas City Southern','Transportation',NULL);
INSERT INTO symbols VALUES ('KSU^','Kansas City Southern','Transportation',NULL);
INSERT INTO symbols VALUES ('KT','KT Corporation','Public Utilities',NULL);
INSERT INTO symbols VALUES ('KTCC','Key Tronic Corporation','Technology',1983);
-INSERT INTO symbols VALUES ('KTF','Scudder Municipal Income Trust',NULL,1988);
+INSERT INTO symbols VALUES ('KTF','Scudder Municiple Income Trust',NULL,1988);
INSERT INTO symbols VALUES ('KTH','Lehman ABS Corporation','Finance',NULL);
INSERT INTO symbols VALUES ('KTN','Lehman ABS Corporation','Finance',NULL);
INSERT INTO symbols VALUES ('KTOS','Kratos Defense & Security Solutions, Inc.','Capital Goods',NULL);
@@ -12818,7 +12818,7 @@ INSERT INTO symbols VALUES ('PCI','PIMCO Dynamic Credit and Mortgage Income Fund
INSERT INTO symbols VALUES ('PCK','Pimco California Municipal Income Fund II',NULL,2002);
INSERT INTO symbols VALUES ('PCM','PIMCO Commercial Mortgage Securities Trust, Inc.',NULL,1993);
INSERT INTO symbols VALUES ('PCMI','PCM, Inc.','Consumer Services',NULL);
-INSERT INTO symbols VALUES ('PCN','Pimco Corporate & Income Strategy Fund',NULL,2001);
+INSERT INTO symbols VALUES ('PCN','Pimco Corporate & Income Stategy Fund',NULL,2001);
INSERT INTO symbols VALUES ('PCOM','Points International, Ltd.','Miscellaneous',NULL);
INSERT INTO symbols VALUES ('PCQ','PIMCO California Municipal Income Fund',NULL,2001);
INSERT INTO symbols VALUES ('PCRX','Pacira BioSciences, Inc.','Health Care',2011);
@@ -13502,7 +13502,7 @@ INSERT INTO symbols VALUES ('SBLKZ','Star Bulk Carriers Corp.','Transportation',
INSERT INTO symbols VALUES ('SBNA','Scorpio Tankers Inc.','Transportation',2014);
INSERT INTO symbols VALUES ('SBNY','Signature Bank','Finance',2004);
INSERT INTO symbols VALUES ('SBOT','Stellar Biotechnologies, Inc.','Health Care',NULL);
-INSERT INTO symbols VALUES ('SBOW','SilverBow Resources, Inc.','Energy',NULL);
+INSERT INTO symbols VALUES ('SBOW','SilverBow Resorces, Inc.','Energy',NULL);
INSERT INTO symbols VALUES ('SBPH','Spring Bank Pharmaceuticals, Inc.','Health Care',2016);
INSERT INTO symbols VALUES ('SBR','Sabine Royalty Trust','Energy',NULL);
INSERT INTO symbols VALUES ('SBRA','Sabra Health Care REIT, Inc.','Consumer Services',NULL);
@@ -14104,7 +14104,7 @@ INSERT INTO symbols VALUES ('THGA','The Hanover Insurance Group, Inc.','Finance'
INSERT INTO symbols VALUES ('THM','International Tower Hill Mines Ltd','Basic Industries',NULL);
INSERT INTO symbols VALUES ('THO','Thor Industries, Inc.','Consumer Non-Durables',NULL);
INSERT INTO symbols VALUES ('THOR','Synthorx, Inc.','Health Care',2018);
-INSERT INTO symbols VALUES ('THQ','Tekla Healthcare Opportunities Fund',NULL,2014);
+INSERT INTO symbols VALUES ('THQ','Tekla Healthcare Opportunies Fund',NULL,2014);
INSERT INTO symbols VALUES ('THR','Thermon Group Holdings, Inc.','Energy',2011);
INSERT INTO symbols VALUES ('THRM','Gentherm Inc','Capital Goods',NULL);
INSERT INTO symbols VALUES ('THS','Treehouse Foods, Inc.','Consumer Non-Durables',NULL);
@@ -17627,12 +17627,12 @@ INSERT INTO join_result VALUES ('stock','KRNY','us','2017-11-01',15.2,15.3,14.9,
INSERT INTO join_result VALUES ('stock','KRO','us','2017-11-01',26.68,26.7558,25.9,26.1,246853,'0','Kronos Worldwide Inc','Basic Industries',NULL);
INSERT INTO join_result VALUES ('stock','KRP','us','2017-11-01',16.689,16.807,16.631,16.64,19253,'0','Kimbell Royalty Partners','Energy',2017);
INSERT INTO join_result VALUES ('stock','KRYS','us','2017-11-01',9.349,10,9.349,9.55,15987,'0','Krystal Biotech, Inc.','Health Care',2017);
-INSERT INTO join_result VALUES ('stock','KSM','us','2017-11-01',11.93,11.96,11.88,11.8953,82733,'0','Scudder Strategic Municipal Income Trust',NULL,1989);
+INSERT INTO join_result VALUES ('stock','KSM','us','2017-11-01',11.93,11.96,11.88,11.8953,82733,'0','Scudder Strategic Municiple Income Trust',NULL,1989);
INSERT INTO join_result VALUES ('stock','KSS','us','2017-11-01',41.95,42.81,41.52,41.9,2970700,'0','Kohl&#39;s Corporation','Consumer Services',1992);
INSERT INTO join_result VALUES ('stock','KSU','us','2017-11-01',104.77,105.72,104.46,105.46,952870,'0','Kansas City Southern','Transportation',NULL);
INSERT INTO join_result VALUES ('stock','KT','us','2017-11-01',14.4,14.41,13.91,14.13,1238720,'0','KT Corporation','Public Utilities',NULL);
INSERT INTO join_result VALUES ('stock','KTCC','us','2017-11-01',7.23,7.8,7.07,7.8,112870,'0','Key Tronic Corporation','Technology',1983);
-INSERT INTO join_result VALUES ('stock','KTF','us','2017-11-01',11.89,11.94,11.83,11.87,277606,'0','Scudder Municipal Income Trust',NULL,1988);
+INSERT INTO join_result VALUES ('stock','KTF','us','2017-11-01',11.89,11.94,11.83,11.87,277606,'0','Scudder Municiple Income Trust',NULL,1988);
INSERT INTO join_result VALUES ('stock','KTH','us','2017-11-01',32.41,32.53,32.41,32.53,1371,'0','Lehman ABS Corporation','Finance',NULL);
INSERT INTO join_result VALUES ('stock','KTN','us','2017-11-01',33.8,33.8,32.7304,33.73,3746,'0','Lehman ABS Corporation','Finance',NULL);
INSERT INTO join_result VALUES ('stock','KTOS','us','2017-11-01',12.16,12.3,11.65,11.9,1603390,'0','Kratos Defense & Security Solutions, Inc.','Capital Goods',NULL);
@@ -18542,7 +18542,7 @@ INSERT INTO join_result VALUES ('stock','PCI','us','2017-11-01',22.619,22.639,22
INSERT INTO join_result VALUES ('stock','PCK','us','2017-11-01',9.971,10.01,9.941,9.951,68472,'0','Pimco California Municipal Income Fund II',NULL,2002);
INSERT INTO join_result VALUES ('stock','PCM','us','2017-11-01',11.95,11.97,11.821,11.88,57610,'0','PIMCO Commercial Mortgage Securities Trust, Inc.',NULL,1993);
INSERT INTO join_result VALUES ('stock','PCMI','us','2017-11-01',14.2,14.5,14,14.25,87457,'0','PCM, Inc.','Consumer Services',NULL);
-INSERT INTO join_result VALUES ('stock','PCN','us','2017-11-01',16.881,16.961,16.841,16.871,79197,'0','Pimco Corporate & Income Strategy Fund',NULL,2001);
+INSERT INTO join_result VALUES ('stock','PCN','us','2017-11-01',16.881,16.961,16.841,16.871,79197,'0','Pimco Corporate & Income Stategy Fund',NULL,2001);
INSERT INTO join_result VALUES ('stock','PCOM','us','2017-11-01',11.5,11.5,11.01,11.08,45690,'0','Points International, Ltd.','Miscellaneous',NULL);
INSERT INTO join_result VALUES ('stock','PCQ','us','2017-11-01',17.08,17.15,16.951,16.971,100898,'0','PIMCO California Municipal Income Fund',NULL,2001);
INSERT INTO join_result VALUES ('stock','PCRX','us','2017-11-01',32.4,33.3,32.2,32.6,797680,'0','Pacira BioSciences, Inc.','Health Care',2011);
@@ -19055,7 +19055,7 @@ INSERT INTO join_result VALUES ('stock','SBLK','us','2017-11-01',10.85,11,10.5,1
INSERT INTO join_result VALUES ('stock','SBNA','us','2017-11-01',24.4,24.75,24.4,24.7,1334,'0','Scorpio Tankers Inc.','Transportation',2014);
INSERT INTO join_result VALUES ('stock','SBNY','us','2017-11-01',131.31,131.96,128.85,129.7,414960,'0','Signature Bank','Finance',2004);
INSERT INTO join_result VALUES ('stock','SBOT','us','2017-11-01',1.15,1.16,1.11,1.12,53927,'0','Stellar Biotechnologies, Inc.','Health Care',NULL);
-INSERT INTO join_result VALUES ('stock','SBOW','us','2017-11-01',22.83,22.9572,22.26,22.27,28686,'0','SilverBow Resources, Inc.','Energy',NULL);
+INSERT INTO join_result VALUES ('stock','SBOW','us','2017-11-01',22.83,22.9572,22.26,22.27,28686,'0','SilverBow Resorces, Inc.','Energy',NULL);
INSERT INTO join_result VALUES ('stock','SBPH','us','2017-11-01',15.03,15.41,14.79,15.12,38453,'0','Spring Bank Pharmaceuticals, Inc.','Health Care',2016);
INSERT INTO join_result VALUES ('stock','SBR','us','2017-11-01',42.65,42.8,42.3946,42.65,12375,'0','Sabine Royalty Trust','Energy',NULL);
INSERT INTO join_result VALUES ('stock','SBRA','us','2017-11-01',19.88,20.14,19.75,20.14,1558500,'0','Sabra Health Care REIT, Inc.','Consumer Services',NULL);
@@ -19505,7 +19505,7 @@ INSERT INTO join_result VALUES ('stock','THG','us','2017-11-01',99.05,99.7,98.52
INSERT INTO join_result VALUES ('stock','THGA','us','2017-11-01',25.51,25.5226,25.4801,25.4801,1471,'0','The Hanover Insurance Group, Inc.','Finance',NULL);
INSERT INTO join_result VALUES ('stock','THM','us','2017-11-01',0.434,0.4379,0.4239,0.4252,81904,'0','International Tower Hill Mines Ltd','Basic Industries',NULL);
INSERT INTO join_result VALUES ('stock','THO','us','2017-11-01',137.64,138.07,136.07,136.82,575303,'0','Thor Industries, Inc.','Consumer Non-Durables',NULL);
-INSERT INTO join_result VALUES ('stock','THQ','us','2017-11-01',17.63,17.63,17.5,17.53,162058,'0','Tekla Healthcare Opportunities Fund',NULL,2014);
+INSERT INTO join_result VALUES ('stock','THQ','us','2017-11-01',17.63,17.63,17.5,17.53,162058,'0','Tekla Healthcare Opportunies Fund',NULL,2014);
INSERT INTO join_result VALUES ('stock','THR','us','2017-11-01',21.73,21.73,21.16,21.27,164411,'0','Thermon Group Holdings, Inc.','Energy',2011);
INSERT INTO join_result VALUES ('stock','THRM','us','2017-11-01',33.7,34.45,33.25,33.65,215357,'0','Gentherm Inc','Capital Goods',NULL);
INSERT INTO join_result VALUES ('stock','THS','us','2017-11-01',66.52,67.13,66.05,66.36,1109140,'0','Treehouse Foods, Inc.','Consumer Non-Durables',NULL);


@@ -164,7 +164,7 @@ func TestGet(t *testing.T) {
key5 := newTuple(t, types.Int(5))
key6 := newTuple(t, types.Int(6))
-// test uncomitted
+// test uncommitted
requireGet(ctx, t, tea, key1, false)
teaInsert(t, tea, key1)
requireGet(ctx, t, tea, key1, true)
@@ -172,22 +172,22 @@ func TestGet(t *testing.T) {
require.NoError(t, err)
requireGet(ctx, t, tea, key1, false)
-// test uncomitted flushed
+// test uncommitted flushed
teaInsert(t, tea, key1)
requireGet(ctx, t, tea, key1, true)
-tea.flushUncomitted()
+tea.flushUncommitted()
requireGet(ctx, t, tea, key1, true)
err = tea.Rollback(ctx)
require.NoError(t, err)
requireGet(ctx, t, tea, key1, false)
-// test comitted
+// test commmitted
teaInsert(t, tea, key1)
err = tea.Commit(ctx, nbf)
require.NoError(t, err)
requireGet(ctx, t, tea, key1, true)
-// edits in comitted and uncomitted
+// edits in committed and uncommitted
requireGet(ctx, t, tea, key2, false)
teaInsert(t, tea, key2)
requireGet(ctx, t, tea, key1, true)
@@ -197,11 +197,11 @@ func TestGet(t *testing.T) {
requireGet(ctx, t, tea, key1, true)
requireGet(ctx, t, tea, key2, false)
-// edits in comitted and uncomitted flushed
+// edits in committed and uncommitted flushed
teaInsert(t, tea, key2)
requireGet(ctx, t, tea, key1, true)
requireGet(ctx, t, tea, key2, true)
-tea.flushUncomitted()
+tea.flushUncommitted()
requireGet(ctx, t, tea, key1, true)
requireGet(ctx, t, tea, key2, true)
err = tea.Rollback(ctx)
@@ -209,10 +209,10 @@ func TestGet(t *testing.T) {
requireGet(ctx, t, tea, key1, true)
requireGet(ctx, t, tea, key2, false)
-// edits in comitted, uncomitted and uncomitted flushed
+// edits in committed, uncommitted and uncommitted flushed
requireGet(ctx, t, tea, key3, false)
teaInsert(t, tea, key2)
-tea.flushUncomitted()
+tea.flushUncommitted()
teaInsert(t, tea, key3)
requireGet(ctx, t, tea, key1, true)
requireGet(ctx, t, tea, key2, true)
@@ -225,7 +225,7 @@ func TestGet(t *testing.T) {
// edits everywhere materialized
teaInsert(t, tea, key2)
-tea.flushUncomitted()
+tea.flushUncommitted()
teaInsert(t, tea, key3)
requireGet(ctx, t, tea, key1, true)
requireGet(ctx, t, tea, key2, true)
@@ -247,7 +247,7 @@ func TestGet(t *testing.T) {
requireGet(ctx, t, tea, key4, true)
teaDelete(t, tea, key2)
teaInsert(t, tea, key5)
-tea.flushUncomitted()
+tea.flushUncommitted()
requireGet(ctx, t, tea, key2, false)
requireGet(ctx, t, tea, key5, true)
teaInsert(t, tea, key6)


@@ -233,7 +233,7 @@ func TestValidation(t *testing.T) {
}
if apr.ContainsAny("string2", "flag2", "integer2") {
t.Error("Contains unexpectededed parameter(s)")
t.Error("Contains unexpected parameter(s)")
}
if val := apr.MustGetValue("string"); val != "string" {


@@ -93,14 +93,14 @@ type WalkableFS interface {
Iter(directory string, recursive bool, cb FSIterCB) error
}
-// ReadWriteFS is an interface whose implementers will provide read, and write implementations but may not allow
+// ReadWriteFS is an interface whose implementors will provide read, and write implementations but may not allow
// for files to be listed.
type ReadWriteFS interface {
ReadableFS
WritableFS
}
-// Filesys is an interface whose implementers will provide read, write, and list mechanisms
+// Filesys is an interface whose implementors will provide read, write, and list mechanisms
type Filesys interface {
ReadableFS
WritableFS


@@ -562,7 +562,7 @@ func (fs *InMemFS) moveDirHelper(dir *memDir, destPath string) error {
delete(dir.objs, obj.absPath)
delete(fs.objs, obj.absPath)
default:
return fmt.Errorf("unexpectededed type of memory object: %T", v)
return fmt.Errorf("unexpected type of memory object: %T", v)
}
}


@@ -281,8 +281,8 @@ func (r *Result) populateHistogram(buf []byte) error {
var err error
{
-timer := regexp.MustCompile(`total time:\s+([0-9][0-9]*\.[0-9]+)s\n`)
-res := timer.FindSubmatch(buf)
+timeRe := regexp.MustCompile(`total time:\s+([0-9][0-9]*\.[0-9]+)s\n`)
+res := timeRe.FindSubmatch(buf)
if len(res) == 0 {
return fmt.Errorf("time not found")
}


@@ -79,7 +79,7 @@ func Run(ctx context.Context, config SysbenchConfig) error {
fmt.Println("Running postgres sysbench tests")
b = NewPostgresBenchmarker(cwd, config, sc)
default:
panic(fmt.Sprintf("unexpectededed server type: %s", st))
panic(fmt.Sprintf("unexpected server type: %s", st))
}
results, err = b.Benchmark(ctx)


@@ -49,7 +49,7 @@ func RunTpcc(ctx context.Context, config TpccConfig) error {
fmt.Println("Running mysql tpcc benchmarks")
b = NewMysqlTpccBenchmarker(cwd, config, sc)
default:
panic(fmt.Sprintf("unexpectededed server type: %s", st))
panic(fmt.Sprintf("unexpected server type: %s", st))
}
results, err = b.Benchmark(ctx)


@@ -17,7 +17,7 @@ namespace serial;
table AddressMap {
// sorted array of key items
key_items:[ubyte] (required);
-// items offsets for |key_items|
+// items offets for |key_items|
// first offset is 0, last offset is len(key_items)
key_offsets:[uint16] (required);


@@ -18,7 +18,7 @@ table MergeArtifacts {
// sorted array of key items
// key items are encoded as TupleFormatAlpha
key_items:[ubyte] (required);
-// items offsets for |key_items|
+// items offets for |key_items|
// first offset is 0, last offset is len(key_items)
key_offsets:[uint16] (required);


@@ -22,7 +22,7 @@ enum ItemType : uint8 {
table ProllyTreeNode {
// sorted array of key items
key_items:[ubyte] (required);
-// items offsets for |key_items|
+// items offets for |key_items|
// first offset is 0, last offset is len(key_items)
key_offsets:[uint16] (required);
// item type for |key_items|


@@ -47,8 +47,8 @@ const (
var (
ErrOptimisticLockFailed = errors.New("optimistic lock failed on database Root update")
ErrMergeNeeded = errors.New("dataset head is not ancestor of commit")
-ErrAlreadyComitted = errors.New("dataset head already pointing at given commit")
-ErrDirtyWorkspace = errors.New("target has uncomitted changes. --force required to overwrite")
+ErrAlreadyCommitted = errors.New("dataset head already pointing at given commit")
+ErrDirtyWorkspace = errors.New("target has uncommitted changes. --force required to overwrite")
)
// rootTracker is a narrowing of the ChunkStore interface, to keep Database disciplined about working directly with Chunks
@@ -487,7 +487,7 @@ func (db *database) doFastForward(ctx context.Context, ds Dataset, newHeadAddr h
}
if curr != (hash.Hash{}) {
if curr == h {
-return prolly.AddressMap{}, ErrAlreadyComitted
+return prolly.AddressMap{}, ErrAlreadyCommitted
}
}
@@ -573,7 +573,7 @@ func (db *database) doFastForward(ctx context.Context, ds Dataset, newHeadAddr h
return ae.Flush(ctx)
})
-if err == ErrAlreadyComitted {
+if err == ErrAlreadyCommitted {
return nil
}
@@ -655,7 +655,7 @@ func buildClassicCommitFunc(db Database, datasetID string, datasetCurrentAddr ha
return types.Map{}, ErrMergeNeeded
}
if currRef.TargetHash() == newCommitValueRef.TargetHash() {
-return types.Map{}, ErrAlreadyComitted
+return types.Map{}, ErrAlreadyCommitted
}
} else if datasetCurrentAddr != (hash.Hash{}) {
return types.Map{}, ErrMergeNeeded
@@ -682,7 +682,7 @@ func (db *database) doCommit(ctx context.Context, datasetID string, datasetCurre
}
if curr != (hash.Hash{}) {
if curr == h {
-return prolly.AddressMap{}, ErrAlreadyComitted
+return prolly.AddressMap{}, ErrAlreadyCommitted
}
}


@@ -112,7 +112,7 @@ func PrintDiff(ctx context.Context, w io.Writer, v1, v2 types.Value, leftRight b
return err
}
} else {
panic("unexpectededed Path type")
panic("unexpected Path type")
}
case types.Set:
// default values are ok


@@ -125,7 +125,7 @@ func IsValid(s string) bool {
func Parse(s string) Hash {
r, ok := MaybeParse(s)
if !ok {
-d.PanicIfError(fmt.Errorf("count not parse Hash: %s", s))
+d.PanicIfError(fmt.Errorf("could not parse Hash: %s", s))
}
return r
}


@@ -767,6 +767,6 @@ func shouldEncodeAsSet(t reflect.Type, tags nomsTags) bool {
t.Elem().Kind() == reflect.Struct &&
t.Elem().NumField() == 0
default:
panic(fmt.Errorf("called with unexpectededed kind %v", t.Kind()))
panic(fmt.Errorf("called with unexpected kind %v", t.Kind()))
}
}


@@ -52,7 +52,7 @@ func None(aChange, bChange types.DiffChangeType, a, b types.Value, path types.Pa
return change, merged, false
}
-// Ours resolves conflicts by preferring changes from the Value currently being comitted.
+// Ours resolves conflicts by preferring changes from the Value currently being committed.
func Ours(aChange, bChange types.DiffChangeType, a, b types.Value, path types.Path) (change types.DiffChangeType, merged types.Value, ok bool) {
return aChange, a, true
}


@@ -22,7 +22,7 @@ When backed by AWS, NBS stores its data mainly in S3, along with a single Dynamo
## Perf
-For the file back-end, perf is substantially better than LevelDB mainly because LDB spends substantial IO with the goal of keeping KV pairs in key-order which doesn't benenfit Noms at all. NBS locates related chunks together and thus reading data from a NBS store can be done quite a lot faster. As an example, storing & retrieving a 1.1GB MP4 video file on a MBP i5 2.9Ghz:
+For the file back-end, perf is substantially better than LevelDB mainly because LDB spends substantial IO with the goal of keeping KV pairs in key-order which doesn't benefit Noms at all. NBS locates related chunks together and thus reading data from an NBS store can be done quite a lot faster. As an example, storing & retrieving a 1.1GB MP4 video file on a MBP i5 2.9Ghz:
* LDB
* Initial import: 44 MB/s, size on disk: 1.1 GB.


@@ -150,7 +150,7 @@ func nomsFileTableReader(ctx context.Context, path string, h hash.Hash, chunkCou
if chunkCount != index.chunkCount() {
index.Close()
f.Close()
-return nil, errors.New("unexpectededed chunk count")
+return nil, errors.New("unexpected chunk count")
}
tr, err := newTableReader(index, &fileReaderAt{f, path, sz}, fileBlockSize)


@@ -99,7 +99,7 @@ import (
Looking up Chunks in an NBS Table
-There are two phashes to loading chunk data for a given Hash from an NBS Table: Checking for the chunk's presence, and fetching the chunk's bytes. When performing a has-check, only the first phase is necessary.
+There are two phases to loading chunk data for a given Hash from an NBS Table: Checking for the chunk's presence, and fetching the chunk's bytes. When performing a has-check, only the first phase is necessary.
Phase one: Chunk presence
- Slice off the first 8 bytes of your Hash to create a Prefix


@@ -322,7 +322,7 @@ The Mouse did not notice this question, but hurriedly went on, “—found it
What I was going to say, said the Dodo in an offended tone, was, that the best thing to get us dry would be a Caucus-race.
What is a Caucus-race? said Alice; not that she wanted much to know, but the Dodo had paused as if it thought that somebody ought to speak, and no one else seemed inclined to say anything.
Why, said the Dodo, the best way to explain it is to do it. (And, as you might like to try the thing yourself, some winter day, I will tell you how the Dodo managed it.)
-First it marked out a race-course, in a sort of circle, (the exact shape doesnt matter, it said,) and then all the party were placed along the course, here and there. There was no One, two, three, and away, but they began running when they liked, and left off when they liked, so that it was not easy to know when the race was over. However, when they had been running half an hour or so, and were quite dry again, the Dodo suddenly called out The race is over! and they all crowded round it, painting, and asking, But who has won?
+First it marked out a race-course, in a sort of circle, (the exact shape doesnt matter, it said,) and then all the party were placed along the course, here and there. There was no One, two, three, and away, but they began running when they liked, and left off when they liked, so that it was not easy to know when the race was over. However, when they had been running half an hour or so, and were quite dry again, the Dodo suddenly called out The race is over! and they all crowded round it, panting, and asking, But who has won?
This question the Dodo could not answer without a great deal of thought, and it sat for a long time with one finger pressed upon its forehead (the position in which you usually see Shakespeare, in the pictures of him), while the rest waited in silence. At last the Dodo said, Everybody has won, and all must have prizes.
But who is to give the prizes? quite a chorus of voices asked.
Why, she, of course, said the Dodo, pointing to Alice with one finger; and the whole party at once crowded round her, calling out in a confused way, Prizes! Prizes!
@@ -438,7 +438,7 @@ So she swallowed one of the cakes, and was delighted to find that she began shri
The first thing Ive got to do, said Alice to herself, as she wandered about in the wood, is to grow to my right size again; and the second thing is to find my way into that lovely garden. I think that will be the best plan.
It sounded an excellent plan, no doubt, and very neatly and simply arranged; the only difficulty was, that she had not the smallest idea how to set about it; and while she was peering about anxiously among the trees, a little sharp bark just over her head made her look up in a great hurry.
An enormous puppy was looking down at her with large round eyes, and feebly stretching out one paw, trying to touch her. Poor little thing! said Alice, in a coaxing tone, and she tried hard to whistle to it; but she was terribly frightened all the time at the thought that it might be hungry, in which case it would be very likely to eat her up in spite of all her coaxing.
-Hardly knowing what she did, she picked up a little bit of stick, and held it out to the puppy; whereupon the puppy jumped into the air off all its feet at once, with a yelp of delight, and rushed at the stick, and made believe to worry it; then Alice dodged behind a great thistle, to keep herself from being run over; and the moment she appeared on the other side, the puppy made another rush at the stick, and tumbled head over heels in its hurry to get hold of it; then Alice, thinking it was very like having a game of play with a cart-horse, and expecting every moment to be trampled under its feet, ran round the thistle again; then the puppy began a series of short charges at the stick, running a very little way forwards each time and a long way back, and barking hoarsely all the while, till at last it sat down a good way off, painting, with its tongue hanging out of its mouth, and its great eyes half shut.
+Hardly knowing what she did, she picked up a little bit of stick, and held it out to the puppy; whereupon the puppy jumped into the air off all its feet at once, with a yelp of delight, and rushed at the stick, and made believe to worry it; then Alice dodged behind a great thistle, to keep herself from being run over; and the moment she appeared on the other side, the puppy made another rush at the stick, and tumbled head over heels in its hurry to get hold of it; then Alice, thinking it was very like having a game of play with a cart-horse, and expecting every moment to be trampled under its feet, ran round the thistle again; then the puppy began a series of short charges at the stick, running a very little way forwards each time and a long way back, and barking hoarsely all the while, till at last it sat down a good way off, panting, with its tongue hanging out of its mouth, and its great eyes half shut.
This seemed to Alice a good opportunity for making her escape; so she set off at once, and ran till she was quite tired and out of breath, and till the puppys bark sounded quite faint in the distance.
And yet what a dear little puppy it was! said Alice, as she leant against a buttercup to rest herself, and fanned herself with one of the leaves: I should have liked teaching it tricks very much, if—if Id only been the right size to do it! Oh dear! Id nearly forgotten that Ive got to grow up again! Let me see—how is it to be managed? I suppose I ought to eat or drink something or other; but the great question is, what?
The great question certainly was, what? Alice looked all round her at the flowers and the blades of grass, but she did not see anything that looked like the right thing to eat or drink under the circumstances. There was a large mushroom growing near her, about the same height as herself; and when she had looked under it, and on both sides of it, and behind it, it occurred to her that she might as well look and see what was on the top of it.


@@ -62,22 +62,22 @@ func TestKVPCollItr(t *testing.T) {
itr := NewItr(vrw, coll)
for i := 0; i < 2; i++ {
-for _, express := range test.itrResults {
+for _, expRes := range test.itrResults {
kvp, buff, done := itr.nextForDestructiveMerge()
kval, err := kvp.Key.Value(ctx)
assert.NoError(t, err)
-if !kval.Equals(types.Uint(express.keyVal)) {
-t.Error("unexpectededed result")
+if !kval.Equals(types.Uint(expRes.keyVal)) {
+t.Error("unexpected result")
}
-if (buff != nil) != express.exhaustedBuf {
-t.Error("unexpectededed buffer result")
+if (buff != nil) != expRes.exhaustedBuf {
+t.Error("unexpected buffer result")
}
-if done != express.done {
-t.Error("unexpectededed is done value.")
+if done != expRes.done {
+t.Error("unexpected is done value.")
}
}


@@ -497,7 +497,7 @@ func TestMapMutationReadWriteCount(t *testing.T) {
return temp.NewStruct(vs.Format(), []Value{
Bool(i%2 == 0),
Float(i),
String(fmt.Sprintf("I AM A REALY REALY REAL SUPER CALIFRAGILISTICLY CRAZY-ASSED LONGTASTIC String %d", i)),
String(fmt.Sprintf("I AM A REALLY REALY REALL SUPER CALIFRAGILISTICLY CRAZY-ASSED LONGTASTIC String %d", i)),
String(fmt.Sprintf("I am a bit shorter and also more chill: %d", i)),
})
}
@@ -765,9 +765,9 @@ func TestMapHasRemove(t *testing.T) {
m, err = me.Map(context.Background())
require.NoError(t, err)
expected := []string{"a", "c", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "p", "q", "r", "s"}
-unexpectededed := []string{"b", "d", "o"}
+unexpected := []string{"b", "d", "o"}
assert.True(hasAll(m, expected...))
-assert.True(hasNone(m, unexpectededed...))
+assert.True(hasNone(m, unexpected...))
assert.True(m.Len() == uint64(len(expected)))
}


@@ -541,10 +541,10 @@ func (ms metaSequence) getChildren(ctx context.Context, start, end uint64) ([]se
}
if len(hs) == 0 {
-return seqs, nil // can occur with ptree that is fully uncomitted
+return seqs, nil // can occur with ptree that is fully uncommitted
}
-// Fetch comitted child sequences in a single batch
+// Fetch committed child sequences in a single batch
readValues, err := ms.vrw.ReadManyValues(ctx, hs)
if err != nil {


@@ -63,7 +63,7 @@ Isaac Dunham
Jaydeep Patil
Jens Gustedt
Jeremy Huntwork
-Jo-Philipp Which
+Jo-Philipp Wich
Joakim Sindholt
John Spencer
Julien Ramseier
@@ -76,7 +76,7 @@ Luca Barbato
Luka Perkov
M Farkas-Dyck (Strake)
Mahesh Bodapati
-Markus Whichmann
+Markus Wichmann
Masanori Ogino
Michael Clark
Michael Forney
@@ -163,7 +163,7 @@ under the standard MIT terms.
All other files which have no copyright comments are original works
produced specifically for use as part of this library, written either
by Rich Felker, the main author of the library, or by one or more
-contributors listed above. Details on authorship of individual files
+contibutors listed above. Details on authorship of individual files
can be found in the git version control history of the project. The
omission of copyright and license comments in each file is in the
interest of source tree size.