go: store,remotesrv: Improve logging. Improve gRPC error statuses for certain errors that arise after a GC.

This commit is contained in:
Aaron Son
2025-02-21 11:31:23 -08:00
parent 97fd0f8112
commit e4a4b5dadb
5 changed files with 37 additions and 18 deletions

View File

@@ -36,6 +36,7 @@ import (
"github.com/dolthub/dolt/go/libraries/utils/filesys"
"github.com/dolthub/dolt/go/store/chunks"
"github.com/dolthub/dolt/go/store/hash"
"github.com/dolthub/dolt/go/store/nbs"
"github.com/dolthub/dolt/go/store/types"
)
@@ -97,7 +98,7 @@ func (rs *RemoteChunkStore) HasChunks(ctx context.Context, req *remotesapi.HasCh
}
repoPath := getRepoPath(req)
logger = logger.WithField(RepoPathField, repoPath)
defer func() { logger.Info("finished") }()
defer func() { logger.Trace("finished") }()
cs, err := rs.getStore(ctx, logger, repoPath)
if err != nil {
@@ -155,7 +156,7 @@ func (rs *RemoteChunkStore) GetDownloadLocations(ctx context.Context, req *remot
}
repoPath := getRepoPath(req)
logger = logger.WithField(RepoPathField, repoPath)
defer func() { logger.Info("finished") }()
defer func() { logger.Trace("finished") }()
cs, err := rs.getStore(ctx, logger, repoPath)
if err != nil {
@@ -233,7 +234,7 @@ func (rs *RemoteChunkStore) StreamDownloadLocations(stream remotesapi.ChunkStore
"num_requested": numHashes,
"num_urls": numUrls,
"num_ranges": numRanges,
}).Info("finished")
}).Trace("finished")
}()
logger := ologger
@@ -387,7 +388,7 @@ func (rs *RemoteChunkStore) GetUploadLocations(ctx context.Context, req *remotes
}
repoPath := getRepoPath(req)
logger = logger.WithField(RepoPathField, repoPath)
defer func() { logger.Info("finished") }()
defer func() { logger.Trace("finished") }()
_, err := rs.getStore(ctx, logger, repoPath)
if err != nil {
@@ -445,7 +446,7 @@ func (rs *RemoteChunkStore) Rebase(ctx context.Context, req *remotesapi.RebaseRe
}
repoPath := getRepoPath(req)
logger = logger.WithField(RepoPathField, repoPath)
defer func() { logger.Info("finished") }()
defer func() { logger.Trace("finished") }()
_, err := rs.getStore(ctx, logger, repoPath)
if err != nil {
@@ -462,7 +463,7 @@ func (rs *RemoteChunkStore) Root(ctx context.Context, req *remotesapi.RootReques
}
repoPath := getRepoPath(req)
logger = logger.WithField(RepoPathField, repoPath)
defer func() { logger.Info("finished") }()
defer func() { logger.Trace("finished") }()
cs, err := rs.getStore(ctx, logger, repoPath)
if err != nil {
@@ -485,7 +486,7 @@ func (rs *RemoteChunkStore) Commit(ctx context.Context, req *remotesapi.CommitRe
}
repoPath := getRepoPath(req)
logger = logger.WithField(RepoPathField, repoPath)
defer func() { logger.Info("finished") }()
defer func() { logger.Trace("finished") }()
cs, err := rs.getStore(ctx, logger, repoPath)
if err != nil {
@@ -500,7 +501,11 @@ func (rs *RemoteChunkStore) Commit(ctx context.Context, req *remotesapi.CommitRe
err = cs.AddTableFilesToManifest(ctx, updates, rs.getAddrs(cs.Version()))
if err != nil {
logger.WithError(err).Error("error calling AddTableFilesToManifest")
return nil, status.Errorf(codes.Internal, "manifest update error: %v", err)
code := codes.Internal
if errors.Is(err, nbs.ErrDanglingRef) || errors.Is(err, nbs.ErrTableFileNotFound) {
code = codes.FailedPrecondition
}
return nil, status.Errorf(code, "manifest update error: %v", err)
}
currHash := hash.New(req.Current)
@@ -513,7 +518,11 @@ func (rs *RemoteChunkStore) Commit(ctx context.Context, req *remotesapi.CommitRe
"last_hash": lastHash.String(),
"curr_hash": currHash.String(),
}).Error("error calling Commit")
return nil, status.Errorf(codes.Internal, "failed to commit: %v", err)
code := codes.Internal
if errors.Is(err, nbs.ErrDanglingRef) || errors.Is(err, nbs.ErrTableFileNotFound) {
code = codes.FailedPrecondition
}
return nil, status.Errorf(code, "failed to commit: %v", err)
}
logger.Tracef("Commit success; moved from %s -> %s", lastHash.String(), currHash.String())
@@ -528,7 +537,7 @@ func (rs *RemoteChunkStore) GetRepoMetadata(ctx context.Context, req *remotesapi
repoPath := getRepoPath(req)
logger = logger.WithField(RepoPathField, repoPath)
defer func() { logger.Info("finished") }()
defer func() { logger.Trace("finished") }()
cs, err := rs.getOrCreateStore(ctx, logger, repoPath, req.ClientRepoFormat.NbfVersion)
if err != nil {
@@ -556,7 +565,7 @@ func (rs *RemoteChunkStore) ListTableFiles(ctx context.Context, req *remotesapi.
}
repoPath := getRepoPath(req)
logger = logger.WithField(RepoPathField, repoPath)
defer func() { logger.Info("finished") }()
defer func() { logger.Trace("finished") }()
cs, err := rs.getStore(ctx, logger, repoPath)
if err != nil {
@@ -634,7 +643,7 @@ func (rs *RemoteChunkStore) AddTableFiles(ctx context.Context, req *remotesapi.A
}
repoPath := getRepoPath(req)
logger = logger.WithField(RepoPathField, repoPath)
defer func() { logger.Info("finished") }()
defer func() { logger.Trace("finished") }()
cs, err := rs.getStore(ctx, logger, repoPath)
if err != nil {
@@ -649,7 +658,11 @@ func (rs *RemoteChunkStore) AddTableFiles(ctx context.Context, req *remotesapi.A
err = cs.AddTableFilesToManifest(ctx, updates, rs.getAddrs(cs.Version()))
if err != nil {
logger.WithError(err).Error("error occurred updating the manifest")
return nil, status.Error(codes.Internal, "manifest update error")
code := codes.Internal
if errors.Is(err, nbs.ErrDanglingRef) || errors.Is(err, nbs.ErrTableFileNotFound) {
code = codes.FailedPrecondition
}
return nil, status.Error(code, "manifest update error")
}
logger = logger.WithFields(logrus.Fields{
@@ -707,7 +720,7 @@ func getReqLogger(lgr *logrus.Entry, method string) *logrus.Entry {
"method": method,
"request_num": strconv.Itoa(incReqId()),
})
lgr.Info("starting request")
lgr.Trace("starting request")
return lgr
}

View File

@@ -66,7 +66,7 @@ func newFileHandler(lgr *logrus.Entry, dbCache DBCache, fs filesys.Filesys, read
func (fh filehandler) ServeHTTP(respWr http.ResponseWriter, req *http.Request) {
logger := getReqLogger(fh.lgr, req.Method+"_"+req.RequestURI)
defer func() { logger.Info("finished") }()
defer func() { logger.Trace("finished") }()
var err error
req.URL, err = fh.sealer.Unseal(req.URL)

View File

@@ -16,6 +16,7 @@ package sqle
import (
"context"
"errors"
"io"
"sync"
"time"
@@ -27,6 +28,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dprocedures"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dsess"
"github.com/dolthub/dolt/go/store/chunks"
"github.com/dolthub/dolt/go/store/datas"
"github.com/dolthub/dolt/go/store/types"
)
@@ -147,7 +149,9 @@ func (c *AutoGCController) doWork(ctx context.Context, work autoGCWork, ctxF fun
defer sql.SessionCommandEnd(sqlCtx.Session)
err = dprocedures.RunDoltGC(sqlCtx, work.db, types.GCModeDefault, work.name)
if err != nil {
c.lgr.Warnf("sqle/auto_gc: Attempt to auto GC database %s failed with error: %v", work.name, err)
if !errors.Is(err, chunks.ErrNothingToCollect) {
c.lgr.Warnf("sqle/auto_gc: Attempt to auto GC database %s failed with error: %v", work.name, err)
}
return
}
c.lgr.Infof("sqle/auto_gc: Successfully completed auto GC of database %s in %v", work.name, time.Since(start))

View File

@@ -33,6 +33,8 @@ import (
"github.com/dolthub/dolt/go/store/hash"
)
var ErrTableFileNotFound = errors.New("table file not found")
type fileTableReader struct {
tableReader
h hash.Hash
@@ -81,7 +83,7 @@ func newFileTableReader(ctx context.Context, dir string, h hash.Hash, chunkCount
} else if afExists {
return newArchiveChunkSource(ctx, dir, h, chunkCount, q)
}
return nil, errors.New(fmt.Sprintf("table file %s/%s not found", dir, h.String()))
return nil, fmt.Errorf("error opening table file: %w: %s/%s", ErrTableFileNotFound, dir, h.String())
}
func nomsFileTableReader(ctx context.Context, path string, h hash.Hash, chunkCount uint32, q MemoryQuotaProvider) (cs chunkSource, err error) {

View File

@@ -1735,7 +1735,7 @@ func refCheckAllSources(ctx context.Context, nbs *NomsBlockStore, getAddrs chunk
checkErr = err
}
if len(remaining) > 0 {
checkErr = fmt.Errorf("%w, missing: %v", errors.New("cannot add table files; referenced chunk not found in store."), remaining)
checkErr = fmt.Errorf("cannot add table files: %w, missing: %v", ErrTableFileNotFound, remaining)
}
}
for _, source := range sources {