diff --git a/go/store/nbs/archive_build.go b/go/store/nbs/archive_build.go
index 361b0e2977..edca1db9fb 100644
--- a/go/store/nbs/archive_build.go
+++ b/go/store/nbs/archive_build.go
@@ -130,7 +130,7 @@ func BuildArchive(ctx context.Context, cs chunks.ChunkStore, dagGroups *ChunkRel
 			return err
 		}
 	} else {
-		return errors.New("Modern DB Expected")
+		return errors.New("runtime error: GenerationalNBS Expected")
 	}
 	return nil
 }
@@ -475,7 +475,8 @@ func compressChunksInParallel(
 		}
 		cmpBuff = gozstd.CompressDict(cmpBuff[:0], c.Data(), defaultDict)
 
-		// Make a private copy of the compressed data
+		// Make a private copy of the compressed data for the result channel.
+		// Unfortunate alloc. Could use a pool of buffers if this proves to be a bottleneck.
 		privateCopy := make([]byte, len(cmpBuff))
 		copy(privateCopy, cmpBuff)
 		resultCh <- compressedChunk{h: h, data: privateCopy}
diff --git a/integration-tests/bats/archive.bats b/integration-tests/bats/archive.bats
index d39cfcf598..f9a885032c 100755
--- a/integration-tests/bats/archive.bats
+++ b/integration-tests/bats/archive.bats
@@ -50,7 +50,7 @@ UPDATE tbl SET guid = UUID() WHERE i >= @random_id LIMIT 1;"
 
 # A series of 10 update-and-commit-then-insert-and-commit pairs, followed by a dolt_gc call
 #
-# This is useful because we need at least 25 retained chunks to create a commit.
+# This is useful because we need at least 25 retained chunks to create an archive.
 mutations_and_gc_statement() {
     query=`update_statement`
     for ((j=1; j<=9; j++))
@@ -72,14 +72,27 @@ mutations_and_gc_statement() {
     [[ "$output" =~ "Not enough samples to build default dictionary" ]] || false
 }
 
-@test "archive: require gc first" {
-    run dolt archive
-    [ "$status" -eq 1 ]
-    [[ "$output" =~ "Run 'dolt gc' first" ]] || false
+@test "archive: single archive oldgen" {
+    dolt sql -q "$(mutations_and_gc_statement)"
+    dolt archive
+
+    files=$(find . -name "*darc" | wc -l | sed 's/[ \t]//g')
+    [ "$files" -eq "1" ]
+
+    # Ensure updates continue to work.
+    dolt sql -q "$(update_statement)"
 }
 
-@test "archive: single archive" {
+@test "archive: single archive newgen" {
     dolt sql -q "$(mutations_and_gc_statement)"
+
+    mkdir remote
+    dolt remote add origin file://remote
+    dolt push origin main
+
+    dolt clone file://remote cloned
+    cd cloned
+
     dolt archive
 
     files=$(find . -name "*darc" | wc -l | sed 's/[ \t]//g')
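
A minimal sketch of the buffer-pool idea raised in the new comment above ("Could use a pool of buffers if this proves to be a bottleneck"). This is an assumption, not part of the change: compress and release are hypothetical names, and the append stands in for the real gozstd.CompressDict call. The consumer reading from the result channel would be responsible for returning each buffer to the pool.

package main

import (
	"fmt"
	"sync"
)

// bufPool recycles compression output buffers so that each chunk sent to
// the result channel does not force a fresh allocation. Pointers to slices
// are pooled rather than slices themselves, which avoids an extra
// allocation on every Put.
var bufPool = sync.Pool{
	New: func() any {
		b := make([]byte, 0, 64*1024)
		return &b
	},
}

// compress copies src into a pooled buffer; the append is a stand-in for
// the real gozstd.CompressDict call in compressChunksInParallel.
func compress(src []byte) *[]byte {
	bp := bufPool.Get().(*[]byte)
	*bp = append((*bp)[:0], src...)
	return bp
}

// release hands a buffer back to the pool once its bytes have been consumed.
func release(bp *[]byte) {
	bufPool.Put(bp)
}

func main() {
	out := compress([]byte("example chunk payload"))
	fmt.Printf("pooled buffer holds %d bytes\n", len(*out))
	release(out) // the result-channel consumer would call this after writing
}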