Mirror of https://github.com/dolthub/dolt.git, synced 2026-01-26 02:58:44 -06:00
add Blob.Splice() (#2242)
@@ -34,6 +34,27 @@ func (b Blob) Reader() io.ReadSeeker {
 	return &BlobReader{b.seq, cursor, nil, 0}
 }

+func (b Blob) Splice(idx uint64, deleteCount uint64, data []byte) Blob {
+	if deleteCount == 0 && len(data) == 0 {
+		return b
+	}
+
+	d.Chk.True(idx <= b.Len())
+	d.Chk.True(idx+deleteCount <= b.Len())
+
+	cur := newCursorAtIndex(b.seq, idx)
+	ch := newSequenceChunker(cur, nil, makeBlobLeafChunkFn(b.seq.valueReader()), newIndexedMetaSequenceChunkFn(BlobKind, b.seq.valueReader()), hashValueByte)
+	for deleteCount > 0 {
+		ch.Skip()
+		deleteCount--
+	}
+
+	for _, v := range data {
+		ch.Append(v)
+	}
+	return newBlob(ch.Done(nil).(indexedSequence))
+}
+
 // Collection interface
 func (b Blob) Len() uint64 {
 	return b.seq.numLeaves()
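
In plain-slice terms, the new Splice cuts deleteCount bytes out of the blob at idx and inserts data in their place; the sequence chunker above just does that without materializing the whole blob in memory. A minimal sketch of that equivalence on raw bytes (illustration only, not code from this commit; the helper name is made up):

func spliceBytes(b []byte, idx, deleteCount uint64, data []byte) []byte {
	// Everything before idx, then the replacement bytes, then everything
	// after the deleted range.
	out := make([]byte, 0, uint64(len(b))-deleteCount+uint64(len(data)))
	out = append(out, b[:idx]...)
	out = append(out, data...)
	out = append(out, b[idx+deleteCount:]...)
	return out
}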
@@ -134,7 +155,7 @@ func (cbr *BlobReader) updateReader() {
 	cbr.currentReader.Seek(int64(cbr.cursor.idx), 0)
 }

-func newBlobLeafChunkFn(vr ValueReader) makeChunkFn {
+func makeBlobLeafChunkFn(vr ValueReader) makeChunkFn {
 	return func(items []sequenceItem) (Collection, orderedKey, uint64) {
 		buff := make([]byte, len(items))

@@ -156,7 +177,7 @@ func NewBlob(r io.Reader) Blob {
 }

 func NewStreamingBlob(r io.Reader, vrw ValueReadWriter) Blob {
-	sc := newEmptySequenceChunker(vrw, newBlobLeafChunkFn(nil), newIndexedMetaSequenceChunkFn(BlobKind, nil), func(item sequenceItem, rv *rollingValueHasher) {
+	sc := newEmptySequenceChunker(vrw, makeBlobLeafChunkFn(nil), newIndexedMetaSequenceChunkFn(BlobKind, nil), func(item sequenceItem, rv *rollingValueHasher) {
 		rv.HashByte(item.(byte))
 	})

@@ -204,3 +204,30 @@ func TestBlobFromReaderThatReturnsDataAndError(t *testing.T) {
 	assert.True(bytes.Equal(actual.Bytes(), tr.buf.Bytes()))
 	assert.Equal(byte(2), actual.Bytes()[len(actual.Bytes())-1])
 }
+
+func TestBlobSplice(t *testing.T) {
+	assert := assert.New(t)
+
+	blob := NewEmptyBlob()
+	buf := new(bytes.Buffer)
+
+	blob = blob.Splice(0, 0, []byte("I'll do anything"))
+	buf.Reset()
+	buf.ReadFrom(blob.Reader())
+	assert.Equal(buf.String(), "I'll do anything")
+
+	blob = blob.Splice(16, 0, []byte(" for arv"))
+	buf.Reset()
+	buf.ReadFrom(blob.Reader())
+	assert.Equal(buf.String(), "I'll do anything for arv")
+
+	blob = blob.Splice(0, 0, []byte("Yes, "))
+	buf.Reset()
+	buf.ReadFrom(blob.Reader())
+	assert.Equal(buf.String(), "Yes, I'll do anything for arv")
+
+	blob = blob.Splice(5, 20, []byte("it's hard to satisfy"))
+	buf.Reset()
+	buf.ReadFrom(blob.Reader())
+	assert.Equal(buf.String(), "Yes, it's hard to satisfy arv")
+}
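
Two behaviors of the Go implementation that the test above does not exercise, written here as hypothetical follow-on assertions in the same style (not part of this commit; they assume the usual noms Value.Equals method and continue from the test's final state):

	// A splice that neither deletes nor inserts short-circuits and returns
	// the receiver unchanged.
	same := blob.Splice(3, 0, nil)
	assert.True(same.Equals(blob))

	// Deleting a trailing range with no replacement data truncates the blob.
	blob = blob.Splice(blob.Len()-4, 4, nil)
	buf.Reset()
	buf.ReadFrom(blob.Reader())
	assert.Equal(buf.String(), "Yes, it's hard to satisfy")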
@@ -58,6 +58,10 @@ func hashValueBytes(item sequenceItem, rv *rollingValueHasher) {
 	rv.HashValue(item.(Value))
 }

+func hashValueByte(item sequenceItem, rv *rollingValueHasher) {
+	rv.HashByte(item.(byte))
+}
+
 func newRollingValueHasher() *rollingValueHasher {
 	pattern, window := chunkingConfig()
 	rv := &rollingValueHasher{
@@ -1,7 +1,7 @@
 {
   "name": "@attic/noms",
   "license": "Apache-2.0",
-  "version": "56.0.1",
+  "version": "56.1.0",
   "description": "Noms JS SDK",
   "repository": "https://github.com/attic-labs/noms",
   "main": "dist/commonjs/noms.js",
@@ -258,5 +258,22 @@ suite('Blob', () => {
     assert.strictEqual(b2, b3);
     assert.isTrue(equals(b1, b2));
   });
+
+  test('Blob Splicing', async () => {
+    const a = new Blob(new Uint8Array([1, 2, 3]));
+    await assertReadFull(new Uint8Array([1, 2, 3]), a.getReader());
+
+    const b = await a.splice(3, 0, new Uint8Array([4, 5, 6]));
+    await assertReadFull(new Uint8Array([1, 2, 3, 4, 5, 6]), b.getReader());
+
+    const c = await b.splice(1, 2, new Uint8Array([23]));
+    await assertReadFull(new Uint8Array([1, 23, 4, 5, 6]), c.getReader());
+
+    const d = await c.splice(0, 0, new Uint8Array([254, 255, 0]));
+    await assertReadFull(new Uint8Array([254, 255, 0, 1, 23, 4, 5, 6]), d.getReader());
+
+    const e = await d.splice(6, 2, new Uint8Array([]));
+    await assertReadFull(new Uint8Array([254, 255, 0, 1, 23, 4]), e.getReader());
+  });
 });
 });
@@ -8,6 +8,7 @@ import * as Bytes from './bytes.js';
 import Collection from './collection.js';
 import RollingValueHasher from './rolling-value-hasher.js';
 import SequenceChunker from './sequence-chunker.js';
+import {chunkSequence} from './sequence-chunker.js';
 import type {EqualsFn} from './edit-distance.js';
 import type {ValueReader, ValueReadWriter} from './value-store.js';
 import type {makeChunkFn} from './sequence-chunker.js';
@@ -17,6 +18,7 @@ import {OrderedKey, newIndexedMetaSequenceChunkFn} from './meta-sequence.js';
 import {SequenceCursor} from './sequence.js';
 import {blobType} from './type.js';
 import {invariant} from './assert.js';
+import {hashValueByte} from './rolling-value-hasher.js';

 export default class Blob extends Collection<IndexedSequence> {
   constructor(bytes: Uint8Array) {
@@ -39,6 +41,14 @@ export default class Blob extends Collection<IndexedSequence> {
   get length(): number {
     return this.sequence.numLeaves;
   }
+
+  splice(idx: number, deleteCount: number, insert: Uint8Array): Promise<Blob> {
+    const vr = this.sequence.vr;
+    return this.sequence.newCursorAt(idx).then(cursor =>
+      chunkSequence(cursor, Array.from(insert), deleteCount, newBlobLeafChunkFn(vr),
+        newIndexedMetaSequenceChunkFn(Kind.Blob, vr, null),
+        hashValueByte)).then(s => Blob.fromSequence(s));
+  }
 }

 export class BlobReader {
@@ -43,6 +43,10 @@ export function hashValueBytes(item: Value, rv: RollingValueHasher) {
   rv.hashValue(item);
 }

+export function hashValueByte(b: number, rv: RollingValueHasher) {
+  rv.hashByte(b);
+}
+
 export default class RollingValueHasher {
   bz: BuzHash;
   enc: ValueEncoder;