Make all ChunkStore impls crash on Write() after Close/Ref()

I incorrectly said this was already true in a previous patch, so
make it true now!

Also, update file_store_test.go and s3_store_test.go to use stretchr/testify
suites so they can share setup and teardown code.

Fixes issue #5 as well
Chris Masone
2015-07-10 11:30:32 -07:00
parent 96f21c4a60
commit 6c3047cf03
5 changed files with 168 additions and 169 deletions
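
For context, here is a minimal standalone sketch of the guard pattern the diffs below adopt (hypothetical chunkWriter type, not the actual FileStore/S3Store code): the temp file is created eagerly in Put(), and Close() nils it out (Ref() does the same in the real writers), so a later Write() fails loudly instead of silently starting a fresh temp file.

package main

import (
	"fmt"
	"io"
	"io/ioutil"
	"os"
)

// chunkWriter mimics the guard added in this commit: once file is nil,
// Write() panics instead of lazily creating a new temp file.
type chunkWriter struct {
	file   *os.File
	writer io.Writer
}

func (w *chunkWriter) Write(data []byte) (int, error) {
	if w.file == nil {
		panic("Write() cannot be called after Ref() or Close().")
	}
	return w.writer.Write(data)
}

func (w *chunkWriter) Close() error {
	err := w.file.Close()
	w.file = nil
	w.writer = nil
	return err
}

func main() {
	f, err := ioutil.TempFile(os.TempDir(), "")
	if err != nil {
		panic(err)
	}
	w := &chunkWriter{file: f, writer: f}
	w.Write([]byte("abc"))
	w.Close()
	defer func() { fmt.Println("recovered:", recover()) }()
	w.Write([]byte("def")) // panics, which is what the new tests assert with suite.Panics
}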

View File

@@ -75,9 +75,14 @@ func (f FileStore) Get(ref ref.Ref) (io.ReadCloser, error) {
}
func (f FileStore) Put() ChunkWriter {
fyle, err := ioutil.TempFile(os.TempDir(), "")
Chk.NoError(err)
h := ref.NewHash()
return &fileChunkWriter{
root: f.dir,
hash: ref.NewHash(),
file: fyle,
writer: io.MultiWriter(fyle, h),
hash: h,
rename: f.rename,
}
}
@@ -91,12 +96,7 @@ type fileChunkWriter struct {
}
func (w *fileChunkWriter) Write(data []byte) (int, error) {
if w.file == nil {
f, err := ioutil.TempFile(os.TempDir(), "")
Chk.NoError(err)
w.file = f
w.writer = io.MultiWriter(f, w.hash)
}
Chk.NotNil(w.file, "Write() cannot be called after Ref() or Close().")
return w.writer.Write(data)
}

View File

@@ -7,149 +7,161 @@ import (
"testing"
"github.com/attic-labs/noms/ref"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
)
func TestFileStorePut(t *testing.T) {
assert := assert.New(t)
dir, err := ioutil.TempDir(os.TempDir(), "")
defer os.Remove(dir)
assert.NoError(err)
func TestFileStoreTestSuite(t *testing.T) {
suite.Run(t, new(FileStoreTestSuite))
}
type FileStoreTestSuite struct {
suite.Suite
Dir string
Store FileStore
}
func (suite *FileStoreTestSuite) SetupTest() {
var err error
suite.Dir, err = ioutil.TempDir(os.TempDir(), "")
suite.NoError(err)
suite.Store = NewFileStore(suite.Dir, "root")
}
func (suite *FileStoreTestSuite) TearDownTest() {
os.Remove(suite.Dir)
}
func (suite *FileStoreTestSuite) TestFileStorePut() {
input := "abc"
s := NewFileStore(dir, "root")
w := s.Put()
_, err = w.Write([]byte(input))
assert.NoError(err)
w := suite.Store.Put()
_, err := w.Write([]byte(input))
suite.NoError(err)
ref, err := w.Ref()
assert.NoError(err)
suite.NoError(err)
// See http://www.di-mgt.com.au/sha_testvectors.html
assert.Equal("sha1-a9993e364706816aba3e25717850c26c9cd0d89d", ref.String())
suite.Equal("sha1-a9993e364706816aba3e25717850c26c9cd0d89d", ref.String())
// There should also be a file there now...
p := path.Join(dir, "sha1", "a9", "99", ref.String())
p := path.Join(suite.Dir, "sha1", "a9", "99", ref.String())
f, err := os.Open(p)
assert.NoError(err)
suite.NoError(err)
data, err := ioutil.ReadAll(f)
assert.NoError(err)
assert.Equal(input, string(data))
suite.NoError(err)
suite.Equal(input, string(data))
// And reading it via the API should work...
assertInputInStore(input, ref, s, assert)
assertInputInStore(input, ref, suite.Store, suite.Assert())
}
func TestFileStorePutWithRefAfterClose(t *testing.T) {
assert := assert.New(t)
dir, err := ioutil.TempDir(os.TempDir(), "")
defer os.Remove(dir)
assert.NoError(err)
func (suite *FileStoreTestSuite) TestFileStoreWriteAfterCloseFails() {
input := "abc"
s := NewFileStore(dir, "root")
w := s.Put()
_, err = w.Write([]byte(input))
assert.NoError(err)
w := suite.Store.Put()
_, err := w.Write([]byte(input))
suite.NoError(err)
assert.NoError(w.Close())
ref, err := w.Ref() // Ref() after Close() should work...
assert.NoError(err)
// And reading the data via the API should work...
assertInputInStore(input, ref, &s, assert)
suite.NoError(w.Close())
suite.Panics(func() { w.Write([]byte(input)) }, "Write() after Close() should barf!")
}
func TestFileStorePutWithMultipleRef(t *testing.T) {
assert := assert.New(t)
dir, err := ioutil.TempDir(os.TempDir(), "")
defer os.Remove(dir)
assert.NoError(err)
func (suite *FileStoreTestSuite) TestFileStoreWriteAfterRefFails() {
input := "abc"
s := NewFileStore(dir, "root")
w := s.Put()
_, err = w.Write([]byte(input))
assert.NoError(err)
w := suite.Store.Put()
_, err := w.Write([]byte(input))
suite.NoError(err)
_, _ = w.Ref()
assert.NoError(err)
ref, err := w.Ref() // Multiple calls to Ref() should work...
assert.NoError(err)
suite.NoError(err)
suite.Panics(func() { w.Write([]byte(input)) }, "Write() after Ref() should barf!")
}
func (suite *FileStoreTestSuite) TestFileStorePutWithRefAfterClose() {
input := "abc"
w := suite.Store.Put()
_, err := w.Write([]byte(input))
suite.NoError(err)
suite.NoError(w.Close())
ref, err := w.Ref() // Ref() after Close() should work...
suite.NoError(err)
// And reading the data via the API should work...
assertInputInStore(input, ref, &s, assert)
assertInputInStore(input, ref, suite.Store, suite.Assert())
}
func TestFileStoreRoot(t *testing.T) {
assert := assert.New(t)
dir, err := ioutil.TempDir(os.TempDir(), "")
defer os.Remove(dir)
assert.NoError(err)
func (suite *FileStoreTestSuite) TestFileStorePutWithMultipleRef() {
input := "abc"
w := suite.Store.Put()
_, err := w.Write([]byte(input))
suite.NoError(err)
s := NewFileStore(dir, "root")
oldRoot := s.Root()
assert.Equal(oldRoot, ref.Ref{})
_, _ = w.Ref()
suite.NoError(err)
ref, err := w.Ref() // Multiple calls to Ref() should work...
suite.NoError(err)
// And reading the data via the API should work...
assertInputInStore(input, ref, suite.Store, suite.Assert())
}
func (suite *FileStoreTestSuite) TestFileStoreRoot() {
oldRoot := suite.Store.Root()
suite.Equal(oldRoot, ref.Ref{})
// Root file should be absent
f, err := os.Open(path.Join(dir, "root"))
assert.True(os.IsNotExist(err))
f, err := os.Open(path.Join(suite.Dir, "root"))
suite.True(os.IsNotExist(err))
bogusRoot, err := ref.Parse("sha1-81c870618113ba29b6f2b396ea3a69c6f1d626c5") // sha1("Bogus, Dude")
assert.NoError(err)
suite.NoError(err)
newRoot, err := ref.Parse("sha1-907d14fb3af2b0d4f18c2d46abe8aedce17367bd") // sha1("Hello, World")
assert.NoError(err)
suite.NoError(err)
// Try to update root with bogus oldRoot
result := s.UpdateRoot(newRoot, bogusRoot)
assert.False(result)
result := suite.Store.UpdateRoot(newRoot, bogusRoot)
suite.False(result)
// Root file should now be there, but should be empty
f, err = os.Open(path.Join(dir, "root"))
assert.NoError(err)
f, err = os.Open(path.Join(suite.Dir, "root"))
suite.NoError(err)
input, err := ioutil.ReadAll(f)
assert.Equal(len(input), 0)
suite.Equal(len(input), 0)
// Now do a valid root update
result = s.UpdateRoot(newRoot, oldRoot)
assert.True(result)
result = suite.Store.UpdateRoot(newRoot, oldRoot)
suite.True(result)
// Root file should now contain "Hello, World" sha1
f, err = os.Open(path.Join(dir, "root"))
assert.NoError(err)
f, err = os.Open(path.Join(suite.Dir, "root"))
suite.NoError(err)
input, err = ioutil.ReadAll(f)
assert.NoError(err)
assert.Equal("sha1-907d14fb3af2b0d4f18c2d46abe8aedce17367bd", string(input))
suite.NoError(err)
suite.Equal("sha1-907d14fb3af2b0d4f18c2d46abe8aedce17367bd", string(input))
}
func TestFileStorePutExisting(t *testing.T) {
assert := assert.New(t)
dir, err := ioutil.TempDir(os.TempDir(), "")
defer os.Remove(dir)
assert.NoError(err)
func (suite *FileStoreTestSuite) TestFileStorePutExisting() {
input := "abc"
s := NewFileStore(dir, "root")
renameCount := 0
s.rename = func(oldPath, newPath string) error {
renameCount += 1
suite.Store.rename = func(oldPath, newPath string) error {
renameCount++
return os.Rename(oldPath, newPath)
}
write := func() {
w := s.Put()
_, err = w.Write([]byte(input))
assert.NoError(err)
_, err := w.Ref()
assert.NoError(err)
w := suite.Store.Put()
_, err := w.Write([]byte(input))
suite.NoError(err)
_, err = w.Ref()
suite.NoError(err)
}
write()
assert.Equal(1, renameCount)
suite.Equal(1, renameCount)
write()
// Shouldn't have written the second time.
assert.Equal(1, renameCount)
suite.Equal(1, renameCount)
}

View File

@@ -65,8 +65,6 @@ func (w *memoryChunkWriter) Ref() (ref.Ref, error) {
}
func (w *memoryChunkWriter) Close() error {
// Not really necessary, but this will at least free memory and cause subsequent operations to crash.
// BUG 17: Make this method consistent with other ChunkStore implementations.
*w = memoryChunkWriter{}
return nil
}
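
A hypothetical sketch of the BUG 17 follow-up (not part of this commit): because Close() zeroes the struct above, a buffer-style field would already be nil afterwards, so memoryChunkWriter could grow the same assertion the file and S3 writers now have. The buffer field name below is an assumption, not the real one.

func (w *memoryChunkWriter) Write(data []byte) (int, error) {
	// buffer is an assumed field name; *w = memoryChunkWriter{} in Close() would nil it.
	Chk.NotNil(w.buffer, "Write() cannot be called after Ref() or Close().")
	return w.buffer.Write(data)
}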

View File

@@ -122,9 +122,14 @@ func (s S3Store) Get(ref ref.Ref) (io.ReadCloser, error) {
}
func (s S3Store) Put() ChunkWriter {
f, err := ioutil.TempFile(os.TempDir(), "")
Chk.NoError(err)
h := ref.NewHash()
return &s3ChunkWriter{
store: s,
hash: ref.NewHash(),
store: s,
file: f,
writer: io.MultiWriter(f, h),
hash: h,
}
}
@@ -136,12 +141,7 @@ type s3ChunkWriter struct {
}
func (w *s3ChunkWriter) Write(data []byte) (int, error) {
if w.file == nil {
f, err := ioutil.TempFile(os.TempDir(), "")
Chk.NoError(err)
w.file = f
w.writer = io.MultiWriter(f, w.hash)
}
Chk.NotNil(w.file, "Write() cannot be called after Ref() or Close().")
return w.writer.Write(data)
}

View File

@@ -10,9 +10,27 @@ import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/dynamodb"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
)
func TestS3StoreTestSuite(t *testing.T) {
suite.Run(t, new(S3StoreTestSuite))
}
type S3StoreTestSuite struct {
suite.Suite
Store S3Store
}
func (suite *S3StoreTestSuite) SetupTest() {
suite.Store = S3Store{
"bucket",
"table",
&mockS3{},
nil,
}
}
type mockS3 map[string][]byte
func (m *mockS3) GetObject(input *s3.GetObjectInput) (*s3.GetObjectOutput, error) {
@@ -32,90 +50,63 @@ func (m *mockS3) PutObject(input *s3.PutObjectInput) (*s3.PutObjectOutput, error
return nil, nil
}
func TestS3StorePut(t *testing.T) {
assert := assert.New(t)
func (suite *S3StoreTestSuite) TestS3StorePut() {
input := "abc"
s := S3Store{
"bucket",
"table",
&mockS3{},
nil,
}
w := s.Put()
w := suite.Store.Put()
_, err := w.Write([]byte(input))
assert.NoError(err)
suite.NoError(err)
r1, err := w.Ref()
assert.NoError(err)
suite.NoError(err)
// See http://www.di-mgt.com.au/sha_testvectors.html
assert.Equal("sha1-a9993e364706816aba3e25717850c26c9cd0d89d", r1.String())
suite.Equal("sha1-a9993e364706816aba3e25717850c26c9cd0d89d", r1.String())
// And reading it via the API should work...
assertInputInStore(input, r1, s, assert)
assertInputInStore(input, r1, suite.Store, suite.Assert())
// Reading a non-existing ref fails
hash := ref.NewHash()
hash.Write([]byte("Non-existent"))
_, err = s.Get(ref.FromHash(hash))
assert.Error(err)
_, err = suite.Store.Get(ref.FromHash(hash))
suite.Error(err)
}
func TestS3StorePutRefAfterClose(t *testing.T) {
assert := assert.New(t)
func (suite *S3StoreTestSuite) TestS3StorePutRefAfterClose() {
input := "abc"
s := S3Store{
"bucket",
"table",
&mockS3{},
nil,
}
w := s.Put()
w := suite.Store.Put()
_, err := w.Write([]byte(input))
assert.NoError(err)
suite.NoError(err)
assert.NoError(w.Close())
suite.NoError(w.Close())
r1, err := w.Ref()
assert.NoError(err)
suite.NoError(err)
// See http://www.di-mgt.com.au/sha_testvectors.html
assert.Equal("sha1-a9993e364706816aba3e25717850c26c9cd0d89d", r1.String())
suite.Equal("sha1-a9993e364706816aba3e25717850c26c9cd0d89d", r1.String())
// And reading it via the API should work...
assertInputInStore(input, r1, s, assert)
assertInputInStore(input, r1, suite.Store, suite.Assert())
}
func TestS3StorePutMultiRef(t *testing.T) {
assert := assert.New(t)
func (suite *S3StoreTestSuite) TestS3StorePutMultiRef() {
input := "abc"
s := S3Store{
"bucket",
"table",
&mockS3{},
nil,
}
w := s.Put()
w := suite.Store.Put()
_, err := w.Write([]byte(input))
assert.NoError(err)
suite.NoError(err)
_, _ = w.Ref()
r1, err := w.Ref()
assert.NoError(err)
suite.NoError(err)
// See http://www.di-mgt.com.au/sha_testvectors.html
assert.Equal("sha1-a9993e364706816aba3e25717850c26c9cd0d89d", r1.String())
suite.Equal("sha1-a9993e364706816aba3e25717850c26c9cd0d89d", r1.String())
// And reading it via the API should work...
assertInputInStore(input, r1, s, assert)
assertInputInStore(input, r1, suite.Store, suite.Assert())
}
type mockAWSError string
@@ -153,38 +144,36 @@ func (m *mockDDB) PutItem(input *dynamodb.PutItemInput) (*dynamodb.PutItemOutput
return &dynamodb.PutItemOutput{}, nil
}
func TestS3StoreRoot(t *testing.T) {
assert := assert.New(t)
func (suite *S3StoreTestSuite) TestS3StoreRoot() {
m := mockDDB("")
s := S3Store{
suite.Store = S3Store{
"bucket",
"table",
nil,
&m,
}
oldRoot := s.Root()
assert.Equal(oldRoot, ref.Ref{})
oldRoot := suite.Store.Root()
suite.Equal(oldRoot, ref.Ref{})
bogusRoot, err := ref.Parse("sha1-81c870618113ba29b6f2b396ea3a69c6f1d626c5") // sha1("Bogus, Dude")
assert.NoError(err)
suite.NoError(err)
newRoot, err := ref.Parse("sha1-907d14fb3af2b0d4f18c2d46abe8aedce17367bd") // sha1("Hello, World")
assert.NoError(err)
suite.NoError(err)
// Try to update root with bogus oldRoot
result := s.UpdateRoot(newRoot, bogusRoot)
assert.False(result)
assert.Equal(ref.Ref{}, s.Root())
result := suite.Store.UpdateRoot(newRoot, bogusRoot)
suite.False(result)
suite.Equal(ref.Ref{}, suite.Store.Root())
// Now do a valid update
result = s.UpdateRoot(newRoot, oldRoot)
assert.True(result)
assert.Equal(s.Root(), newRoot)
result = suite.Store.UpdateRoot(newRoot, oldRoot)
suite.True(result)
suite.Equal(suite.Store.Root(), newRoot)
// Now that there is a valid root, try to start a new lineage
result = s.UpdateRoot(bogusRoot, ref.Ref{})
assert.False(result)
assert.Equal(s.Root(), newRoot)
result = suite.Store.UpdateRoot(bogusRoot, ref.Ref{})
suite.False(result)
suite.Equal(suite.Store.Root(), newRoot)
}