diff --git a/chunks/aws_store.go b/chunks/aws_store.go
index e56927e8dd..aac7c55693 100644
--- a/chunks/aws_store.go
+++ b/chunks/aws_store.go
@@ -164,7 +164,7 @@ func (w *awsChunkWriter) Close() error {
 	_, err = w.store.awsSvc.HeadObject(&s3.HeadObjectInput{
 		Bucket: bucket,
-		Key: key,
+		Key:    key,
 	})
 	if err == nil {
 		// Nothing to do, s3 already has this chunk
@@ -193,14 +193,17 @@ type awsStoreFlags struct {
 	awsSecret *string
 }
 
-func awsFlags() awsStoreFlags {
+func awsFlags(prefix string) awsStoreFlags {
+	if prefix != "" {
+		prefix += "-"
+	}
 	return awsStoreFlags{
-		flag.String("aws-store-bucket", "", "aws bucket to create an aws-based chunkstore in"),
-		flag.String("aws-store-dynamo-table", "noms-root", "dynamodb table to store the root of the aws-based chunkstore in"),
-		flag.String("aws-store-region", "us-west-2", "aws region to put the aws-based chunkstore in"),
-		flag.Bool("aws-store-auth-from-env", false, "creates the aws-based chunkstore from authorization found in the environment. This is typically used in production to get keys from IAM profile. If not specified, then -aws-store-key and aws-store-secret must be specified instead"),
-		flag.String("aws-store-key", "", "aws key to use to create the aws-based chunkstore"),
-		flag.String("aws-store-secret", "", "aws secret to use to create the aws-based chunkstore"),
+		flag.String(prefix+"aws-store-bucket", "", "aws bucket to create an aws-based chunkstore in"),
+		flag.String(prefix+"aws-store-dynamo-table", "noms-root", "dynamodb table to store the root of the aws-based chunkstore in"),
+		flag.String(prefix+"aws-store-region", "us-west-2", "aws region to put the aws-based chunkstore in"),
+		flag.Bool(prefix+"aws-store-auth-from-env", false, "creates the aws-based chunkstore from authorization found in the environment. This is typically used in production to get keys from IAM profile. If not specified, then -aws-store-key and aws-store-secret must be specified instead"),
+		flag.String(prefix+"aws-store-key", "", "aws key to use to create the aws-based chunkstore"),
+		flag.String(prefix+"aws-store-secret", "", "aws secret to use to create the aws-based chunkstore"),
 	}
 }
 
diff --git a/chunks/chunk_store.go b/chunks/chunk_store.go
index 218a978bab..1f3c735307 100644
--- a/chunks/chunk_store.go
+++ b/chunks/chunk_store.go
@@ -39,12 +39,12 @@ type Flags struct {
 	nop    nopStoreFlags
 }
 
-func NewFlags() Flags {
+func NewFlags(prefix string) Flags {
 	return Flags{
-		awsFlags(),
-		fileFlags(),
-		memoryFlags(),
-		nopFlags(),
+		awsFlags(prefix),
+		fileFlags(prefix),
+		memoryFlags(prefix),
+		nopFlags(prefix),
 	}
 }
 
diff --git a/chunks/file_store.go b/chunks/file_store.go
index 708271d019..78971f0b9c 100644
--- a/chunks/file_store.go
+++ b/chunks/file_store.go
@@ -150,10 +150,13 @@ type fileStoreFlags struct {
 	root *string
 }
 
-func fileFlags() fileStoreFlags {
+func fileFlags(prefix string) fileStoreFlags {
+	if prefix != "" {
+		prefix += "-"
+	}
 	return fileStoreFlags{
-		flag.String("file-store", "", "directory to use for a file-based chunkstore"),
-		flag.String("file-store-root", "root", "filename which holds the root ref in the filestore"),
+		flag.String(prefix+"file-store", "", "directory to use for a file-based chunkstore"),
+		flag.String(prefix+"file-store-root", "root", "filename which holds the root ref in the filestore"),
 	}
 }
 
diff --git a/chunks/memory_store.go b/chunks/memory_store.go
index 0915d50dff..c41febe2a4 100644
--- a/chunks/memory_store.go
+++ b/chunks/memory_store.go
@@ -68,9 +68,12 @@ type memoryStoreFlags struct {
 	use *bool
 }
 
-func memoryFlags() memoryStoreFlags {
+func memoryFlags(prefix string) memoryStoreFlags {
+	if prefix != "" {
+		prefix += "-"
+	}
 	return memoryStoreFlags{
-		flag.Bool("memory-store", false, "use a memory-based (ephemeral, and private to this application) chunkstore"),
+		flag.Bool(prefix+"memory-store", false, "use a memory-based (ephemeral, and private to this application) chunkstore"),
 	}
 }
 
diff --git a/chunks/nop_store.go b/chunks/nop_store.go
index 75ff0df5bb..c430edabc9 100644
--- a/chunks/nop_store.go
+++ b/chunks/nop_store.go
@@ -40,9 +40,12 @@ type nopStoreFlags struct {
 	use *bool
 }
 
-func nopFlags() nopStoreFlags {
+func nopFlags(prefix string) nopStoreFlags {
+	if prefix != "" {
+		prefix += "-"
+	}
 	return nopStoreFlags{
-		flag.Bool("nop-store", false, "use a /dev/null-esque chunkstore"),
+		flag.Bool(prefix+"nop-store", false, "use a /dev/null-esque chunkstore"),
 	}
 }
 
diff --git a/clients/pitchmap/index/index.go b/clients/pitchmap/index/index.go
index dbb750dcd5..ea5f5a0cba 100644
--- a/clients/pitchmap/index/index.go
+++ b/clients/pitchmap/index/index.go
@@ -145,7 +145,7 @@ func getIndex(input types.List) MapOfStringToListOfPitch {
 }
 
 func main() {
-	csFlags := chunks.NewFlags()
+	csFlags := chunks.NewFlags("")
 	flag.Parse()
 
 	cs := csFlags.CreateStore()
diff --git a/clients/server/server.go b/clients/server/server.go
index 06eaf5b333..c5db34faac 100644
--- a/clients/server/server.go
+++ b/clients/server/server.go
@@ -80,7 +80,7 @@ func (s server) handleGetDataset(w http.ResponseWriter, id string) {
 }
 
 func main() {
-	flags := chunks.NewFlags()
+	flags := chunks.NewFlags("")
 	flag.Parse()
 
 	cs := flags.CreateStore()
diff --git a/dataset/dataset.go b/dataset/dataset.go
index 413675872a..dfb0870d98 100644
--- a/dataset/dataset.go
+++ b/dataset/dataset.go
@@ -30,7 +30,7 @@ type datasetFlags struct {
 
 func Flags() datasetFlags {
 	return datasetFlags{
-		chunks.NewFlags(),
+		chunks.NewFlags(""),
 		flag.String("dataset-id", "", "dataset id to store data for"),
 	}
 }
@@ -44,7 +44,7 @@ func (f datasetFlags) CreateDataset() *Dataset {
 		return nil
 	}
 
-	// Blech, kinda sucks to typecast to RootTracker, but we know that all the implementations of ChunkStore that implement it.
+	// Blech, kinda sucks to typecast to RootTracker, but we know that all the implementations of ChunkStore implement it.
 	commitDataStore := datas.NewDataStore(cs, cs.(chunks.RootTracker))
 	ds := NewDataset(commitDataStore, *f.datasetID)
 
diff --git a/sync/pull.go b/sync/pull.go
new file mode 100644
index 0000000000..5cafa31c7b
--- /dev/null
+++ b/sync/pull.go
@@ -0,0 +1,77 @@
+package sync
+
+import (
+	"errors"
+	"fmt"
+	"io/ioutil"
+
+	"github.com/attic-labs/noms/chunks"
+	"github.com/attic-labs/noms/datas"
+	"github.com/attic-labs/noms/dataset"
+	"github.com/attic-labs/noms/ref"
+	"github.com/attic-labs/noms/types"
+	"github.com/attic-labs/noms/walk"
+)
+
+func validateRefAsSetOfCommit(r ref.Ref, cs chunks.ChunkSource) (v types.Value, err error) {
+	v, err = types.ReadValue(r, cs)
+	if v == nil {
+		return nil, errors.New("BAH")
+	} else if err != nil {
+		return nil, err
+	}
+	// TODO: Replace this weird recover stuff below once we have a way to determine if
+	// a Value is an instance of a custom struct type.
+	err = fmt.Errorf("%+v is not a SetOfCommit", v)
+	defer func() { recover() }()
+	datas.SetOfCommitFromVal(v) // If this panics the return value will be the error above
+	return v, nil
+}
+
+// DiffHeadsByRef takes two Refs, validates that both refer to Heads in the given ChunkSource, and then returns the set of Refs that can be reached from 'big', but not 'small'.
+func DiffHeadsByRef(small, big ref.Ref, cs chunks.ChunkSource) ([]ref.Ref, error) {
+	if _, err := validateRefAsSetOfCommit(small, cs); err != nil {
+		return nil, err
+	}
+	if _, err := validateRefAsSetOfCommit(big, cs); err != nil {
+		return nil, err
+	}
+	return walk.Difference(small, big, cs), nil
+
+}
+
+// CopyChunks reads each Ref in refs out of src and writes it into sink.
+func CopyChunks(refs []ref.Ref, src chunks.ChunkSource, sink chunks.ChunkSink) error {
+	for _, ref := range refs {
+		reader, err := src.Get(ref)
+		if reader == nil {
+			return fmt.Errorf("Got back nil for %+v", ref)
+		} else if err != nil {
+			return err
+		}
+		// It seems like there should be some better way to connect a reader and a writer.
+		data, err := ioutil.ReadAll(reader)
+		if err != nil {
+			return err
+		}
+		writer := sink.Put()
+		defer writer.Close()
+		n, err := writer.Write(data)
+		if err != nil {
+			return err
+		}
+		if len(data) != n {
+			return fmt.Errorf("Should have read %d bytes; only read %d.", n, len(data))
+		}
+	}
+	return nil
+}
+
+// SetNewHeads takes the Ref of the desired new Heads of ds, the chunk for which should already exist in the Dataset. It validates that the Ref points to an existing chunk that decodes to the correct type of value and then commits it to ds.
+func SetNewHeads(newHeadRef ref.Ref, ds dataset.Dataset) (dataset.Dataset, error) {
+	v, err := validateRefAsSetOfCommit(newHeadRef, ds)
+	if err != nil {
+		return ds, err
+	}
+	return ds.Commit(datas.SetOfCommitFromVal(v)), nil
+}
diff --git a/sync/pull_test.go b/sync/pull_test.go
new file mode 100644
index 0000000000..3197265d0f
--- /dev/null
+++ b/sync/pull_test.go
@@ -0,0 +1,60 @@
+package sync
+
+import (
+	"testing"
+
+	"github.com/attic-labs/noms/chunks"
+	"github.com/attic-labs/noms/datas"
+	"github.com/attic-labs/noms/dataset"
+	"github.com/attic-labs/noms/types"
+	"github.com/stretchr/testify/assert"
+)
+
+func createTestDataset(name string) dataset.Dataset {
+	var t chunks.ChunkStore = &chunks.TestStore{}
+	return dataset.NewDataset(datas.NewDataStore(t, t.(chunks.RootTracker)), name)
+
+}
+
+func TestValidateRef(t *testing.T) {
+	cs := &chunks.TestStore{}
+	r, err := types.WriteValue(types.Bool(true), cs)
+	assert.NoError(t, err)
+
+	_, err = validateRefAsSetOfCommit(r, cs)
+	assert.Error(t, err)
+}
+
+func TestPull(t *testing.T) {
+	assert := assert.New(t)
+
+	puller := createTestDataset("puller")
+	pullee := createTestDataset("pullee")
+
+	commitValue := func(v types.Value, ds dataset.Dataset) dataset.Dataset {
+		return ds.Commit(
+			datas.NewSetOfCommit().Insert(
+				datas.NewCommit().SetParents(ds.Heads().NomsValue()).SetValue(v)))
+	}
+
+	initialValue := types.NewMap(
+		types.NewString("first"), types.NewList(),
+		types.NewString("second"), types.NewList(types.Int32(2)))
+
+	pullee = commitValue(initialValue, pullee)
+	puller = commitValue(initialValue, puller)
+
+	updatedValue := initialValue.Set(
+		types.NewString("third"), types.NewList(types.Int32(1)))
+
+	pullee = commitValue(updatedValue, pullee)
+
+	refs, err := DiffHeadsByRef(puller.Heads().Ref(), pullee.Heads().Ref(), pullee)
+	assert.NoError(err)
+	assert.NoError(CopyChunks(refs, pullee, puller))
+	puller, err = SetNewHeads(pullee.Heads().Ref(), puller)
+	assert.NoError(err)
+	assert.Equal(pullee.Heads().Ref(), puller.Heads().Ref())
+	assert.True(pullee.Heads().Equals(puller.Heads()))
+
+}