Remove spec GetDataset/GetDatabase/GetPath, update all clients (#2815)

These are now ForDataset/ForDatabase/ForPath, which return a spec.Spec instead of separate database/dataset/value return values; callers use the Spec's GetDatabase/GetDataset/GetValue accessors and release the handle with Close().
Author: Ben Kalman (committed by GitHub)
Date: 2016-11-09 17:34:39 -08:00
Parent: 45a36a46e9
Commit: cdfbee1b3d
21 changed files with 314 additions and 775 deletions
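
For orientation, the shape of the migration at the updated call sites looks roughly like this minimal sketch (based on the tests below; the ldb path and dataset name are made up, and error handling is abbreviated):

```go
package main

import (
	"fmt"

	"github.com/attic-labs/noms/go/spec"
	"github.com/attic-labs/noms/go/types"
)

func main() {
	// Old, removed API: database and dataset came back as separate values,
	// and the caller closed the database directly.
	//
	//   db, ds, err := spec.GetDataset("ldb:/tmp/noms::my-ds")
	//   defer db.Close()
	//   ds, err = db.CommitValue(ds, types.String("hello"))

	// New API: ForDataset (and ForDatabase/ForPath) return a spec.Spec that
	// owns the database handle and exposes accessors.
	sp, err := spec.ForDataset("ldb:/tmp/noms::my-ds") // path and dataset name are illustrative
	if err != nil {
		panic(err)
	}
	defer sp.Close()

	db := sp.GetDatabase()
	ds, err := db.CommitValue(sp.GetDataset(), types.String("hello"))
	if err != nil {
		panic(err)
	}
	fmt.Println("new head:", ds.Head().Hash())
}
```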


@@ -23,26 +23,26 @@ func TestNomsCommit(t *testing.T) {
suite.Run(t, &nomsCommitTestSuite{})
}
func (s *nomsCommitTestSuite) setupDataset(name string, doCommit bool) (db datas.Database, ds datas.Dataset, dsStr string, ref types.Ref) {
func (s *nomsCommitTestSuite) setupDataset(name string, doCommit bool) (sp spec.Spec, ref types.Ref) {
var err error
dsStr = spec.CreateValueSpecString("ldb", s.LdbDir, name)
db, ds, err = spec.GetDataset(dsStr)
sp, err = spec.ForDataset(spec.CreateValueSpecString("ldb", s.LdbDir, name))
s.NoError(err)
v := types.String("testcommit")
ref = db.WriteValue(v)
ref = sp.GetDatabase().WriteValue(v)
if doCommit {
ds, err = db.CommitValue(ds, v)
_, err = sp.GetDatabase().CommitValue(sp.GetDataset(), v)
s.NoError(err)
}
return
}
func (s *nomsCommitTestSuite) TestNomsCommitReadPathFromStdin() {
db, ds, dsStr, ref := s.setupDataset("commitTestStdin", false)
defer db.Close()
sp, ref := s.setupDataset("commitTestStdin", false)
defer sp.Close()
_, ok := ds.MaybeHead()
_, ok := sp.GetDataset().MaybeHead()
s.False(ok, "should not have a commit")
oldStdin := os.Stdin
@@ -58,13 +58,14 @@ func (s *nomsCommitTestSuite) TestNomsCommitReadPathFromStdin() {
stdinWriter.Write([]byte("#" + ref.TargetHash().String() + "\n"))
stdinWriter.Close()
}()
stdoutString, stderrString := s.MustRun(main, []string{"commit", dsStr})
stdoutString, stderrString := s.MustRun(main, []string{"commit", sp.Spec})
s.Empty(stderrString)
s.Contains(stdoutString, "New head #")
db, ds, err = spec.GetDataset(dsStr)
s.NoError(err)
commit, ok := ds.MaybeHead()
sp, _ = spec.ForDataset(sp.Spec)
defer sp.Close()
commit, ok := sp.GetDataset().MaybeHead()
s.True(ok, "should have a commit now")
value := commit.Get(datas.ValueField)
s.True(value.Hash() == ref.TargetHash(), "commit.value hash == writevalue hash")
@@ -74,19 +75,20 @@ func (s *nomsCommitTestSuite) TestNomsCommitReadPathFromStdin() {
}
func (s *nomsCommitTestSuite) TestNomsCommitToDatasetWithoutHead() {
db, ds, dsStr, ref := s.setupDataset("commitTest", false)
defer db.Close()
sp, ref := s.setupDataset("commitTest", false)
defer sp.Close()
_, ok := ds.MaybeHead()
_, ok := sp.GetDataset().MaybeHead()
s.False(ok, "should not have a commit")
stdoutString, stderrString := s.MustRun(main, []string{"commit", "#" + ref.TargetHash().String(), dsStr})
stdoutString, stderrString := s.MustRun(main, []string{"commit", "#" + ref.TargetHash().String(), sp.Spec})
s.Empty(stderrString)
s.Contains(stdoutString, "New head #")
db, ds, err := spec.GetDataset(dsStr)
s.NoError(err)
commit, ok := ds.MaybeHead()
sp, _ = spec.ForDataset(sp.Spec)
defer sp.Close()
commit, ok := sp.GetDataset().MaybeHead()
s.True(ok, "should have a commit now")
value := commit.Get(datas.ValueField)
s.True(value.Hash() == ref.TargetHash(), "commit.value hash == writevalue hash")
@@ -102,17 +104,17 @@ func structFieldEqual(old, now types.Struct, field string) bool {
}
func (s *nomsCommitTestSuite) runDuplicateTest(allowDuplicate bool) {
db, ds, dsStr, ref := s.setupDataset("commitTestDuplicate", true)
defer db.Close()
sp, ref := s.setupDataset("commitTestDuplicate", true)
defer sp.Close()
_, ok := ds.MaybeHeadValue()
_, ok := sp.GetDataset().MaybeHeadValue()
s.True(ok, "should have a commit")
cliOptions := []string{"commit"}
if allowDuplicate {
cliOptions = append(cliOptions, "--allow-dupe=1")
}
cliOptions = append(cliOptions, "#"+ref.TargetHash().String(), dsStr)
cliOptions = append(cliOptions, "#"+ref.TargetHash().String(), sp.Spec)
stdoutString, stderrString := s.MustRun(main, cliOptions)
s.Empty(stderrString)
@@ -123,9 +125,10 @@ func (s *nomsCommitTestSuite) runDuplicateTest(allowDuplicate bool) {
s.Contains(stdoutString, "Commit aborted")
}
db, ds, err := spec.GetDataset(dsStr)
s.NoError(err)
value, ok := ds.MaybeHeadValue()
sp, _ = spec.ForDataset(sp.Spec)
defer sp.Close()
value, ok := sp.GetDataset().MaybeHeadValue()
s.True(ok, "should still have a commit")
s.True(value.Hash() == ref.TargetHash(), "commit.value hash == previous commit hash")
}
@@ -136,17 +139,19 @@ func (s *nomsCommitTestSuite) TestNomsCommitDuplicate() {
}
func (s *nomsCommitTestSuite) TestNomsCommitMetadata() {
db, ds, dsStr, ref := s.setupDataset("commitTestMetadata", true)
metaOld := ds.Head().Get(datas.MetaField).(types.Struct)
sp, ref := s.setupDataset("commitTestMetadata", true)
defer sp.Close()
stdoutString, stderrString := s.MustRun(main, []string{"commit", "--allow-dupe=1", "--message=foo", "#" + ref.TargetHash().String(), dsStr})
metaOld := sp.GetDataset().Head().Get(datas.MetaField).(types.Struct)
stdoutString, stderrString := s.MustRun(main, []string{"commit", "--allow-dupe=1", "--message=foo", "#" + ref.TargetHash().String(), sp.Spec})
s.Empty(stderrString)
s.Contains(stdoutString, "New head #")
db.Close()
db, ds, err := spec.GetDataset(dsStr)
s.NoError(err)
metaNew := ds.Head().Get(datas.MetaField).(types.Struct)
sp, _ = spec.ForDataset(sp.Spec)
defer sp.Close()
metaNew := sp.GetDataset().Head().Get(datas.MetaField).(types.Struct)
s.False(metaOld.Equals(metaNew), "meta didn't change")
s.False(structFieldEqual(metaOld, metaNew, "date"), "date didn't change")
@@ -154,53 +159,54 @@ func (s *nomsCommitTestSuite) TestNomsCommitMetadata() {
s.True(metaNew.Get("message").Equals(types.String("foo")), "message wasn't set")
metaOld = metaNew
stdoutString, stderrString = s.MustRun(main, []string{"commit", "--allow-dupe=1", "--meta=message=bar", "--date=" + spec.CommitMetaDateFormat, "#" + ref.TargetHash().String(), dsStr})
stdoutString, stderrString = s.MustRun(main, []string{"commit", "--allow-dupe=1", "--meta=message=bar", "--date=" + spec.CommitMetaDateFormat, "#" + ref.TargetHash().String(), sp.Spec})
s.Empty(stderrString)
s.Contains(stdoutString, "New head #")
db.Close()
db, ds, err = spec.GetDataset(dsStr)
s.NoError(err)
metaNew = ds.Head().Get(datas.MetaField).(types.Struct)
sp, _ = spec.ForDataset(sp.Spec)
defer sp.Close()
metaNew = sp.GetDataset().Head().Get(datas.MetaField).(types.Struct)
s.False(metaOld.Equals(metaNew), "meta didn't change")
s.False(structFieldEqual(metaOld, metaNew, "date"), "date didn't change")
s.False(structFieldEqual(metaOld, metaNew, "message"), "message didn't change")
s.True(metaNew.Get("message").Equals(types.String("bar")), "message wasn't set")
db.Close()
}
func (s *nomsCommitTestSuite) TestNomsCommitHashNotFound() {
db, _, dsStr, _ := s.setupDataset("commitTestBadHash", true)
defer db.Close()
sp, _ := s.setupDataset("commitTestBadHash", true)
defer sp.Close()
s.Panics(func() {
s.MustRun(main, []string{"commit", "#9ei6fbrs0ujo51vifd3f2eebufo4lgdu", dsStr})
s.MustRun(main, []string{"commit", "#9ei6fbrs0ujo51vifd3f2eebufo4lgdu", sp.Spec})
})
}
func (s *nomsCommitTestSuite) TestNomsCommitMetadataBadDateFormat() {
db, _, dsStr, ref := s.setupDataset("commitTestMetadata", true)
defer db.Close()
sp, ref := s.setupDataset("commitTestMetadata", true)
defer sp.Close()
s.Panics(func() {
s.MustRun(main, []string{"commit", "--allow-dupe=1", "--date=a", "#" + ref.TargetHash().String(), dsStr})
s.MustRun(main, []string{"commit", "--allow-dupe=1", "--date=a", "#" + ref.TargetHash().String(), sp.Spec})
})
}
func (s *nomsCommitTestSuite) TestNomsCommitInvalidMetadataPaths() {
db, _, dsStr, ref := s.setupDataset("commitTestMetadataPaths", true)
defer db.Close()
sp, ref := s.setupDataset("commitTestMetadataPaths", true)
defer sp.Close()
s.Panics(func() {
s.MustRun(main, []string{"commit", "--allow-dupe=1", "--meta-p=#beef", "#" + ref.TargetHash().String(), dsStr})
s.MustRun(main, []string{"commit", "--allow-dupe=1", "--meta-p=#beef", "#" + ref.TargetHash().String(), sp.Spec})
})
}
func (s *nomsCommitTestSuite) TestNomsCommitInvalidMetadataFieldName() {
db, _, dsStr, ref := s.setupDataset("commitTestMetadataFields", true)
defer db.Close()
sp, ref := s.setupDataset("commitTestMetadataFields", true)
defer sp.Close()
s.Panics(func() {
s.MustRun(main, []string{"commit", "--allow-dupe=1", "--meta=_foo=bar", "#" + ref.TargetHash().String(), dsStr})
s.MustRun(main, []string{"commit", "--allow-dupe=1", "--meta=_foo=bar", "#" + ref.TargetHash().String(), sp.Spec})
})
}


@@ -24,12 +24,11 @@ func TestNomsDiff(t *testing.T) {
}
func (s *nomsDiffTestSuite) TestNomsDiffOutputNotTruncated() {
datasetName := "diffTest"
str := spec.CreateValueSpecString("ldb", s.LdbDir, datasetName)
db, ds, err := spec.GetDataset(str)
sp, err := spec.ForDataset(spec.CreateValueSpecString("ldb", s.LdbDir, "diffTest"))
s.NoError(err)
defer sp.Close()
ds, err = addCommit(ds, "first commit")
ds, err := addCommit(sp.GetDataset(), "first commit")
s.NoError(err)
r1 := spec.CreateValueSpecString("ldb", s.LdbDir, "#"+ds.HeadRef().TargetHash().String())
@@ -37,19 +36,18 @@ func (s *nomsDiffTestSuite) TestNomsDiffOutputNotTruncated() {
s.NoError(err)
r2 := spec.CreateValueSpecString("ldb", s.LdbDir, "#"+ds.HeadRef().TargetHash().String())
db.Close()
out, _ := s.MustRun(main, []string{"diff", r1, r2})
s.True(strings.HasSuffix(out, "\"second commit\"\n }\n"), out)
}
func (s *nomsDiffTestSuite) TestNomsDiffSummarize() {
datasetName := "diffSummarizeTest"
str := spec.CreateValueSpecString("ldb", s.LdbDir, datasetName)
store, ds, err := spec.GetDataset(str)
sp, err := spec.ForDataset(spec.CreateValueSpecString("ldb", s.LdbDir, "diffSummarizeTest"))
s.NoError(err)
defer store.Close()
defer sp.Close()
ds, err = addCommit(ds, "first commit")
db := sp.GetDatabase()
ds, err := addCommit(sp.GetDataset(), "first commit")
s.NoError(err)
r1 := spec.CreateHashSpecString("ldb", s.LdbDir, ds.HeadRef().TargetHash())
@@ -64,11 +62,11 @@ func (s *nomsDiffTestSuite) TestNomsDiffSummarize() {
out, _ = s.MustRun(main, []string{"diff", "--summarize", r1 + ".value", r2 + ".value"})
s.NotContains(out, "Comparing commit values")
ds, err = store.CommitValue(ds, types.NewList(types.Number(1), types.Number(2), types.Number(3), types.Number(4)))
ds, err = db.CommitValue(ds, types.NewList(types.Number(1), types.Number(2), types.Number(3), types.Number(4)))
s.NoError(err)
r3 := spec.CreateHashSpecString("ldb", s.LdbDir, ds.HeadRef().TargetHash()) + ".value"
ds, err = store.CommitValue(ds, types.NewList(types.Number(1), types.Number(222), types.Number(4)))
ds, err = db.CommitValue(ds, types.NewList(types.Number(1), types.Number(222), types.Number(4)))
s.NoError(err)
r4 := spec.CreateHashSpecString("ldb", s.LdbDir, ds.HeadRef().TargetHash()) + ".value"


@@ -25,27 +25,28 @@ type nomsLogTestSuite struct {
}
func testCommitInResults(s *nomsLogTestSuite, str string, i int) {
db, ds, err := spec.GetDataset(str)
sp, err := spec.ForDataset(str)
s.NoError(err)
ds, err = db.CommitValue(ds, types.Number(i))
defer sp.Close()
sp.GetDatabase().CommitValue(sp.GetDataset(), types.Number(i))
s.NoError(err)
commit := ds.Head()
db.Close()
commit := sp.GetDataset().Head()
res, _ := s.MustRun(main, []string{"log", str})
s.Contains(res, commit.Hash().String())
}
func (s *nomsLogTestSuite) TestNomsLog() {
datasetName := "dsTest"
str := spec.CreateValueSpecString("ldb", s.LdbDir, datasetName)
db, _, err := spec.GetDataset(str)
sp, err := spec.ForDataset(spec.CreateValueSpecString("ldb", s.LdbDir, "dsTest"))
s.NoError(err)
defer sp.Close()
db.Close()
s.Panics(func() { s.MustRun(main, []string{"log", str}) })
sp.GetDatabase() // create the database
s.Panics(func() { s.MustRun(main, []string{"log", sp.Spec}) })
testCommitInResults(s, str, 1)
testCommitInResults(s, str, 2)
testCommitInResults(s, sp.Spec, 1)
testCommitInResults(s, sp.Spec, 2)
}
func addCommit(ds datas.Dataset, v string) (datas.Dataset, error) {
@@ -67,12 +68,13 @@ func mergeDatasets(ds1, ds2 datas.Dataset, v string) (datas.Dataset, error) {
}
func (s *nomsLogTestSuite) TestNArg() {
str := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
dsName := "nArgTest"
db, err := spec.GetDatabase(str)
s.NoError(err)
ds := db.GetDataset(dsName)
sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("ldb", s.LdbDir))
s.NoError(err)
defer sp.Close()
ds := sp.GetDatabase().GetDataset(dsName)
ds, err = addCommit(ds, "1")
h1 := ds.Head().Hash()
@@ -83,7 +85,6 @@ func (s *nomsLogTestSuite) TestNArg() {
ds, err = addCommit(ds, "3")
s.NoError(err)
h3 := ds.Head().Hash()
db.Close()
dsSpec := spec.CreateValueSpecString("ldb", s.LdbDir, dsName)
res, _ := s.MustRun(main, []string{"log", "-n1", dsSpec})
@@ -103,10 +104,11 @@ func (s *nomsLogTestSuite) TestNArg() {
}
func (s *nomsLogTestSuite) TestEmptyCommit() {
str := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
db, err := spec.GetDatabase(str)
sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("ldb", s.LdbDir))
s.NoError(err)
defer sp.Close()
db := sp.GetDatabase()
ds := db.GetDataset("ds1")
meta := types.NewStruct("Meta", map[string]types.Value{
@@ -116,8 +118,8 @@ func (s *nomsLogTestSuite) TestEmptyCommit() {
ds, err = db.Commit(ds, types.String("1"), datas.CommitOptions{Meta: meta})
s.NoError(err)
db.Commit(ds, types.String("2"), datas.CommitOptions{})
db.Close()
ds, err = db.Commit(ds, types.String("2"), datas.CommitOptions{})
s.NoError(err)
dsSpec := spec.CreateValueSpecString("ldb", s.LdbDir, "ds1")
res, _ := s.MustRun(main, []string{"log", "--show-value=false", dsSpec})
@@ -128,12 +130,13 @@ func (s *nomsLogTestSuite) TestEmptyCommit() {
}
func (s *nomsLogTestSuite) TestNomsGraph1() {
str := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
db, err := spec.GetDatabase(str)
sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("ldb", s.LdbDir))
s.NoError(err)
defer sp.Close()
db := sp.GetDatabase()
b1 := db.GetDataset("b1")
b1, err = addCommit(b1, "1")
s.NoError(err)
b1, err = addCommit(b1, "2")
@@ -173,7 +176,6 @@ func (s *nomsLogTestSuite) TestNomsGraph1() {
b1, err = addCommit(b1, "7")
s.NoError(err)
b1.Database().Close()
res, _ := s.MustRun(main, []string{"log", "--graph", "--show-value=true", spec.CreateValueSpecString("ldb", s.LdbDir, "b1")})
s.Equal(graphRes1, res)
res, _ = s.MustRun(main, []string{"log", "--graph", "--show-value=false", spec.CreateValueSpecString("ldb", s.LdbDir, "b1")})
@@ -181,12 +183,13 @@ func (s *nomsLogTestSuite) TestNomsGraph1() {
}
func (s *nomsLogTestSuite) TestNomsGraph2() {
str := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
db, err := spec.GetDatabase(str)
sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("ldb", s.LdbDir))
s.NoError(err)
defer sp.Close()
db := sp.GetDatabase()
ba := db.GetDataset("ba")
ba, err = addCommit(ba, "1")
s.NoError(err)
@@ -204,8 +207,6 @@ func (s *nomsLogTestSuite) TestNomsGraph2() {
_, err = mergeDatasets(ba, bc, "101")
s.NoError(err)
db.Close()
res, _ := s.MustRun(main, []string{"log", "--graph", "--show-value=true", spec.CreateValueSpecString("ldb", s.LdbDir, "ba")})
s.Equal(graphRes2, res)
res, _ = s.MustRun(main, []string{"log", "--graph", "--show-value=false", spec.CreateValueSpecString("ldb", s.LdbDir, "ba")})
@@ -213,9 +214,11 @@ func (s *nomsLogTestSuite) TestNomsGraph2() {
}
func (s *nomsLogTestSuite) TestNomsGraph3() {
str := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
db, err := spec.GetDatabase(str)
sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("ldb", s.LdbDir))
s.NoError(err)
defer sp.Close()
db := sp.GetDatabase()
w := db.GetDataset("w")
@@ -246,7 +249,6 @@ func (s *nomsLogTestSuite) TestNomsGraph3() {
_, err = mergeDatasets(w, z, "2222-wz")
s.NoError(err)
db.Close()
res, _ := s.MustRun(main, []string{"log", "--graph", "--show-value=true", spec.CreateValueSpecString("ldb", s.LdbDir, "w")})
test.EqualsIgnoreHashes(s.T(), graphRes3, res)
res, _ = s.MustRun(main, []string{"log", "--graph", "--show-value=false", spec.CreateValueSpecString("ldb", s.LdbDir, "w")})
@@ -262,11 +264,11 @@ func (s *nomsLogTestSuite) TestTruncation() {
return types.NewList(nv...)
}
str := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
db, err := spec.GetDatabase(str)
sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("ldb", s.LdbDir))
s.NoError(err)
defer sp.Close()
t := db.GetDataset("truncate")
t := sp.GetDatabase().GetDataset("truncate")
t, err = addCommit(t, "the first line")
s.NoError(err)
@@ -274,7 +276,6 @@ func (s *nomsLogTestSuite) TestTruncation() {
l := []string{"one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "eleven"}
_, err = addCommitWithValue(t, toNomsList(l))
s.NoError(err)
db.Close()
dsSpec := spec.CreateValueSpecString("ldb", s.LdbDir, "truncate")
res, _ := s.MustRun(main, []string{"log", "--graph", "--show-value=true", dsSpec})


@@ -84,22 +84,25 @@ func (s *nomsMergeTestSuite) TestNomsMerge_Success() {
}
func (s *nomsMergeTestSuite) setupMergeDataset(name string, data types.StructData, p types.Set) types.Ref {
db, ds, _ := spec.GetDataset(spec.CreateValueSpecString("ldb", s.LdbDir, name))
defer db.Close()
ds, err := db.Commit(ds, types.NewStruct("", data), datas.CommitOptions{Parents: p})
sp, err := spec.ForDataset(spec.CreateValueSpecString("ldb", s.LdbDir, name))
s.NoError(err)
defer sp.Close()
ds := sp.GetDataset()
ds, err = sp.GetDatabase().Commit(ds, types.NewStruct("", data), datas.CommitOptions{Parents: p})
s.NoError(err)
return ds.HeadRef()
}
func (s *nomsMergeTestSuite) validateDataset(name string, expected types.Struct, parents ...types.Value) {
db, ds, err := spec.GetDataset(spec.CreateValueSpecString("ldb", s.LdbDir, name))
sp, err := spec.ForDataset(spec.CreateValueSpecString("ldb", s.LdbDir, name))
if s.NoError(err) {
commit := ds.Head()
defer sp.Close()
commit := sp.GetDataset().Head()
s.True(commit.Get(datas.ParentsField).Equals(types.NewSet(parents...)))
merged := ds.HeadValue()
merged := sp.GetDataset().HeadValue()
s.True(expected.Equals(merged), "%s != %s", types.EncodedValue(expected), types.EncodedValue(merged))
}
defer db.Close()
}
func (s *nomsMergeTestSuite) TestNomsMerge_Left() {
@@ -148,7 +151,10 @@ func (s *nomsMergeTestSuite) TestNomsMerge_Conflict() {
}
func (s *nomsMergeTestSuite) TestBadInput() {
sp := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("ldb", s.LdbDir))
s.NoError(err)
defer sp.Close()
l, r, o := "left", "right", "output"
type c struct {
args []string
@@ -159,20 +165,20 @@ func (s *nomsMergeTestSuite) TestBadInput() {
{[]string{"foo", "bar"}, "error: Incorrect number of arguments\n"},
{[]string{"foo", "bar", "baz"}, "error: Incorrect number of arguments\n"},
{[]string{"foo", "bar", "baz", "quux", "five"}, "error: Incorrect number of arguments\n"},
{[]string{sp, l + "!!", r, o}, "error: Invalid dataset " + l + "!!, must match [a-zA-Z0-9\\-_/]+\n"},
{[]string{sp, l + "2", r, o}, "error: Dataset " + l + "2 has no data\n"},
{[]string{sp, l, r + "2", o}, "error: Dataset " + r + "2 has no data\n"},
{[]string{sp, l, r, "!invalid"}, "error: Invalid dataset !invalid, must match [a-zA-Z0-9\\-_/]+\n"},
{[]string{sp.Spec, l + "!!", r, o}, "error: Invalid dataset " + l + "!!, must match [a-zA-Z0-9\\-_/]+\n"},
{[]string{sp.Spec, l + "2", r, o}, "error: Dataset " + l + "2 has no data\n"},
{[]string{sp.Spec, l, r + "2", o}, "error: Dataset " + r + "2 has no data\n"},
{[]string{sp.Spec, l, r, "!invalid"}, "error: Invalid dataset !invalid, must match [a-zA-Z0-9\\-_/]+\n"},
}
db, _ := spec.GetDatabase(sp)
db := sp.GetDatabase()
prep := func(dsName string) {
ds := db.GetDataset(dsName)
db.CommitValue(ds, types.NewMap(types.String("foo"), types.String("bar")))
}
prep(l)
prep(r)
db.Close()
for _, c := range cases {
stdout, stderr, err := s.Run(main, append([]string{"merge"}, c.args...))


@@ -38,9 +38,11 @@ func runMigrate(args []string) int {
// TODO: parallelize
// TODO: incrementalize
sourceDb, sourceValue, err := v7spec.GetPath(args[0])
v7Path, err := v7spec.ForPath(args[0])
d.CheckError(err)
defer sourceDb.Close()
defer v7Path.Close()
sourceDb, sourceValue := v7Path.GetDatabase(), v7Path.GetValue()
if sourceValue == nil {
d.CheckErrorNoUsage(fmt.Errorf("Value not found: %s", args[0]))
@@ -48,9 +50,11 @@ func runMigrate(args []string) int {
isCommit := v7datas.IsCommitType(sourceValue.Type())
sinkDb, sinkDataset, err := spec.GetDataset(args[1])
vNewDataset, err := spec.ForDataset(args[1])
d.CheckError(err)
defer sinkDb.Close()
defer vNewDataset.Close()
sinkDb, sinkDataset := vNewDataset.GetDatabase(), vNewDataset.GetDataset()
if isCommit {
// Need to migrate both value and meta fields.


@@ -25,18 +25,16 @@ type nomsMigrateTestSuite struct {
}
func (s *nomsMigrateTestSuite) writeTestData(str string, value v7types.Value, meta v7types.Value) {
db, ds, err := v7spec.GetDataset(str)
sp, err := v7spec.ForDataset(str)
s.NoError(err)
defer sp.Close()
_, err = db.Commit(ds, value, v7datas.CommitOptions{
_, err = sp.GetDatabase().Commit(sp.GetDataset(), value, v7datas.CommitOptions{
Meta: v7types.NewStruct("", v7types.StructData{
"value": meta,
}),
})
s.NoError(err)
err = db.Close()
s.NoError(err)
}
func (s *nomsMigrateTestSuite) TestNomsMigrate() {
@@ -56,10 +54,11 @@ func (s *nomsMigrateTestSuite) TestNomsMigrate() {
s.Equal("", outStr)
s.Equal("", errStr)
destDb, destDs, err := spec.GetDataset(destStr)
sp, err := spec.ForDataset(destStr)
s.NoError(err)
defer destDb.Close()
defer sp.Close()
destDs := sp.GetDataset()
s.True(destDs.HeadValue().Equals(types.String(str)))
s.True(destDs.Head().Get("meta").(types.Struct).Get("value").Equals(types.Number(42)))
}
@@ -83,11 +82,11 @@ func (s *nomsMigrateTestSuite) TestNomsMigrateNonCommit() {
s.Equal("", outStr)
s.Equal("", errStr)
destDb, destDs, err := spec.GetDataset(destStr)
sp, err := spec.ForDataset(destStr)
s.NoError(err)
defer destDb.Close()
defer sp.Close()
s.True(destDs.HeadValue().Equals(types.String(str)))
s.True(sp.GetDataset().HeadValue().Equals(types.String(str)))
}


@@ -7,7 +7,6 @@ package main
import (
"testing"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/clienttest"
@@ -32,15 +31,15 @@ const (
)
func (s *nomsShowTestSuite) writeTestData(str string, value types.Value) types.Ref {
db, ds, err := spec.GetDataset(str)
d.Chk.NoError(err)
sp, err := spec.ForDataset(str)
s.NoError(err)
defer sp.Close()
db := sp.GetDatabase()
r1 := db.WriteValue(value)
ds, err = db.CommitValue(ds, r1)
d.Chk.NoError(err)
_, err = db.CommitValue(sp.GetDataset(), r1)
s.NoError(err)
err = db.Close()
d.Chk.NoError(err)
return r1
}


@@ -219,10 +219,11 @@ func Run(datasetID string, t *testing.T, suiteT perfSuiteT) {
suite.datasetID = datasetID
// This is the database the perf test results are written to.
db, err := spec.GetDatabase(*perfFlag)
sp, err := spec.ForDatabase(*perfFlag)
if !assert.NoError(err) {
return
}
defer sp.Close()
// List of test runs, each a map of test name => timing info.
testReps := make([]testRep, *perfRepeatFlag)
@@ -255,11 +256,11 @@ func Run(datasetID string, t *testing.T, suiteT perfSuiteT) {
"reps": types.NewList(reps...),
})
db := sp.GetDatabase()
ds := db.GetDataset(*perfPrefixFlag + datasetID)
var err error
ds, err = db.CommitValue(ds, record)
assert.NoError(err)
assert.NoError(db.Close())
}()
if t, ok := suiteT.(testifySuite.SetupAllSuite); ok {


@@ -188,10 +188,10 @@ func runTestSuite(t *testing.T, mem bool) {
assert.Equal(*perfRepeatFlag*len(expectedTests), s.tearDownTest)
// The results should have been written to the "ds" dataset.
db, ds, err := spec.GetDataset(ldbDir + "::ds")
defer db.Close()
sp, err := spec.ForDataset(ldbDir + "::ds")
assert.NoError(err)
head := ds.HeadValue().(types.Struct)
defer sp.Close()
head := sp.GetDataset().HeadValue().(types.Struct)
// These tests mostly assert that the structure of the results is correct. Specific values are hard.
@@ -265,16 +265,16 @@ func TestPrefixFlag(t *testing.T) {
Run("my-prefix/test", t, &PerfSuite{})
// The results should have been written to "foo/my-prefix/test" not "my-prefix/test".
db, ds, err := spec.GetDataset(ldbDir + "::my-prefix/test")
defer db.Close()
sp, err := spec.ForDataset(ldbDir + "::my-prefix/test")
assert.NoError(err)
_, ok := ds.MaybeHead()
defer sp.Close()
_, ok := sp.GetDataset().MaybeHead()
assert.False(ok)
db, ds, err = spec.GetDataset(ldbDir + "::foo/my-prefix/test")
defer db.Close()
sp, err = spec.ForDataset(ldbDir + "::foo/my-prefix/test")
assert.NoError(err)
_, ok = ds.HeadValue().(types.Struct)
defer sp.Close()
_, ok = sp.GetDataset().HeadValue().(types.Struct)
assert.True(ok)
}


@@ -1,265 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
// Package spec provides builders and parsers for spelling Noms databases, datasets and values.
package spec
import (
"fmt"
"net/url"
"regexp"
"strings"
"github.com/attic-labs/noms/go/chunks"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/hash"
"github.com/attic-labs/noms/go/types"
flag "github.com/juju/gnuflag"
)
const (
Separator = "::"
)
var (
datasetRe = regexp.MustCompile("^" + datas.DatasetRe.String() + "$")
ldbStores = map[string]*refCountingLdbStore{}
)
func GetDatabase(str string) (datas.Database, error) {
sp, err := ParseDatabaseSpec(str)
if err != nil {
return nil, err
}
return sp.Database()
}
func GetChunkStore(str string) (chunks.ChunkStore, error) {
sp, err := ParseDatabaseSpec(str)
if err != nil {
return nil, err
}
switch sp.Protocol {
case "ldb":
return getLDBStore(sp.Path), nil
case "mem":
return chunks.NewMemoryStore(), nil
default:
return nil, fmt.Errorf("Unable to create chunkstore for protocol: %s", str)
}
}
func GetDataset(str string) (datas.Database, datas.Dataset, error) {
sp, err := parseDatasetSpec(str)
if err != nil {
return nil, datas.Dataset{}, err
}
ds, err := sp.Dataset()
if err != nil {
return nil, datas.Dataset{}, err
}
return ds.Database(), ds, nil
}
func GetPath(str string) (datas.Database, types.Value, error) {
sp, err := ParsePathSpec(str)
if err != nil {
return nil, nil, err
}
return sp.Value()
}
type DatabaseSpec struct {
Protocol string
Path string
accessToken string
}
type datasetSpec struct {
DbSpec DatabaseSpec
DatasetName string
}
type PathSpec struct {
DbSpec DatabaseSpec
Path AbsolutePath
}
// ParseDatabaseSpec parses a database spec string into its parts
func ParseDatabaseSpec(spec string) (DatabaseSpec, error) {
ldbDatabaseSpec := func(path string) (DatabaseSpec, error) {
if len(path) == 0 {
return DatabaseSpec{}, fmt.Errorf("Empty file system path")
}
return DatabaseSpec{Protocol: "ldb", Path: path}, nil
}
parts := strings.SplitN(spec, ":", 2) // [protocol] [, path]?
protocol := parts[0]
// If there was no ":" then this is either a mem spec, or a filesystem path.
// This is ambiguous if the file system path is "mem" but that just means the path needs to be explicitly "ldb:mem".
if len(parts) == 1 {
if protocol == "mem" {
return DatabaseSpec{Protocol: "mem"}, nil
}
return ldbDatabaseSpec(protocol)
}
path := parts[1]
switch protocol {
case "http", "https":
u, err := url.Parse(spec)
if err != nil || len(u.Host) == 0 {
return DatabaseSpec{}, fmt.Errorf("Invalid URL: %s", spec)
}
token := u.Query().Get("access_token")
return DatabaseSpec{Protocol: protocol, Path: path, accessToken: token}, nil
case "ldb":
return ldbDatabaseSpec(path)
case "mem":
return DatabaseSpec{}, fmt.Errorf(`In-memory database must be specified as "mem", not "mem:%s"`, path)
default:
return DatabaseSpec{}, fmt.Errorf("Invalid database protocol: %s", spec)
}
}
func splitAndParseDatabaseSpec(spec string) (DatabaseSpec, string, error) {
parts := strings.SplitN(spec, "::", 2)
if len(parts) != 2 {
return DatabaseSpec{}, "", fmt.Errorf("Missing :: separator between database and dataset: %s", spec)
}
dbSpec, err := ParseDatabaseSpec(parts[0])
if err != nil {
return DatabaseSpec{}, "", err
}
return dbSpec, parts[1], nil
}
func parseDatasetSpec(spec string) (datasetSpec, error) {
dbSpec, dsName, err := splitAndParseDatabaseSpec(spec)
if err != nil {
return datasetSpec{}, err
}
if !datasetRe.MatchString(dsName) {
return datasetSpec{}, fmt.Errorf("Invalid dataset, must match %s: %s", datas.DatasetRe.String(), dsName)
}
return datasetSpec{dbSpec, dsName}, nil
}
// ParsePathSpec parses a path spec string into its parts
func ParsePathSpec(spec string) (PathSpec, error) {
dbSpec, pathStr, err := splitAndParseDatabaseSpec(spec)
if err != nil {
return PathSpec{}, err
}
path, err := NewAbsolutePath(pathStr)
if err != nil {
return PathSpec{}, err
}
return PathSpec{dbSpec, path}, nil
}
func (spec DatabaseSpec) String() string {
return spec.Protocol + ":" + spec.Path
}
func (spec DatabaseSpec) Database() (ds datas.Database, err error) {
switch spec.Protocol {
case "http", "https":
err = d.Unwrap(d.Try(func() {
ds = datas.NewRemoteDatabase(spec.String(), "Bearer "+spec.accessToken)
}))
case "ldb":
err = d.Unwrap(d.Try(func() {
ds = datas.NewDatabase(getLDBStore(spec.Path))
}))
case "mem":
ds = datas.NewDatabase(chunks.NewMemoryStore())
default:
err = fmt.Errorf("Invalid path prototocol: %s", spec.Protocol)
}
return
}
func (spec datasetSpec) Dataset() (datas.Dataset, error) {
store, err := spec.DbSpec.Database()
if err != nil {
return datas.Dataset{}, err
}
return store.GetDataset(spec.DatasetName), nil
}
func (spec datasetSpec) String() string {
return spec.DbSpec.String() + Separator + spec.DatasetName
}
func (spec datasetSpec) Value() (datas.Database, types.Value, error) {
dataset, err := spec.Dataset()
if err != nil {
return nil, nil, err
}
if commit, ok := dataset.MaybeHead(); ok {
return dataset.Database(), commit, nil
}
dataset.Database().Close()
return nil, nil, fmt.Errorf("No head value for dataset: %s", spec.DatasetName)
}
func (spec PathSpec) Value() (db datas.Database, val types.Value, err error) {
db, err = spec.DbSpec.Database()
if err != nil {
return
}
val = spec.Path.Resolve(db)
return
}
func (spec PathSpec) String() string {
return spec.DbSpec.String() + Separator + spec.Path.String()
}
func RegisterDatabaseFlags(flags *flag.FlagSet) {
chunks.RegisterLevelDBFlags(flags)
}
func CreateDatabaseSpecString(protocol, path string) string {
return fmt.Sprintf("%s:%s", protocol, path)
}
func CreateValueSpecString(protocol, path, value string) string {
return fmt.Sprintf("%s:%s::%s", protocol, path, value)
}
func CreateHashSpecString(protocol, path string, h hash.Hash) string {
return fmt.Sprintf("%s:%s::#%s", protocol, path, h.String())
}
func getLDBStore(path string) chunks.ChunkStore {
if store, ok := ldbStores[path]; ok {
store.AddRef()
return store
}
store := newRefCountingLdbStore(path, func() {
delete(ldbStores, path)
})
ldbStores[path] = store
return store
}


@@ -1,270 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package spec
import (
"fmt"
"io/ioutil"
"os"
"path"
"testing"
"github.com/attic-labs/noms/go/chunks"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/testify/assert"
)
func TestLDBDatabase(t *testing.T) {
assert := assert.New(t)
d1 := os.TempDir()
dir, err := ioutil.TempDir(d1, "flags")
assert.NoError(err)
ldbDir := path.Join(dir, "store")
spec := fmt.Sprintf("ldb:%s", path.Join(dir, "store"))
cs := chunks.NewLevelDBStoreUseFlags(ldbDir, "")
db := datas.NewDatabase(cs)
ds := db.GetDataset("testDs")
s1 := types.String("A String")
s1Hash := db.WriteValue(s1)
db.CommitValue(ds, s1Hash)
db.Close()
sp, errRead := ParseDatabaseSpec(spec)
assert.NoError(errRead)
store, err := sp.Database()
assert.NoError(err)
assert.Equal(s1, store.ReadValue(s1.Hash()))
store.Close()
os.Remove(dir)
}
func TestMemDatabase(t *testing.T) {
assert := assert.New(t)
spec := "mem"
sp, err := ParseDatabaseSpec(spec)
assert.NoError(err)
store, err := sp.Database()
assert.NoError(err)
r := store.WriteValue(types.Bool(true))
assert.NoError(err)
assert.Equal(types.Bool(true), store.ReadValue(r.TargetHash()))
}
func TestMemDataset(t *testing.T) {
assert := assert.New(t)
spec := "mem::datasetTest"
sp1, err := parseDatasetSpec(spec)
assert.NoError(err)
dataset1, err := sp1.Dataset()
assert.NoError(err)
headVal := types.String("Commit Value")
dsTest, err := dataset1.Database().CommitValue(dataset1, headVal)
assert.NoError(err)
assert.EqualValues(headVal, dsTest.HeadValue())
}
func TestLDBDataset(t *testing.T) {
assert := assert.New(t)
dir, err := ioutil.TempDir(os.TempDir(), "")
assert.NoError(err)
ldbPath := path.Join(dir, "name")
cs := chunks.NewLevelDBStoreUseFlags(ldbPath, "")
db := datas.NewDatabase(cs)
id := "dsName"
ds := db.GetDataset(id)
headVal := types.String("Commit Value")
ds, err = ds.Database().CommitValue(ds, headVal)
assert.NoError(err)
db.Close()
spec := fmt.Sprintf("ldb:%s::%s", ldbPath, id)
sp, err := parseDatasetSpec(spec)
assert.NoError(err)
dataset, err := sp.Dataset()
assert.NoError(err)
assert.EqualValues(headVal, dataset.HeadValue())
os.Remove(dir)
}
func TestLDBObject(t *testing.T) {
assert := assert.New(t)
dir, err := ioutil.TempDir(os.TempDir(), "")
assert.NoError(err)
ldbpath := path.Join(dir, "xx-yy")
dsId := "dsId"
cs1 := chunks.NewLevelDBStoreUseFlags(ldbpath, "")
store1 := datas.NewDatabase(cs1)
dataset1 := store1.GetDataset(dsId)
s1 := types.String("Commit Value")
r1 := store1.WriteValue(s1)
dataset1, err = store1.CommitValue(dataset1, r1)
assert.NoError(err)
store1.Close()
spec2 := fmt.Sprintf("ldb:%s::%s", ldbpath, dsId)
assert.NoError(err)
sp1, err := parseDatasetSpec(spec2)
assert.NoError(err)
dataset2, err := sp1.Dataset()
assert.NoError(err)
r2 := dataset2.HeadValue()
s2 := r2.(types.Ref).TargetValue(dataset2.Database())
assert.Equal(s1, s2)
dataset2.Database().Close()
spec3 := fmt.Sprintf("ldb:%s::#%s", ldbpath, s1.Hash().String())
sp3, err := ParsePathSpec(spec3)
assert.NoError(err)
database, v3, err := sp3.Value()
assert.NoError(err)
assert.Equal(s1, v3)
database.Close()
}
func TestReadHash(t *testing.T) {
assert := assert.New(t)
dir, err := ioutil.TempDir(os.TempDir(), "")
assert.NoError(err)
datasetId := "dsName"
ldbPath := path.Join(dir, "/name")
cs1 := chunks.NewLevelDBStoreUseFlags(ldbPath, "")
database1 := datas.NewDatabase(cs1)
dataset1 := database1.GetDataset(datasetId)
commit := types.String("Commit Value")
dataset1, err = database1.CommitValue(dataset1, commit)
assert.NoError(err)
r1 := dataset1.Head().Hash()
dataset1.Database().Close()
spec2 := fmt.Sprintf("ldb:%s::#%s", ldbPath, r1.String())
sp2, err := ParsePathSpec(spec2)
assert.NoError(err)
database, v2, err := sp2.Value()
assert.NoError(err)
assert.EqualValues(r1.String(), v2.Hash().String())
database.Close()
}
func TestDatabaseSpecs(t *testing.T) {
assert := assert.New(t)
badSpecs := []string{"mem:stuff", "mem:", "http:", "https:", "random:", "random:random", "/file/ba:d"}
for _, spec := range badSpecs {
_, err := ParseDatabaseSpec(spec)
assert.Error(err, spec)
}
type testCase struct {
spec, scheme, path, accessToken string
}
testCases := []testCase{
{"http://localhost:8000", "http", "//localhost:8000", ""},
{"http://localhost:8000/fff", "http", "//localhost:8000/fff", ""},
{"https://local.attic.io/john/doe", "https", "//local.attic.io/john/doe", ""},
{"ldb:/filesys/john/doe", "ldb", "/filesys/john/doe", ""},
{"./john/doe", "ldb", "./john/doe", ""},
{"john/doe", "ldb", "john/doe", ""},
{"/john/doe", "ldb", "/john/doe", ""},
{"mem", "mem", "", ""},
{"http://server.com/john/doe?access_token=jane", "http", "//server.com/john/doe?access_token=jane", "jane"},
{"https://server.com/john/doe/?arg=2&qp1=true&access_token=jane", "https", "//server.com/john/doe/?arg=2&qp1=true&access_token=jane", "jane"},
}
for _, tc := range testCases {
dbSpec, err := ParseDatabaseSpec(tc.spec)
assert.NoError(err)
assert.Equal(DatabaseSpec{Protocol: tc.scheme, Path: tc.path, accessToken: tc.accessToken}, dbSpec)
}
}
func TestDatasetSpecs(t *testing.T) {
assert := assert.New(t)
badSpecs := []string{"mem", "mem:", "mem:::ds", "http", "http:", "http://foo", "monkey", "monkey:balls", "mem:/a/bogus/path:dsname", "http://localhost:8000/one"}
for _, spec := range badSpecs {
_, err := parseDatasetSpec(spec)
assert.Error(err, spec)
}
invalidDatasetNames := []string{" ", "", "$", "#", ":", "\n", "💩"}
for _, s := range invalidDatasetNames {
_, err := parseDatasetSpec("mem::" + s)
assert.Error(err)
}
validDatasetNames := []string{"a", "Z", "0", "/", "-", "_"}
for _, s := range validDatasetNames {
_, err := parseDatasetSpec("mem::" + s)
assert.NoError(err)
}
type testCase struct {
spec, scheme, path, ds, accessToken string
}
testCases := []testCase{
{"http://localhost:8000::ds1", "http", "//localhost:8000", "ds1", ""},
{"http://localhost:8000/john/doe/::ds2", "http", "//localhost:8000/john/doe/", "ds2", ""},
{"https://local.attic.io/john/doe::ds3", "https", "//local.attic.io/john/doe", "ds3", ""},
{"http://local.attic.io/john/doe::ds1", "http", "//local.attic.io/john/doe", "ds1", ""},
{"ldb:/filesys/john/doe::ds/one", "ldb", "/filesys/john/doe", "ds/one", ""},
{"http://localhost:8000/john/doe?access_token=abc::ds/one", "http", "//localhost:8000/john/doe?access_token=abc", "ds/one", "abc"},
{"https://localhost:8000?qp1=x&access_token=abc&qp2=y::ds/one", "https", "//localhost:8000?qp1=x&access_token=abc&qp2=y", "ds/one", "abc"},
}
for _, tc := range testCases {
dsSpec, err := parseDatasetSpec(tc.spec)
assert.NoError(err)
dbSpec1 := DatabaseSpec{Protocol: tc.scheme, Path: tc.path, accessToken: tc.accessToken}
assert.Equal(datasetSpec{DbSpec: dbSpec1, DatasetName: tc.ds}, dsSpec)
}
}
func TestPathSpec(t *testing.T) {
assert := assert.New(t)
badSpecs := []string{"mem::#", "mem::#s", "mem::#foobarbaz", "mem::#wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww"}
for _, bs := range badSpecs {
_, err := ParsePathSpec(bs)
assert.Error(err)
}
type testCase struct {
spec, scheme, dbPath, pathStr string
}
testCases := []testCase{
{"http://local.attic.io/john/doe::#0123456789abcdefghijklmnopqrstuv", "http", "//local.attic.io/john/doe", "#0123456789abcdefghijklmnopqrstuv"},
{"ldb:/filesys/john/doe::#0123456789abcdefghijklmnopqrstuv", "ldb", "/filesys/john/doe", "#0123456789abcdefghijklmnopqrstuv"},
{"mem::#0123456789abcdefghijklmnopqrstuv", "mem", "", "#0123456789abcdefghijklmnopqrstuv"},
{"http://local.attic.io/john/doe::#0123456789abcdefghijklmnopqrstuv", "http", "//local.attic.io/john/doe", "#0123456789abcdefghijklmnopqrstuv"},
{"http://localhost:8000/john/doe/::ds1", "http", "//localhost:8000/john/doe/", "ds1"},
}
for _, tc := range testCases {
dbSpec := DatabaseSpec{Protocol: tc.scheme, Path: tc.dbPath, accessToken: ""}
path, err := NewAbsolutePath(tc.pathStr)
assert.NoError(err)
expected := PathSpec{dbSpec, path}
actual, err := ParsePathSpec(tc.spec)
assert.NoError(err)
assert.Equal(expected, actual)
}
}


@@ -9,6 +9,7 @@ package spec
import (
"fmt"
"net/url"
"regexp"
"strings"
"github.com/attic-labs/noms/go/chunks"
@@ -17,6 +18,15 @@ import (
"github.com/attic-labs/noms/go/types"
)
const (
Separator = "::"
)
var (
datasetRe = regexp.MustCompile("^" + datas.DatasetRe.String() + "$")
ldbStores = map[string]*refCountingLdbStore{}
)
// SpecOptions customize Spec behavior.
type SpecOptions struct {
// Authorization token for requests. For example, if the database is HTTP
@@ -146,7 +156,7 @@ func (sp Spec) NewChunkStore() chunks.ChunkStore {
case "http", "https":
return nil
case "ldb":
return getLDBStore(sp.DatabaseName)
return getLdbStore(sp.DatabaseName)
case "mem":
return chunks.NewMemoryStore()
}
@@ -235,7 +245,7 @@ func (sp Spec) createDatabase() datas.Database {
case "http", "https":
return datas.NewRemoteDatabase(sp.Href(), sp.Options.Authorization)
case "ldb":
return datas.NewDatabase(getLDBStore(sp.DatabaseName))
return datas.NewDatabase(getLdbStore(sp.DatabaseName))
case "mem":
return datas.NewDatabase(chunks.NewMemoryStore())
}
@@ -293,3 +303,16 @@ func splitDatabaseSpec(spec string) (string, string, error) {
return spec[:lastIdx], spec[lastIdx+len(Separator):], nil
}
func getLdbStore(path string) chunks.ChunkStore {
if store, ok := ldbStores[path]; ok {
store.AddRef()
return store
}
store := newRefCountingLdbStore(path, func() {
delete(ldbStores, path)
})
ldbStores[path] = store
return store
}

go/spec/util.go (new file, 29 lines)

@@ -0,0 +1,29 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package spec
import (
"fmt"
"github.com/attic-labs/noms/go/chunks"
"github.com/attic-labs/noms/go/hash"
flag "github.com/juju/gnuflag"
)
func RegisterDatabaseFlags(flags *flag.FlagSet) {
chunks.RegisterLevelDBFlags(flags)
}
func CreateDatabaseSpecString(protocol, path string) string {
return fmt.Sprintf("%s:%s", protocol, path)
}
func CreateValueSpecString(protocol, path, value string) string {
return fmt.Sprintf("%s:%s%s%s", protocol, path, Separator, value)
}
func CreateHashSpecString(protocol, path string, h hash.Hash) string {
return fmt.Sprintf("%s:%s%s#%s", protocol, path, Separator, h.String())
}
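
These helpers just assemble spec strings around the `::` Separator, so they pair directly with the new constructors. A rough sketch of the round trip (the ldb path and dataset name are invented for illustration):

```go
package main

import (
	"fmt"

	"github.com/attic-labs/noms/go/spec"
)

func main() {
	// "/tmp/noms" and "my-ds" are placeholder values.
	dbSpec := spec.CreateDatabaseSpecString("ldb", "/tmp/noms")       // "ldb:/tmp/noms"
	dsSpec := spec.CreateValueSpecString("ldb", "/tmp/noms", "my-ds") // "ldb:/tmp/noms::my-ds"
	fmt.Println(dbSpec, dsSpec)

	// The assembled string parses straight back into a Spec.
	sp, err := spec.ForDataset(dsSpec)
	if err != nil {
		panic(err)
	}
	defer sp.Close()
	fmt.Println("parsed spec:", sp.Spec)
}
```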


@@ -29,10 +29,10 @@ func (s *bgSuite) TestBlobGet() {
blobBytes := []byte("hello")
blob := types.NewBlob(bytes.NewBuffer(blobBytes))
db, err := spec.GetDatabase(s.TempDir)
sp, err := spec.ForDatabase(s.TempDir)
s.NoError(err)
hash := db.WriteValue(blob)
db.Close()
defer sp.Close()
hash := sp.GetDatabase().WriteValue(blob)
hashSpec := fmt.Sprintf("%s::#%s", s.TempDir, hash.TargetHash().String())
filePath := filepath.Join(s.TempDir, "out")


@@ -91,8 +91,10 @@ func download() (win bool) {
// In order to pin the path, we need to get the path after running it through
// the config file processing.
resolvedPath := cfg.ResolvePathSpec(inPath)
inSpec, err := spec.ParsePathSpec(resolvedPath)
inSpec, err := spec.ForPath(resolvedPath)
d.PanicIfError(err)
defer inSpec.Close()
pinnedPath := pinPath(db, inSpec.Path)
fmt.Println("Resolved in-path:", resolvedPath, "\nPinned path:", pinnedPath)


@@ -11,6 +11,7 @@ import (
"strings"
"testing"
"github.com/attic-labs/noms/go/chunks"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
@@ -29,12 +30,10 @@ type testSuite struct {
func (s testSuite) TestMain() {
commitToDb := func(v types.Value, dsName string, dbSpec string) {
db, err := spec.GetDatabase(dbSpec)
sp, err := spec.ForDataset(dbSpec + spec.Separator + dsName)
s.NoError(err)
defer db.Close()
ds := db.GetDataset(dsName)
db.Commit(ds, v, datas.CommitOptions{})
defer sp.Close()
sp.GetDatabase().Commit(sp.GetDataset(), v, datas.CommitOptions{})
}
testBlobValue := func(db datas.Database, m types.Map, key, expected string) {
@@ -81,12 +80,13 @@ func (s testSuite) TestMain() {
"walked: 1, updated 1, found in cache: 0, errors retrieving: 0",
)
db, v, err := spec.GetPath(dbSpecString + "::out-ds.value")
sp, err := spec.ForPath(dbSpecString + "::out-ds.value")
s.NoError(err)
defer db.Close()
defer sp.Close()
testBlobValue(db, v.(types.Map), "k1", "/one")
testBlobValue(db, v.(types.Map), "k2", "/two")
db, v := sp.GetDatabase(), sp.GetValue().(types.Map)
testBlobValue(db, v, "k1", "/one")
testBlobValue(db, v, "k2", "/two")
mustRunTest(
[]string{"--cache-ds", "cache", dbSpecString + "::in-ds.value", "out-ds"},
@@ -116,9 +116,7 @@ func (s testSuite) TestMain() {
func TestDownloadBlob(t *testing.T) {
assert := assert.New(t)
db, err := spec.GetDatabase("mem")
assert.NoError(err)
defer db.Close()
db := datas.NewDatabase(chunks.NewMemoryStore())
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
fmt.Fprintf(w, "Hello, client, url: "+r.URL.String())
@@ -135,13 +133,11 @@ func TestDownloadBlob(t *testing.T) {
func TestPinPath(t *testing.T) {
assert := assert.New(t)
db, err := spec.GetDatabase("mem")
assert.NoError(err)
defer db.Close()
db := datas.NewDatabase(chunks.NewMemoryStore())
dsName := "testds"
ds := db.GetDataset(dsName)
ds, err = db.CommitValue(ds, types.NewMap(
ds, err := db.CommitValue(ds, types.NewMap(
types.String("k1"), types.String("v1"),
types.String("k2"), types.String("v2"),
))
@@ -163,16 +159,14 @@ func TestPinPath(t *testing.T) {
func TestGetLastInRoot(t *testing.T) {
assert := assert.New(t)
db, err := spec.GetDatabase("mem")
assert.NoError(err)
defer db.Close()
db := datas.NewDatabase(chunks.NewMemoryStore())
k1 := types.String("k1")
// commit source ds with no sourcePath field
sourceM := types.NewMap(k1, types.String("source commit 1"))
sourceDs := db.GetDataset("test-source-ds")
sourceDs, err = db.Commit(sourceDs, sourceM, datas.CommitOptions{})
sourceDs, err := db.Commit(sourceDs, sourceM, datas.CommitOptions{})
assert.NoError(err)
lastInRoot := getLastInRoot(db, sourceDs.Head())
assert.Nil(lastInRoot)


@@ -32,13 +32,12 @@ func randomString(slen int) string {
}
func randomBlob(s *resourceCacheTestSuite, slen int) types.Blob {
str := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
db, err := spec.GetDatabase(str)
sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("ldb", s.LdbDir))
s.NoError(err)
defer db.Close()
defer sp.Close()
s1 := randomString(slen)
blob := types.NewStreamingBlob(db, strings.NewReader(s1))
blob := types.NewStreamingBlob(sp.GetDatabase(), strings.NewReader(s1))
return blob
}
@@ -59,10 +58,11 @@ func TestResourceCache(t *testing.T) {
func (s *resourceCacheTestSuite) TestResourceCacheGet() {
dsName := "testCache"
cache1 := func(k types.String, v types.Blob, setNewValue bool) (types.Ref, types.Ref) {
str := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
db, err := spec.GetDatabase(str)
sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("ldb", s.LdbDir))
s.NoError(err)
defer db.Close()
defer sp.Close()
db := sp.GetDatabase()
hr, _ := db.GetDataset(dsName).MaybeHeadRef()
rc, err := getResourceCache(db, dsName)
@@ -96,9 +96,12 @@ func (s *resourceCacheTestSuite) TestResourceCacheGet() {
hr1, hr2 = cache1(types.String("key2"), blob2, true)
s.False(hr1.Equals(hr2))
str := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
db, err := spec.GetDatabase(str)
sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("ldb", s.LdbDir))
s.NoError(err)
defer sp.Close()
db := sp.GetDatabase()
rc, err := getResourceCache(db, dsName)
s.NoError(err)
s.Equal(uint64(2), rc.len())
@@ -110,21 +113,21 @@ func (s *resourceCacheTestSuite) TestResourceCacheGet() {
}
func (s *resourceCacheTestSuite) TestCheckCacheType() {
blob1 := randomBlob(s, 30)
blob1 := randomBlob(s, 30)
badTestCases := []types.Value {
types.NewStruct("testStruct", types.StructData{"f1": types.String("f1value")}),
types.NewMap(types.Number(1), types.NewRef(blob1)),
types.NewMap(types.String("s1"), types.String("badtype")),
types.NewMap(types.String("s1"), types.NewRef(types.String("badtype"))),
}
for _, tc := range badTestCases {
err := checkCacheType(tc)
s.Error(err)
}
badTestCases := []types.Value{
types.NewStruct("testStruct", types.StructData{"f1": types.String("f1value")}),
types.NewMap(types.Number(1), types.NewRef(blob1)),
types.NewMap(types.String("s1"), types.String("badtype")),
types.NewMap(types.String("s1"), types.NewRef(types.String("badtype"))),
}
c1 := types.NewMap(types.String("s1"), types.NewRef(blob1))
err := checkCacheType(c1)
s.NoError(err)
for _, tc := range badTestCases {
err := checkCacheType(tc)
s.Error(err)
}
c1 := types.NewMap(types.String("s1"), types.NewRef(blob1))
err := checkCacheType(c1)
s.NoError(err)
}


@@ -9,7 +9,6 @@ import (
"testing"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/marshal"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
@@ -23,8 +22,7 @@ func TestBasics(t *testing.T) {
type testSuite struct {
clienttest.ClientTestSuite
db datas.Database
ds datas.Dataset
sp spec.Spec
}
type Face struct {
Name string
@@ -123,8 +121,13 @@ func getPhotoOutput(photo Photo, faces types.Set) PhotoOutput {
}
func (s *testSuite) SetupTest() {
sp := fmt.Sprintf("ldb:%s::test", s.LdbDir)
s.db, s.ds, _ = spec.GetDataset(sp)
var err error
s.sp, err = spec.ForDataset(fmt.Sprintf("ldb:%s::test", s.LdbDir))
s.NoError(err)
}
func (s *testSuite) TearDownTest() {
s.sp.Close()
}
func (s *testSuite) TestMerge() {
@@ -165,9 +168,8 @@ func (s *testSuite) TestMerge() {
v, err := marshal.Marshal(photos)
s.NoError(err)
s.ds, err = s.db.CommitValue(s.ds, v)
_, err = s.sp.GetDatabase().CommitValue(s.sp.GetDataset(), v)
s.NoError(err)
s.db.Close()
verifyOutput := func(photoA types.Struct, photoB types.Struct) {
s.Equal(photoA.Get("title").Equals(photoB.Get("title")), true)
@@ -181,9 +183,10 @@ func (s *testSuite) TestMerge() {
stdo, _ := s.MustRun(main, []string{"--out-ds", "idx", "--db", s.LdbDir, "test"})
fmt.Println(stdo)
_, ds, _ := spec.GetDataset(fmt.Sprintf("%s::idx", s.LdbDir))
sp, err := spec.ForDataset(fmt.Sprintf("%s::idx", s.LdbDir))
s.NoError(err)
val := types.Set{}
marshal.Unmarshal(ds.HeadValue(), &val)
marshal.Unmarshal(sp.GetDataset().HeadValue(), &val)
val.IterAll(func(v types.Value) {
var testOutput types.Value


@@ -24,8 +24,9 @@ type testSuite struct {
}
func (s *testSuite) TestWin() {
sp := fmt.Sprintf("ldb:%s::test", s.LdbDir)
db, ds, _ := spec.GetDataset(sp)
sp, err := spec.ForDataset(fmt.Sprintf("ldb:%s::test", s.LdbDir))
s.NoError(err)
defer sp.Close()
type Face struct {
Name string
@@ -92,13 +93,15 @@ func (s *testSuite) TestWin() {
v, err := marshal.Marshal(photos)
s.NoError(err)
ds, err = db.CommitValue(ds, v)
_, err = sp.GetDatabase().CommitValue(sp.GetDataset(), v)
s.NoError(err)
db.Close()
_, _ = s.MustRun(main, []string{"--out-ds", "idx", "--db", s.LdbDir, "test"})
db, ds, _ = spec.GetDataset(fmt.Sprintf("%s::idx", s.LdbDir))
sp, err = spec.ForDataset(fmt.Sprintf("%s::idx", s.LdbDir))
s.NoError(err)
defer sp.Close()
var idx struct {
ByDate map[int]types.Set
ByTag map[string]map[int]types.Set
@@ -106,7 +109,7 @@ func (s *testSuite) TestWin() {
TagsByCount map[int]types.Set
FacesByCount map[int]types.Set
}
marshal.Unmarshal(ds.HeadValue(), &idx)
marshal.Unmarshal(sp.GetDataset().HeadValue(), &idx)
s.Equal(5, len(idx.ByDate))
for i := 0; i < 5; i++ {


@@ -23,16 +23,17 @@ type testSuite struct {
}
func (s *testSuite) TestWin() {
sp := fmt.Sprintf("ldb:%s::test", s.LdbDir)
db, ds, _ := spec.GetDataset(sp)
ds, _ = db.CommitValue(ds, types.NewStruct("", map[string]types.Value{
sp, err := spec.ForDataset(fmt.Sprintf("ldb:%s::test", s.LdbDir))
s.NoError(err)
defer sp.Close()
sp.GetDatabase().CommitValue(sp.GetDataset(), types.NewStruct("", map[string]types.Value{
"num": types.Number(42),
"str": types.String("foobar"),
"lst": types.NewList(types.Number(1), types.String("foo")),
"map": types.NewMap(types.Number(1), types.String("foo"),
types.String("foo"), types.Number(1)),
}))
db.Close()
changes := map[string]string{
".num": "43",
@@ -43,14 +44,16 @@ func (s *testSuite) TestWin() {
}
for k, v := range changes {
stdout, stderr, err := s.Run(main, []string{sp, k, v})
stdout, stderr, err := s.Run(main, []string{sp.Spec, k, v})
s.Equal("", stdout)
s.Equal("", stderr)
s.Equal(nil, err)
}
_, ds, _ = spec.GetDataset(sp)
r := ds.HeadValue()
sp, _ = spec.ForDataset(sp.Spec)
defer sp.Close()
r := sp.GetDataset().HeadValue()
for k, vs := range changes {
v, _, _, _ := types.ParsePathIndex(vs)
p, err := types.ParsePath(k)
@@ -61,31 +64,31 @@ func (s *testSuite) TestWin() {
}
func (s *testSuite) TestLose() {
sp := fmt.Sprintf("ldb:%s::test", s.LdbDir)
type c struct {
sp, err := spec.ForDataset(fmt.Sprintf("ldb:%s::test", s.LdbDir))
s.NoError(err)
defer sp.Close()
cases := []struct {
args []string
err string
}
cases := []c{
}{
{[]string{"foo"}, "Incorrect number of arguments\n"},
{[]string{"foo", "bar"}, "Incorrect number of arguments\n"},
{[]string{"foo", "bar", "baz", "quux"}, "Incorrect number of arguments\n"},
{[]string{sp + "!!", ".foo", `"bar"`}, "Invalid input dataset '" + sp + "!!': Dataset test!! must match ^[a-zA-Z0-9\\-_/]+$\n"},
{[]string{sp + "2", ".foo", `"bar"`}, "Input dataset '" + sp + "2' does not exist\n"},
{[]string{sp, "[invalid", `"bar"`}, "Invalid path '[invalid': Invalid index: invalid\n"},
{[]string{sp, ".nothinghere", `"bar"`}, "No value at path '.nothinghere' - cannot update\n"},
{[]string{sp, ".foo", "bar"}, "Invalid new value: 'bar': Invalid index: bar\n"},
{[]string{"--out-ds-name", "!invalid", sp, ".foo", `"bar"`}, "Invalid output dataset name: !invalid\n"},
{[]string{sp, `.bar["baz"]@key`, "42"}, "Error updating path [\"baz\"]@key: @key paths not supported\n"},
{[]string{sp, `.bar[#00000000000000000000000000000000]`, "42"}, "Invalid path '.bar[#00000000000000000000000000000000]': Invalid hash: 00000000000000000000000000000000\n"},
{[]string{sp.Spec + "!!", ".foo", `"bar"`}, "Invalid input dataset '" + sp.Spec + "!!': Dataset test!! must match ^[a-zA-Z0-9\\-_/]+$\n"},
{[]string{sp.Spec + "2", ".foo", `"bar"`}, "Input dataset '" + sp.Spec + "2' does not exist\n"},
{[]string{sp.Spec, "[invalid", `"bar"`}, "Invalid path '[invalid': Invalid index: invalid\n"},
{[]string{sp.Spec, ".nothinghere", `"bar"`}, "No value at path '.nothinghere' - cannot update\n"},
{[]string{sp.Spec, ".foo", "bar"}, "Invalid new value: 'bar': Invalid index: bar\n"},
{[]string{"--out-ds-name", "!invalid", sp.Spec, ".foo", `"bar"`}, "Invalid output dataset name: !invalid\n"},
{[]string{sp.Spec, `.bar["baz"]@key`, "42"}, "Error updating path [\"baz\"]@key: @key paths not supported\n"},
{[]string{sp.Spec, `.bar[#00000000000000000000000000000000]`, "42"}, "Invalid path '.bar[#00000000000000000000000000000000]': Invalid hash: 00000000000000000000000000000000\n"},
}
db, ds, _ := spec.GetDataset(sp)
db.CommitValue(ds, types.NewStruct("", map[string]types.Value{
sp.GetDatabase().CommitValue(sp.GetDataset(), types.NewStruct("", map[string]types.Value{
"foo": types.String("foo"),
"bar": types.NewMap(types.String("baz"), types.Number(42)),
}))
db.Close()
for _, c := range cases {
stdout, stderr, err := s.Run(main, c.args)


@@ -46,14 +46,14 @@ func (s *testSuite) TestImportFromStdin() {
// Run() will return when blobOut is closed.
s.MustRun(main, []string{"--stdin", dsName})
db, blob, err := spec.GetPath(dsName + ".value")
sp, err := spec.ForPath(dsName + ".value")
assert.NoError(err)
defer db.Close()
defer sp.Close()
expected := types.NewBlob(bytes.NewBufferString("abcdef"))
assert.True(expected.Equals(blob))
assert.True(expected.Equals(sp.GetValue()))
ds := db.GetDataset("ds")
ds := sp.GetDatabase().GetDataset("ds")
meta := ds.Head().Get(datas.MetaField).(types.Struct)
// The meta should only have a "date" field.
metaDesc := meta.Type().Desc.(types.StructDesc)
@@ -73,14 +73,14 @@ func (s *testSuite) TestImportFromFile() {
dsName := spec.CreateValueSpecString("ldb", s.LdbDir, "ds")
s.MustRun(main, []string{f.Name(), dsName})
db, blob, err := spec.GetPath(dsName + ".value")
sp, err := spec.ForPath(dsName + ".value")
assert.NoError(err)
defer db.Close()
defer sp.Close()
expected := types.NewBlob(bytes.NewBufferString("abcdef"))
assert.True(expected.Equals(blob))
assert.True(expected.Equals(sp.GetValue()))
ds := db.GetDataset("ds")
ds := sp.GetDatabase().GetDataset("ds")
meta := ds.Head().Get(datas.MetaField).(types.Struct)
metaDesc := meta.Type().Desc.(types.StructDesc)
assert.Equal(2, metaDesc.Len())