Git for data (#138)

Almost functional dolt
This commit is contained in:
Brian Hendriks
2018-12-19 10:03:12 -08:00
committed by GitHub
parent 4b1aa57524
commit 2185a339e4
1604 changed files with 582544 additions and 37551 deletions
-9
View File
@@ -1,9 +0,0 @@
noms.iml
*.pyc
*.swp
.vscode
.idea
.noms
.nomsconfig
.DS_Store
node_modules
Vendored
-37
View File
@@ -1,37 +0,0 @@
// Declarative Jenkins pipeline: runs the Go test suite inside a Kubernetes build agent.
pipeline {
    agent {
        kubernetes {
            // Pod template label for the build agents.
            label "liquidata-inc-ld-build"
        }
    }
    stages {
        stage('Test') {
            environment {
                // SSH private key credential used to fetch private repos over git+ssh.
                GIT_SSH_KEYFILE = credentials("liquidata-inc-ssh")
                // Put the wrapper scripts written below ahead of the system PATH.
                PATH = "${pwd()}/.ci_bin:${env.PATH}"
                GIT_SSH = "${pwd()}/.ci_bin/cred_ssh"
                LD_SKIP_POSTGRES = "y"
                NOMS_VERSION_NEXT = "1"
            }
            steps {
                dir (".ci_bin") {
                    // ssh wrapper that injects the CI deploy key and disables host key checking.
                    writeFile file: "cred_ssh", text: '''\
#!/bin/sh
exec /usr/bin/ssh -i $GIT_SSH_KEYFILE -o StrictHostKeyChecking=no "$@"
'''.stripIndent()
                    sh "chmod +x cred_ssh"
                    // git wrapper that rewrites https://github.com URLs to ssh so the key is used.
                    writeFile file: "git", text: '''\
#!/bin/sh
exec /usr/bin/git -c url."ssh://git@github.com:".insteadOf=https://github.com "$@"
'''.stripIndent()
                    sh "chmod +x git"
                }
                dir (".") {
                    sh "go get ./cmd/... ./go/..."
                    sh "go test -mod=readonly -test.v ./cmd/... ./go/..."
                }
            }
        }
    }
}
-5
View File
@@ -1,5 +0,0 @@
doltdb
======
The persistence layer for dolt. Based on [noms](https://github.com/attic-labs/noms/),
but adapted for storing table data.
-1
View File
@@ -1 +0,0 @@
noms
-24
View File
@@ -1,24 +0,0 @@
## Example
```shell
cd $GOPATH/src/github.com/attic-labs/noms/samples/go/counter
go build
./counter /tmp/nomsdb::counter
./counter /tmp/nomsdb::counter
./counter /tmp/nomsdb::counter
noms serve /tmp/nomsdb
```
Then, in a separate shell:
```shell
# This starts where the previous count left off because we're serving the same database
./counter http://localhost:8000::counter
# Display the datasets at this server
noms ds http://localhost:8000
# Print the history of the counter dataset
noms log http://localhost:8000::counter
```
-165
View File
@@ -1,165 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/types"
)
// CommitIterator walks a database's commit graph in height order, tracking one
// branchList entry per branch currently being displayed.
type CommitIterator struct {
	db       datas.Database
	branches branchList
}

// NewCommitIterator initializes a new CommitIterator with the first commit to be printed.
func NewCommitIterator(db datas.Database, commit types.Struct) *CommitIterator {
	cr := types.NewRef(commit)
	return &CommitIterator{db: db, branches: branchList{branch{cr: cr, commit: commit}}}
}
// Next returns information about the next commit to be printed. LogNode contains enough contextual
// info that the commit and associated graph can be correctly printed.
// This works by traversing the "commit" di-graph in a breadth-first manner. Each time it is called,
// the commit in the branchlist with the greatest height is returned. If that commit has multiple
// parents, new branches are added to the branchlist so that they can be traversed in order. When
// more than one branch contains the same node, that indicates that the branches are converging and so
// the branchlist will have branches removed to reflect that.
func (iter *CommitIterator) Next(ctx context.Context) (LogNode, bool) {
	if iter.branches.IsEmpty() {
		return LogNode{}, false
	}
	// Count of branches present when printing this commit.
	startingColCount := len(iter.branches)
	// The first index is the column this commit is printed in.
	branchIndexes := iter.branches.HighestBranchIndexes()
	col := branchIndexes[0]
	br := iter.branches[col]
	// Any additional indexes represent other branches with the same ancestor. So they are merging
	// into a common ancestor and are no longer graphed.
	iter.branches = iter.branches.RemoveBranches(branchIndexes[1:])
	// If this commit has parents, then a branch is splitting. Create a branch for each of the parents
	// and splice that into the iterator's list of branches.
	branches := branchList{}
	parents := commitRefsFromSet(ctx, br.commit.Get(datas.ParentsField).(types.Set))
	for _, p := range parents {
		b := branch{cr: p, commit: iter.db.ReadValue(ctx, p.TargetHash()).(types.Struct)}
		branches = append(branches, b)
	}
	iter.branches = iter.branches.Splice(col, 1, branches...)
	// Collect the indexes for any newly created branches.
	newCols := []int{}
	for cnt := 1; cnt < len(parents); cnt++ {
		newCols = append(newCols, col+cnt)
	}
	// Now that the branchlist has been adjusted, check to see if there are branches with common
	// ancestors that will be folded together on this commit's graph.
	foldedCols := iter.branches.HighestBranchIndexes()
	node := LogNode{
		cr:               br.cr,
		commit:           br.commit,
		startingColCount: startingColCount,
		endingColCount:   len(iter.branches),
		col:              col,
		newCols:          newCols,
		foldedCols:       foldedCols,
		lastCommit:       iter.branches.IsEmpty(),
	}
	return node, true
}
// LogNode carries one commit plus the graph-layout context needed to print it.
type LogNode struct {
	cr               types.Ref    // typed ref of commit to be printed
	commit           types.Struct // commit that needs to be printed
	startingColCount int          // how many branches are being tracked when this commit is printed
	endingColCount   int          // how many branches will be tracked when next commit is printed
	col              int          // col to put the '*' character in graph
	newCols          []int        // col to start using '\' in graph
	foldedCols       []int        // cols with common ancestors, that will get folded together
	lastCommit       bool         // this is the last commit that will be returned by iterator
}

// String renders the node's layout fields for debugging.
func (n LogNode) String() string {
	return fmt.Sprintf("cr: %s(%d), startingColCount: %d, endingColCount: %d, col: %d, newCols: %v, foldedCols: %v, expanding: %t, shrunk: %t, shrinking: %t", n.cr.TargetHash().String()[0:9], n.cr.Height(), n.startingColCount, n.endingColCount, n.col, n.newCols, n.foldedCols, n.Expanding(), n.Shrunk(), n.Shrinking())
}
// Expanding reports whether this commit's graph will expand to show an additional branch.
func (n LogNode) Expanding() bool {
	return n.endingColCount > n.startingColCount
}

// Shrinking reports whether this commit's graph will show a branch being folded into another branch.
func (n LogNode) Shrinking() bool {
	return len(n.foldedCols) > 1
}

// Shrunk reports whether the previous commit showed a branch being folded into another branch.
func (n LogNode) Shrunk() bool {
	return n.endingColCount < n.startingColCount
}
// branch is one tracked position in the commit graph: the ref plus its
// materialized commit struct.
type branch struct {
	cr     types.Ref
	commit types.Struct
}

// String renders the short hash and height for debugging.
func (b branch) String() string {
	return fmt.Sprintf("%s(%d)", b.cr.TargetHash().String()[0:9], b.cr.Height())
}

// branchList is the ordered set of branches currently being graphed.
type branchList []branch

// IsEmpty reports whether no branches remain to traverse.
func (bl branchList) IsEmpty() bool {
	return len(bl) == 0
}
// HighestBranchIndexes looks through this list of branches and returns the one(s) with the max height.
// If there are multiple nodes with max height, the result will contain a list of all nodes with
// maxHeight that are duplicates of the first one found.
// This indicates that two or more branches are converging.
func (bl branchList) HighestBranchIndexes() []int {
	maxHeight := uint64(0)
	var cr types.Ref
	cols := []int{}
	for i, b := range bl {
		if b.cr.Height() > maxHeight {
			// New maximum: restart the result with this branch.
			maxHeight = b.cr.Height()
			cr = b.cr
			cols = []int{i}
		} else if b.cr.Height() == maxHeight && b.cr.Equals(cr) {
			// Same height AND same ref as the current max: a converging duplicate.
			cols = append(cols, i)
		}
	}
	return cols
}
// Splice returns a copy of bl with deleteCount entries removed at start and
// the given branches inserted in their place (JavaScript Array.splice style).
func (bl branchList) Splice(start int, deleteCount int, branches ...branch) branchList {
	res := append(branchList{}, bl[:start]...)
	res = append(res, branches...)
	return append(res, bl[start+deleteCount:]...)
}

// RemoveBranches removes the entries at the given indexes.
// NOTE(review): removal runs back-to-front, which assumes indexes are sorted
// ascending so earlier removals don't shift later ones — HighestBranchIndexes
// produces ascending indexes; confirm for any new caller.
func (bl branchList) RemoveBranches(indexes []int) branchList {
	for i := len(indexes) - 1; i >= 0; i-- {
		bl = bl.Splice(indexes[i], 1)
	}
	return bl
}
// commitRefsFromSet collects every value of the set as a types.Ref, in the
// set's iteration order.
func commitRefsFromSet(ctx context.Context, set types.Set) []types.Ref {
	refs := make([]types.Ref, 0)
	collect := func(v types.Value) {
		refs = append(refs, v.(types.Ref))
	}
	set.IterAll(ctx, collect)
	return refs
}
-232
View File
@@ -1,232 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"log"
"math/rand"
"os"
"strings"
"time"
"github.com/attic-labs/kingpin"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/util/exit"
"github.com/attic-labs/noms/go/util/profile"
"github.com/attic-labs/noms/go/util/verbose"
flag "github.com/juju/gnuflag"
)
// commands is the dispatch table for the legacy (pre-kingpin) subcommands,
// matched by name in main's fallback loop.
var commands = []*util.Command{
	nomsCommit,
	nomsConfig,
	nomsDiff,
	nomsDs,
	nomsLog,
	nomsMerge,
	nomsRoot,
	nomsShow,
	nomsSync,
	nomsVersion,
	nomsManifest,
	nomsCat,
}

// kingpinCommands are the subcommands already ported to the kingpin CLI framework.
var kingpinCommands = []util.KingpinCommand{
	nomsBlob,
	nomsList,
	nomsMap,
	nomsSet,
	nomsStats,
	nomsStruct,
}

// actions supplies a randomly chosen verb phrase for the usage string.
var actions = []string{
	"interacting with",
	"poking at",
	"goofing with",
	"dancing with",
	"playing with",
	"contemplation of",
	"showing off",
	"jiggerypokery of",
	"singing to",
	"nomming on",
	"fighting with",
}

// usageString returns the one-line tool description with a randomly picked action.
func usageString() string {
	i := rand.New(rand.NewSource(time.Now().UnixNano())).Intn(len(actions))
	return fmt.Sprintf(`Noms is a tool for %s Noms data.`, actions[i])
}
// main parses global flags, dispatches kingpin-based subcommands, and falls
// back to the legacy util.Command table for everything else.
func main() {
	// allow short (-h) help
	kingpin.EnableFileExpansion = false
	kingpin.CommandLine.HelpFlag.Short('h')
	noms := kingpin.New("noms", usageString())

	// global flags
	cpuProfileVal := noms.Flag("cpuprofile", "write cpu profile to file").String()
	memProfileVal := noms.Flag("memprofile", "write memory profile to file").String()
	blockProfileVal := noms.Flag("blockprofile", "write block profile to file").String()
	verboseVal := noms.Flag("verbose", "show more").Short('v').Bool()
	quietVal := noms.Flag("quiet", "show less").Short('q').Bool()

	// set up docs for non-kingpin commands
	addNomsDocs(noms)

	handlers := map[string]util.KingpinHandler{}

	// install kingpin handlers
	for _, cmdFunction := range kingpinCommands {
		command, handler := cmdFunction(context.Background(), noms)
		handlers[command.FullCommand()] = handler
	}

	input := kingpin.MustParse(noms.Parse(os.Args[1:]))

	// apply global flags
	profile.ApplyProfileFlags(cpuProfileVal, memProfileVal, blockProfileVal)
	verbose.SetVerbose(*verboseVal)
	verbose.SetQuiet(*quietVal)

	if handler := handlers[strings.Split(input, " ")[0]]; handler != nil {
		// Fix: propagate the handler's exit code and stop here. Previously the
		// return value was discarded and control fell through to the legacy
		// dispatch below, so kingpin commands always exited 0.
		exit.Exit(handler(input))
	}

	// fall back to previous (non-kingpin) noms commands
	flag.Parse(false)
	args := flag.Args()
	// Fix: guard against an empty argument list before indexing args[0].
	if len(args) == 0 {
		return
	}

	// Don't prefix log messages with timestamp when running interactively
	log.SetFlags(0)

	for _, cmd := range commands {
		if cmd.Name() == args[0] {
			flags := cmd.Flags()
			flags.Usage = cmd.Usage
			flags.Parse(true, args[1:])
			args = flags.Args()
			if cmd.Nargs != 0 && len(args) < cmd.Nargs {
				cmd.Usage()
			}
			exitCode := cmd.Run(context.Background(), args)
			if exitCode != 0 {
				exit.Exit(exitCode)
			}
			return
		}
	}
}
// addDatabaseArg adds a required "database" arg to the passed command and
// returns the pointer kingpin will fill with the parsed value.
func addDatabaseArg(cmd *kingpin.CmdClause) (arg *string) {
	return cmd.Arg("database", "a noms database path").Required().String() // TODO: custom parser for noms db URL?
}
// addNomsDocs - adds documentation (docs only, not commands) for existing (pre-kingpin) commands.
// The flag/arg registrations here only feed kingpin's help output; parsing for
// these commands still happens in each command's own gnuflag FlagSet.
func addNomsDocs(noms *kingpin.Application) {
	// commit
	commit := noms.Command("commit", `Commits a specified value as head of the dataset
If absolute-path is not provided, then it is read from stdin. See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the dataset and absolute-path arguments.
`)
	commit.Flag("allow-dupe", "creates a new commit, even if it would be identical (modulo metadata and parents) to the existing HEAD.").Default("0").Int()
	commit.Flag("date", "alias for -meta 'date=<date>'. '<date>' must be iso8601-formatted. If '<date>' is empty, it defaults to the current date.").String()
	commit.Flag("message", "alias for -meta 'message=<message>'").String()
	commit.Flag("meta", "'<key>=<value>' - creates a metadata field called 'key' set to 'value'. Value should be human-readable encoded.").String()
	commit.Flag("meta-p", "'<key>=<path>' - creates a metadata field called 'key' set to the value at <path>").String()
	commit.Arg("absolute-path", "the path to read data from").String()
	// TODO: this should be required, but kingpin does not allow required args after non-required ones. Perhaps a custom type would fix that?
	commit.Arg("database", "a noms database path").String()

	// config
	noms.Command("config", "Prints the active configuration if a .nomsconfig file is present")

	// diff
	diff := noms.Command("diff", `Shows the difference between two objects
See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the object arguments.
`)
	diff.Flag("stat", "Writes a summary of the changes instead").Short('s').Bool()
	diff.Arg("object1", "").Required().String()
	diff.Arg("object2", "").Required().String()

	// ds
	ds := noms.Command("ds", `Noms dataset management
See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the database argument.
`)
	ds.Flag("delete", "dataset to delete").Short('d').String()
	ds.Arg("database", "a noms database path").String()

	// log
	log := noms.Command("log", `Displays the history of a path
See Spelling Values at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the <path-spec> parameter.
`)
	log.Flag("color", "value of 1 forces color on, 0 forces color off").Default("-1").Int()
	log.Flag("max-lines", "max number of lines to show per commit (-1 for all lines)").Default("9").Int()
	log.Flag("max-commits", "max number of commits to display (0 for all commits)").Short('n').Default("0").Int()
	log.Flag("oneline", "show a summary of each commit on a single line").Bool()
	log.Flag("graph", "show ascii-based commit hierarchy on left side of output").Bool()
	log.Flag("show-value", "show commit value rather than diff information").Bool()
	log.Flag("tz", "display formatted date comments in specified timezone, must be: local or utc").Enum("local", "utc")
	log.Arg("path-spec", "").Required().String()

	// merge
	merge := noms.Command("merge", `Merges and commits the head values of two named datasets
See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the database argument.
You must provide a working database and the names of two Datasets you want to merge. The values at the heads of these Datasets will be merged, put into a new Commit object, and set as the Head of the third provided Dataset name.
`)
	merge.Flag("policy", "conflict resolution policy for merging. Defaults to 'n', which means no resolution strategy will be applied. Supported values are 'l' (left), 'r' (right) and 'p' (prompt). 'prompt' will bring up a simple command-line prompt allowing you to resolve conflicts by choosing between 'l' or 'r' on a case-by-case basis.").Default("n").Enum("n", "r", "l", "p")
	addDatabaseArg(merge)
	merge.Arg("left-dataset-name", "a dataset").Required().String()
	merge.Arg("right-dataset-name", "a dataset").Required().String()
	merge.Arg("output-dataset-name", "a dataset").Required().String()

	// root
	root := noms.Command("root", `Get or set the current root hash of the entire database
See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the database argument.
`)
	root.Flag("update", "Replaces the entire database with the one with the given hash").String()
	addDatabaseArg(root)

	// show
	show := noms.Command("show", `Shows a serialization of a Noms object
See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the object argument.
`)
	show.Flag("raw", "If true, dumps the raw binary version of the data").Bool()
	show.Flag("stats", "If true, reports statistics related to the value").Bool()
	show.Flag("tz", "display formatted date comments in specified timezone, must be: local or utc").Enum("local", "utc")
	show.Arg("object", "a noms object").Required().String()

	// sync
	sync := noms.Command("sync", `Moves datasets between or within databases
See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the object and dataset arguments.
`)
	sync.Flag("parallelism", "").Short('p').Default("512").Int()
	sync.Arg("source-object", "a noms source object").Required().String()
	sync.Arg("dest-dataset", "a noms dataset").Required().String()

	// version
	noms.Command("version", "Print the noms version")

	// manifest
	manifest := noms.Command("manifest", `Prints a database's manifest in a more readable format`)
	addDatabaseArg(manifest)

	// cat
	cat := noms.Command("cat", `Prints the contents of an nbs file`)
	cat.Arg("nbs-file", "nbs file").Required().String()
	cat.Flag("raw", "If true, includes the raw binary version of each chunk in the nbs file").Bool()
	cat.Flag("decompressed", "If true, includes the decompressed binary version of each chunk in the nbs file").Bool()
	cat.Flag("no-show", "If true, skips printing of the value").Bool()
	cat.Flag("hashes-only", "If true, only prints the b32 hashes").Bool()
}
-40
View File
@@ -1,40 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"runtime"
"strconv"
"github.com/attic-labs/kingpin"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/d"
)
// nomsBlob registers the "blob" command (with "put" and "export" subcommands)
// and returns the handler that dispatches a parsed command line to the
// matching implementation.
func nomsBlob(ctx context.Context, noms *kingpin.Application) (*kingpin.CmdClause, util.KingpinHandler) {
	blob := noms.Command("blob", "interact with blobs")

	// blob put <file> <dataset>
	blobPut := blob.Command("put", "imports a blob to a dataset")
	concurrency := blobPut.Flag("concurrency", "number of concurrent HTTP calls to retrieve remote resources").Default(strconv.Itoa(runtime.NumCPU())).Int()
	putFile := blobPut.Arg("file", "a file to import").Required().String()
	putDs := blobPut.Arg("dataset", "the path to import to").Required().String()

	// blob export <dataset> [file]
	blobGet := blob.Command("export", "exports a blob from a dataset")
	getDs := blobGet.Arg("dataset", "the dataset to export").Required().String()
	getPath := blobGet.Arg("file", "an optional file to save the blob to").String()

	handler := func(input string) int {
		if input == blobPut.FullCommand() {
			return nomsBlobPut(ctx, *putFile, *putDs, *concurrency)
		}
		if input == blobGet.FullCommand() {
			return nomsBlobGet(ctx, *getDs, *getPath)
		}
		d.Panic("notreached")
		return 1
	}
	return blob, handler
}
-69
View File
@@ -1,69 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"io"
"os"
"time"
"github.com/attic-labs/noms/go/config"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/profile"
"github.com/attic-labs/noms/go/util/progressreader"
"github.com/attic-labs/noms/go/util/status"
humanize "github.com/dustin/go-humanize"
)
// nomsBlobGet implements "noms blob export": it resolves the value at ds,
// verifies it is a Blob, and writes its bytes either to stdout (when filePath
// is empty) or to filePath, reporting progress on the status line.
// Returns a process exit code.
func nomsBlobGet(ctx context.Context, ds string, filePath string) int {
	cfg := config.NewResolver()
	var blob types.Blob
	if db, val, err := cfg.GetPath(ctx, ds); err != nil {
		d.CheckErrorNoUsage(err)
	} else if val == nil {
		d.CheckErrorNoUsage(fmt.Errorf("No value at %s", ds))
	} else if b, ok := val.(types.Blob); !ok {
		d.CheckErrorNoUsage(fmt.Errorf("Value at %s is not a blob", ds))
	} else {
		defer db.Close()
		blob = b
	}

	defer profile.MaybeStartProfile().Stop()

	if filePath == "" {
		blob.Copy(ctx, os.Stdout)
		return 0
	}

	// Note: overwrites any existing file.
	file, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE, 0644)
	d.CheckErrorNoUsage(err)
	defer file.Close()

	start := time.Now()
	expected := humanize.Bytes(blob.Len())

	// Create a pipe so that we can connect a progress reader
	preader, pwriter := io.Pipe()
	go func() {
		blob.Copy(ctx, pwriter)
		pwriter.Close()
	}()

	blobReader := progressreader.New(preader, func(seen uint64) {
		elapsed := time.Since(start).Seconds()
		rate := uint64(float64(seen) / elapsed)
		status.Printf("%s of %s written in %ds (%s/s)...", humanize.Bytes(seen), expected, int(elapsed), humanize.Bytes(rate))
	})

	// Fix: the copy's error was previously discarded, so a failed or partial
	// write still reported success.
	_, err = io.Copy(file, blobReader)
	d.CheckErrorNoUsage(err)
	status.Done()
	return 0
}
-53
View File
@@ -1,53 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"bytes"
"context"
"fmt"
"io/ioutil"
"path/filepath"
"testing"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/clienttest"
"github.com/stretchr/testify/suite"
)
// TestNomsBlobGet runs the blob-export test suite.
func TestNomsBlobGet(t *testing.T) {
	suite.Run(t, &nbeSuite{})
}

// nbeSuite gets a scratch database directory (TempDir) and a MustRun helper
// from clienttest.ClientTestSuite.
type nbeSuite struct {
	clienttest.ClientTestSuite
}

// TestNomsBlobGet commits a small blob, then exports it twice — to a file and
// to stdout — verifying the round-tripped bytes both times.
func (s *nbeSuite) TestNomsBlobGet() {
	sp, err := spec.ForDatabase(s.TempDir)
	s.NoError(err)
	defer sp.Close()
	db := sp.GetDatabase(context.Background())

	blobBytes := []byte("hello")
	blob := types.NewBlob(context.Background(), db, bytes.NewBuffer(blobBytes))

	ref := db.WriteValue(context.Background(), blob)
	_, err = db.CommitValue(context.Background(), db.GetDataset(context.Background(), "datasetID"), ref)
	s.NoError(err)

	hashSpec := fmt.Sprintf("%s::#%s", s.TempDir, ref.TargetHash().String())
	filePath := filepath.Join(s.TempDir, "out")

	// Export to a file and compare contents.
	s.MustRun(main, []string{"blob", "export", hashSpec, filePath})
	fileBytes, err := ioutil.ReadFile(filePath)
	s.NoError(err)
	s.Equal(blobBytes, fileBytes)

	// Export to stdout (no file argument) and compare the captured output.
	stdout, _ := s.MustRun(main, []string{"blob", "export", hashSpec})
	fmt.Println("stdout:", stdout)
	s.Equal(blobBytes, []byte(stdout))
}
-64
View File
@@ -1,64 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"errors"
"fmt"
"io"
"os"
"github.com/attic-labs/noms/go/config"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/profile"
)
// nomsBlobPut implements "noms blob put": it imports the file at filePath into
// the dataset at dsPath as a Blob, reading the file through up to
// `concurrency` parallel sections of at least 1MB each. Returns a process
// exit code.
func nomsBlobPut(ctx context.Context, filePath string, dsPath string, concurrency int) int {
	info, err := os.Stat(filePath)
	if err != nil {
		d.CheckError(errors.New("couldn't stat file"))
	}

	defer profile.MaybeStartProfile().Stop()

	fileSize := info.Size()
	chunkSize := fileSize / int64(concurrency)
	if chunkSize < (1 << 20) {
		chunkSize = 1 << 20
	}

	// Fix: for files smaller than chunkSize, fileSize/chunkSize is 0, which
	// previously produced zero readers and silently committed an empty blob.
	numChunks := fileSize / chunkSize
	if numChunks < 1 {
		numChunks = 1
	}

	readers := make([]io.Reader, numChunks)
	for i := 0; i < len(readers); i++ {
		r, err := os.Open(filePath)
		d.CheckErrorNoUsage(err)
		defer r.Close()
		// Fix: Seek's error was previously ignored.
		_, err = r.Seek(int64(i)*chunkSize, 0)
		d.CheckErrorNoUsage(err)
		limit := chunkSize
		if i == len(readers)-1 {
			limit += fileSize % chunkSize // adjust size of last slice to include the final bytes.
		}
		lr := io.LimitReader(r, limit)
		readers[i] = lr
	}

	cfg := config.NewResolver()
	db, ds, err := cfg.GetDataset(ctx, dsPath)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Could not create dataset: %s\n", err)
		return 1
	}
	defer db.Close()

	blob := types.NewBlob(ctx, db, readers...)

	_, err = db.CommitValue(ctx, ds, blob)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error committing: %s\n", err)
		return 1
	}
	return 0
}
-358
View File
@@ -1,358 +0,0 @@
package main
import (
"context"
"encoding/base32"
"encoding/binary"
"encoding/hex"
"fmt"
"hash/crc32"
"io"
"io/ioutil"
"os"
"path/filepath"
"strconv"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/chunks"
"github.com/attic-labs/noms/go/types"
"github.com/golang/snappy"
flag "github.com/juju/gnuflag"
"github.com/attic-labs/noms/cmd/util"
)
// Sizes (in bytes) of the fixed-width fields of the nbs table file format,
// which runCat parses back-to-front.
const (
	u64Size        = 8
	u32Size        = 4
	crcSize        = u32Size
	prefixSize     = u64Size
	ordinalSize    = u32Size
	chunkSizeSize  = u32Size
	// suffixSize covers the rest of the 20-byte hash after the 8-byte prefix
	// (see the [20]byte hash assembly in runCat).
	suffixSize     = 12
	chunkCntSize   = u32Size
	totalUncmpSize = u64Size
	magicSize      = u64Size

	// magicNumber is the sentinel expected at the very end of a valid nbs file.
	magicNumber uint64 = 0xffb5d8c22463ee50
)

// Flag values for the cat command, populated by setupCatFlags.
var (
	catRaw        = false
	catDecomp     = false
	catNoShow     = false
	catHashesOnly = false
)
// nomsCat is the legacy command descriptor for "noms cat".
var nomsCat = &util.Command{
	Run:       runCat,
	UsageLine: "cat <file>",
	Short:     "Print the contents of a chunk file",
	Long:      "Print the contents of a chunk file",
	Flags:     setupCatFlags,
	Nargs:     1,
}

// setupCatFlags registers cat's flags, bound to the catXxx package variables above.
func setupCatFlags() *flag.FlagSet {
	catFlagSet := flag.NewFlagSet("cat", flag.ExitOnError)
	catFlagSet.BoolVar(&catRaw, "raw", false, "If true, includes the raw binary version of each chunk in the nbs file")
	catFlagSet.BoolVar(&catNoShow, "no-show", false, "If true, skips printing of the value")
	catFlagSet.BoolVar(&catHashesOnly, "hashes-only", false, "If true, only prints the b32 hashes")
	catFlagSet.BoolVar(&catDecomp, "decompressed", false, "If true, includes the decompressed binary version of each chunk in the nbs file")
	return catFlagSet
}
// footer is the decoded fixed-size trailer of an nbs file.
type footer struct {
	chunkCnt   uint32 // number of chunks in the file
	uncompSize uint64 // total uncompressed size of all chunks
	magicMatch bool   // whether the trailing bytes matched magicNumber
}

// prefixIndex pairs the first 8 bytes of a chunk's hash with the chunk's ordinal.
type prefixIndex struct {
	hashPrefix []byte
	chunkIndex uint32
}

// chunkData holds one chunk's raw record plus its snappy-decoded payload.
type chunkData struct {
	compressed    []byte
	uncompressed  []byte
	dataOffset    uint64 // byte offset of the chunk record within the file
	crc           uint32 // crc stored alongside the compressed data
	decompSuccess bool   // whether snappy decoding succeeded
}
// runCat implements "noms cat": it parses an nbs chunk file from back to front
// (footer, hash suffixes, chunk sizes, prefix indices, then chunk records) and
// prints a per-chunk report. Returns a process exit code.
func runCat(ctx context.Context, args []string) int {
	if len(args) < 1 {
		fmt.Fprintln(os.Stderr, "Not enough arguments")
		// Fix: this error path previously returned 0, signaling success.
		return 1
	}

	chunkFile := args[0]
	_, err := os.Stat(chunkFile)
	if err != nil {
		fmt.Fprintln(os.Stderr, chunkFile+" does not exist")
		return 1
	}

	fileBytes, err := ioutil.ReadFile(chunkFile)
	if err != nil {
		fmt.Fprintln(os.Stderr, "Failed to read "+chunkFile, err)
		return 1
	}

	//read the file backwards
	pos := len(fileBytes)
	pos, footer := parseFooter(fileBytes, pos)
	pos, suffixes := parseChunkSuffixes(fileBytes, pos, int(footer.chunkCnt))
	pos, sizes := parseChunkSizes(fileBytes, pos, int(footer.chunkCnt))
	pos, pi := parsePrefixIndices(fileBytes, pos, int(footer.chunkCnt))
	pos, cd := parseChunks(fileBytes, pos, sizes)

	fmt.Println("Info for file", chunkFile+":")
	fmt.Printf("    chunk count:                   %d\n", footer.chunkCnt)
	fmt.Printf("    total uncompressed chunk size: %d\n", footer.uncompSize)
	fmt.Printf("    magic number matches:          %t\n", footer.magicMatch)
	fmt.Println()
	fmt.Println("Prefix Indices:")
	for i, currPI := range pi {
		// Reassemble the full 20-byte hash from the 8-byte prefix and the
		// 12-byte suffix of the referenced chunk.
		var hashData [20]byte
		cidx := currPI.chunkIndex
		copy(hashData[:], currPI.hashPrefix)
		copy(hashData[prefixSize:], suffixes[cidx])
		b32Hash := b32Str(hashData[:])
		currCD := cd[cidx]
		if catHashesOnly {
			fmt.Println("hash:", b32Hash, "offset:", currCD.dataOffset, "size:", len(currCD.compressed))
			continue
		}
		fmt.Printf("    prefixIndex[%d].hash:        (HEX) %s     (B32) %s\n", i, hexStr(hashData[:]), b32Hash)
		fmt.Printf("    prefixIndex[%d].hash.prefix: (HEX) %s\n", i, hexStr(currPI.hashPrefix))
		fmt.Printf("    prefixIndex[%d].hash.suffix: (HEX) %s\n", i, hexStr(suffixes[cidx]))
		fmt.Println()
		fmt.Printf("    prefixIndex[%d] references chunk[%d]:\n", i, cidx)
		chunk := chunks.NewChunkWithHash(hashData, currCD.uncompressed)
		//Want a clean db every loop
		sp, _ := spec.ForDatabase("mem")
		db := sp.GetDatabase(ctx)
		value := types.DecodeValue(chunk, db)
		fmt.Printf("        chunk[%d].raw.len:    %d\n", cidx, len(currCD.compressed))
		if catRaw {
			fmt.Printf("        chunk[%d].raw.crc:    %08x\n", cidx, currCD.crc)
			fmt.Printf("        chunk[%d].raw.data:\n", cidx)
			fmt.Println(hexView(currCD.compressed, "            "))
		}
		fmt.Printf("        chunk[%d].decomp.len: %d\n", cidx, len(currCD.uncompressed))
		if catDecomp {
			fmt.Printf("        chunk[%d].decomp.data:\n", cidx)
			fmt.Println(hexView(currCD.uncompressed, "            "))
		}
		if !catNoShow {
			fmt.Printf("        chunk[%d].value.kind: %s\n", cidx, value.Kind())
			fmt.Printf("        chunk[%d].value:\n\n", cidx)
			printValue(ctx, os.Stdout, value, filepath.Dir(chunkFile)+"::#"+b32Hash)
			fmt.Println()
		}
		fmt.Println()
	}
	if pos != 0 {
		panic("Didn't read the whole file")
	}
	return 0
}
// parseFooter decodes the fixed-size trailer of an nbs file, reading backwards
// from pos: ...[chunk count:4][total uncompressed size:8][magic:8]. It returns
// the position of the first footer byte along with the decoded footer.
func parseFooter(b []byte, pos int) (int, footer) {
	pos -= magicSize
	magic := binary.BigEndian.Uint64(b[pos : pos+magicSize])
	pos -= totalUncmpSize
	totalUncomp := binary.BigEndian.Uint64(b[pos : pos+totalUncmpSize])
	pos -= chunkCntSize
	cnt := binary.BigEndian.Uint32(b[pos : pos+chunkCntSize])
	f := footer{
		chunkCnt:   cnt,
		uncompSize: totalUncomp,
		magicMatch: magic == magicNumber,
	}
	return pos, f
}
// parsePrefixIndices decodes the numChunks [hash prefix:8][ordinal:4] records
// that end at pos, reading backwards through the buffer but returning the
// indices in file order. Returns the position of the first record byte.
func parsePrefixIndices(b []byte, pos, numChunks int) (int, []prefixIndex) {
	indices := make([]prefixIndex, numChunks)
	// Walk records back-to-front, writing each straight into its final slot.
	for i := numChunks - 1; i >= 0; i-- {
		pos -= ordinalSize
		ordinal := binary.BigEndian.Uint32(b[pos : pos+ordinalSize])
		pos -= prefixSize
		indices[i] = prefixIndex{
			hashPrefix: b[pos : pos+prefixSize],
			chunkIndex: ordinal,
		}
	}
	return pos, indices
}
// parseChunkSuffixes slices out the numChunks 12-byte hash suffixes that end
// at pos, in file order. Returns the position of the first suffix byte.
func parseChunkSuffixes(b []byte, pos, numChunks int) (int, [][]byte) {
	pos -= suffixSize * numChunks
	suffixes := make([][]byte, numChunks)
	for i := range suffixes {
		off := pos + i*suffixSize
		suffixes[i] = b[off : off+suffixSize]
	}
	return pos, suffixes
}
// parseChunkSizes decodes the numChunks big-endian uint32 chunk-record sizes
// that end at pos, in file order. Returns the position of the first size byte.
func parseChunkSizes(b []byte, pos, numChunks int) (int, []int) {
	pos -= chunkSizeSize * numChunks
	sizes := make([]int, numChunks)
	for i := range sizes {
		off := pos + i*chunkSizeSize
		sizes[i] = int(binary.BigEndian.Uint32(b[off : off+chunkSizeSize]))
	}
	return pos, sizes
}
// parseChunks walks the chunk records backwards from pos. sizes[i] is the
// total record size of chunk i in file order; each record is laid out as
// [compressed data][crc:4]. Panics if a stored CRC does not match the data.
// Returns the position of the first record byte and the chunks in file order.
func parseChunks(bytes []byte, pos int, sizes []int) (int, []chunkData) {
	var crcs []uint32
	var offsets []uint64
	var chunkBytes [][]byte
	// First pass: read records back-to-front, so index 0 of these slices
	// holds the LAST chunk in the file.
	for i := 0; i < len(sizes); i++ {
		size := sizes[len(sizes)-i-1]
		crcBytes := bytes[pos-crcSize : pos]
		offset := uint64(pos - size)
		dataBytes := bytes[offset : pos-crcSize]
		pos -= size
		crcValInFile := binary.BigEndian.Uint32(crcBytes)
		crcOfData := crc(dataBytes)
		if crcValInFile != crcOfData {
			panic("CRC MISMATCH!!!")
		}
		chunkBytes = append(chunkBytes, dataBytes)
		crcs = append(crcs, crcValInFile)
		offsets = append(offsets, offset)
	}
	// Second pass: reverse back into file order while snappy-decompressing.
	var cd []chunkData
	for i := len(sizes) - 1; i >= 0; i-- {
		uncompressed, err := snappy.Decode(nil, chunkBytes[i])
		cd = append(cd, chunkData{
			compressed:    chunkBytes[i],
			uncompressed:  uncompressed,
			crc:           crcs[i],
			dataOffset:    offsets[i],
			decompSuccess: err == nil,
		})
	}
	return pos, cd
}
// printValue writes the human-readable encoding of v to w. Decoding/encoding
// corrupt values can panic; the deferred recover turns that into an error
// message naming valSpec instead of aborting the whole cat run.
func printValue(ctx context.Context, w io.Writer, v types.Value, valSpec string) {
	defer func() {
		if r := recover(); r != nil {
			msg := " Failed to write the value " + valSpec + "\n"
			io.WriteString(w, msg)
		}
	}()
	types.WriteEncodedValue(ctx, w, v)
}
// hexStr returns the lowercase hexadecimal encoding of b.
func hexStr(b []byte) string {
	return hex.EncodeToString(b)
}
// bytesPerRow is the number of bytes rendered on each hex-dump line.
const bytesPerRow = 16

// hexView renders b as a multi-line hex dump, prefixing every line with
// indent. Each line ends with a newline.
func hexView(b []byte, indent string) string {
	out := ""
	for start := 0; start < len(b); start += bytesPerRow {
		end := start + bytesPerRow
		if end > len(b) {
			end = len(b)
		}
		out += indent + hexViewRow(start, b[start:end]) + "\n"
	}
	return out
}

// hexViewRow formats one dump line: a 4-digit hex address, the bytes as
// space-grouped hex words padded to a fixed width, then a printable-ASCII
// rendering (non-printables shown as '.').
func hexViewRow(firstByteIndex int, row []byte) string {
	addr := fmt.Sprintf("%04x", firstByteIndex)

	words := ""
	for i, v := range row {
		words += fmt.Sprintf("%02x", v)
		if i%2 == 1 {
			words += " " // space between 2-byte words
		}
		if i%8 == 7 {
			words += " " // extra space every 8 bytes
		}
	}

	// Width of a full row of hex words: 2 chars/byte + one space per word +
	// one extra space per 8-byte group.
	hexWidth := bytesPerRow*2 + bytesPerRow/2 + bytesPerRow/8

	chars := make([]byte, len(row))
	for i, v := range row {
		if v >= 32 && v <= 126 {
			chars[i] = v
		} else {
			chars[i] = '.'
		}
	}

	formatStr := `%s: %-` + strconv.Itoa(hexWidth) + `s %s`
	return fmt.Sprintf(formatStr, addr, words, chars)
}
// b32encoder uses the noms hash alphabet (digits then lowercase letters)
// rather than the standard RFC 4648 alphabet.
var b32encoder = base32.NewEncoding("0123456789abcdefghijklmnopqrstuv")

// b32Str returns the noms-alphabet base32 encoding of b.
func b32Str(b []byte) string {
	return b32encoder.EncodeToString(b)
}
// crcTable precomputes the CRC-32C (Castagnoli) table once at init.
var crcTable = crc32.MakeTable(crc32.Castagnoli)

// crc returns the CRC-32C checksum of b.
func crc(b []byte) uint32 {
	return crc32.Checksum(b, crcTable)
}
-85
View File
@@ -1,85 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"bufio"
"context"
"errors"
"fmt"
"os"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/config"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/util/verbose"
flag "github.com/juju/gnuflag"
)
// allowDupe is bound to the -allow-dupe flag in setupCommitFlags.
var allowDupe bool

// nomsCommit is the legacy command descriptor for "noms commit".
var nomsCommit = &util.Command{
	Run:       runCommit,
	UsageLine: "commit [options] [absolute-path] <dataset>",
	Short:     "Commits a specified value as head of the dataset",
	Long:      "If absolute-path is not provided, then it is read from stdin. See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the dataset and absolute-path arguments.",
	Flags:     setupCommitFlags,
	Nargs:     1, // if absolute-path not present we read it from stdin
}

// setupCommitFlags registers commit's own flag plus the shared commit-meta and
// verbosity flags.
func setupCommitFlags() *flag.FlagSet {
	commitFlagSet := flag.NewFlagSet("commit", flag.ExitOnError)
	commitFlagSet.BoolVar(&allowDupe, "allow-dupe", false, "creates a new commit, even if it would be identical (modulo metadata and parents) to the existing HEAD.")
	spec.RegisterCommitMetaFlags(commitFlagSet)
	verbose.RegisterVerboseFlags(commitFlagSet)
	return commitFlagSet
}
// runCommit commits the value at an absolute path as the new head of the
// dataset named by the last argument. When no path argument is given, a
// single line is read from stdin instead. Returns a process exit code.
func runCommit(ctx context.Context, args []string) int {
	cfg := config.NewResolver()
	// The dataset spec is always the last argument.
	db, ds, err := cfg.GetDataset(ctx, args[len(args)-1])
	d.CheckError(err)
	defer db.Close()

	var path string
	if len(args) == 2 {
		path = args[0]
	} else {
		// NOTE(review): isPrefix from ReadLine is discarded, so a path longer
		// than the reader's buffer would be silently truncated — verify.
		readPath, _, err := bufio.NewReader(os.Stdin).ReadLine()
		d.CheckError(err)
		path = string(readPath)
	}
	absPath, err := spec.NewAbsolutePath(path)
	d.CheckError(err)

	value := absPath.Resolve(ctx, db)
	if value == nil {
		// Plain concatenation instead of errors.New(fmt.Sprintf(...)).
		d.CheckErrorNoUsage(errors.New("Error resolving value: " + path))
	}

	// Unless --allow-dupe is set, refuse to create a commit whose value is
	// identical to the current head's value.
	oldCommitRef, oldCommitExists := ds.MaybeHeadRef()
	if oldCommitExists {
		head := ds.HeadValue()
		if head.Hash() == value.Hash() && !allowDupe {
			fmt.Fprintf(os.Stdout, "Commit aborted - allow-dupe is set to off and this commit would create a duplicate\n")
			return 0
		}
	}

	meta, err := spec.CreateCommitMetaStruct(ctx, db, "", "", nil, nil)
	d.CheckErrorNoUsage(err)

	ds, err = db.Commit(ctx, ds, value, datas.CommitOptions{Meta: meta})
	d.CheckErrorNoUsage(err)

	if oldCommitExists {
		fmt.Fprintf(os.Stdout, "New head #%v (was #%v)\n", ds.HeadRef().TargetHash().String(), oldCommitRef.TargetHash().String())
	} else {
		fmt.Fprintf(os.Stdout, "New head #%v\n", ds.HeadRef().TargetHash().String())
	}
	return 0
}
-217
View File
@@ -1,217 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"os"
"testing"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/clienttest"
"github.com/stretchr/testify/suite"
)
// nomsCommitTestSuite exercises the `noms commit` subcommand end to end.
type nomsCommitTestSuite struct {
	clienttest.ClientTestSuite
}

// TestNomsCommit runs the commit-command test suite.
func TestNomsCommit(t *testing.T) {
	suite.Run(t, &nomsCommitTestSuite{})
}
// setupDataset writes a small test value into a fresh dataset named name and,
// when doCommit is true, also commits it as the dataset head. It returns the
// dataset spec and a ref to the written value.
func (s *nomsCommitTestSuite) setupDataset(name string, doCommit bool) (spec.Spec, types.Ref) {
	ctx := context.Background()
	sp, err := spec.ForDataset(spec.CreateValueSpecString("nbs", s.DBDir, name))
	s.NoError(err)
	ref := sp.GetDatabase(ctx).WriteValue(ctx, types.String("testcommit"))
	if doCommit {
		_, err = sp.GetDatabase(ctx).CommitValue(ctx, sp.GetDataset(ctx), ref)
		s.NoError(err)
	}
	return sp, ref
}
// TestNomsCommitReadPathFromStdin verifies that when no path argument is
// given, the commit command reads the absolute path from stdin.
func (s *nomsCommitTestSuite) TestNomsCommitReadPathFromStdin() {
	sp, ref := s.setupDataset("commitTestStdin", false)
	defer sp.Close()

	_, ok := sp.GetDataset(context.Background()).MaybeHead()
	s.False(ok, "should not have a commit")

	// Temporarily replace os.Stdin with a pipe carrying the hash spec.
	oldStdin := os.Stdin
	newStdin, stdinWriter, err := os.Pipe()
	s.NoError(err)

	os.Stdin = newStdin
	defer func() {
		os.Stdin = oldStdin
	}()

	go func() {
		stdinWriter.Write([]byte("#" + ref.TargetHash().String() + "\n"))
		stdinWriter.Close()
	}()

	stdoutString, stderrString := s.MustRun(main, []string{"commit", sp.String()})
	s.Empty(stderrString)
	s.Contains(stdoutString, "New head #")

	// Re-open the spec to observe the freshly written head.
	sp, _ = spec.ForDataset(sp.String())
	defer sp.Close()
	commit, ok := sp.GetDataset(context.Background()).MaybeHead()
	s.True(ok, "should have a commit now")
	value := commit.Get(datas.ValueField)
	s.True(value.Hash() == ref.TargetHash(), "commit.value hash == writevalue hash")

	meta := commit.Get(datas.MetaField).(types.Struct)
	s.NotEmpty(meta.Get("date"))
}
// TestNomsCommitToDatasetWithoutHead commits, by explicit hash argument, to a
// dataset that has no head yet.
func (s *nomsCommitTestSuite) TestNomsCommitToDatasetWithoutHead() {
	sp, ref := s.setupDataset("commitTest", false)
	defer sp.Close()

	_, ok := sp.GetDataset(context.Background()).MaybeHead()
	s.False(ok, "should not have a commit")

	stdoutString, stderrString := s.MustRun(main, []string{"commit", "#" + ref.TargetHash().String(), sp.String()})
	s.Empty(stderrString)
	s.Contains(stdoutString, "New head #")

	// Re-open the spec to observe the freshly written head.
	sp, _ = spec.ForDataset(sp.String())
	defer sp.Close()
	commit, ok := sp.GetDataset(context.Background()).MaybeHead()
	s.True(ok, "should have a commit now")
	value := commit.Get(datas.ValueField)
	s.True(value.Hash() == ref.TargetHash(), "commit.value hash == writevalue hash")

	meta := commit.Get(datas.MetaField).(types.Struct)
	s.NotEmpty(meta.Get("date"))
}
// structFieldEqual reports whether both structs carry field and the two
// values compare equal.
func structFieldEqual(old, now types.Struct, field string) bool {
	prev, prevOk := old.MaybeGet(field)
	cur, curOk := now.MaybeGet(field)
	if !prevOk || !curOk {
		return false
	}
	return cur.Equals(prev)
}
// runDuplicateTest commits the same value twice and checks that the second
// commit is rejected unless --allow-dupe is passed.
func (s *nomsCommitTestSuite) runDuplicateTest(allowDuplicate bool) {
	dsName := "commitTestDuplicate"
	sp, ref := s.setupDataset(dsName, true)
	defer sp.Close()

	_, ok := sp.GetDataset(context.Background()).MaybeHeadValue()
	s.True(ok, "should have a commit")

	cliOptions := []string{"commit"}
	if allowDuplicate {
		cliOptions = append(cliOptions, "--allow-dupe=1")
	}
	cliOptions = append(cliOptions, dsName+".value", sp.String())

	stdoutString, stderrString, err := s.Run(main, cliOptions)
	s.Nil(err)
	s.Empty(stderrString)
	if allowDuplicate {
		s.NotContains(stdoutString, "Commit aborted")
		s.Contains(stdoutString, "New head #")
	} else {
		s.Contains(stdoutString, "Commit aborted")
	}

	// Either way, the dataset head value must be unchanged.
	sp, _ = spec.ForDataset(sp.String())
	defer sp.Close()
	value, ok := sp.GetDataset(context.Background()).MaybeHeadValue()
	s.True(ok, "should still have a commit")
	s.True(value.Hash() == ref.Hash(), "commit.value hash == previous commit hash")
}
// TestNomsCommitDuplicate covers both the rejected duplicate and the
// --allow-dupe paths, in that order.
func (s *nomsCommitTestSuite) TestNomsCommitDuplicate() {
	for _, allow := range []bool{false, true} {
		s.runDuplicateTest(allow)
	}
}
// TestNomsCommitMetadata checks that --message and --meta/--date flags are
// recorded in the commit's meta struct and change it between commits.
func (s *nomsCommitTestSuite) TestNomsCommitMetadata() {
	dsName := "commitTestMetadata"
	sp, _ := s.setupDataset(dsName, true)
	defer sp.Close()

	metaOld := sp.GetDataset(context.Background()).Head().Get(datas.MetaField).(types.Struct)

	// First re-commit: set a message via --message.
	stdoutString, stderrString, err := s.Run(main, []string{"commit", "--allow-dupe=1", "--message=foo", dsName + ".value", sp.String()})
	s.Nil(err)
	s.Empty(stderrString)
	s.Contains(stdoutString, "New head #")

	sp, _ = spec.ForDataset(sp.String())
	defer sp.Close()
	metaNew := sp.GetDataset(context.Background()).Head().Get(datas.MetaField).(types.Struct)
	s.False(metaOld.Equals(metaNew), "meta didn't change")
	s.False(structFieldEqual(metaOld, metaNew, "date"), "date didn't change")
	s.False(structFieldEqual(metaOld, metaNew, "message"), "message didn't change")
	s.True(metaNew.Get("message").Equals(types.String("foo")), "message wasn't set")

	metaOld = metaNew

	// Second re-commit: set message via --meta and an explicit --date.
	stdoutString, stderrString = s.MustRun(main, []string{"commit", "--allow-dupe=1", "--meta=message=bar", "--date=" + spec.CommitMetaDateFormat[:20], dsName + ".value", sp.String()})
	s.Empty(stderrString)
	s.Contains(stdoutString, "New head #")

	sp, _ = spec.ForDataset(sp.String())
	defer sp.Close()
	metaNew = sp.GetDataset(context.Background()).Head().Get(datas.MetaField).(types.Struct)
	s.False(metaOld.Equals(metaNew), "meta didn't change")
	s.False(structFieldEqual(metaOld, metaNew, "date"), "date didn't change")
	s.False(structFieldEqual(metaOld, metaNew, "message"), "message didn't change")
	s.True(metaNew.Get("message").Equals(types.String("bar")), "message wasn't set")
}
// TestNomsCommitHashNotFound expects a panic when the hash to commit does not
// exist in the database.
func (s *nomsCommitTestSuite) TestNomsCommitHashNotFound() {
	sp, _ := s.setupDataset("commitTestBadHash", true)
	defer sp.Close()
	s.Panics(func() {
		s.MustRun(main, []string{"commit", "#9ei6fbrs0ujo51vifd3f2eebufo4lgdu", sp.String()})
	})
}
// TestNomsCommitMetadataBadDateFormat expects a panic when --date cannot be
// parsed.
func (s *nomsCommitTestSuite) TestNomsCommitMetadataBadDateFormat() {
	sp, ref := s.setupDataset("commitTestMetadata", true)
	defer sp.Close()
	s.Panics(func() {
		s.MustRun(main, []string{"commit", "--allow-dupe=1", "--date=a", "#" + ref.TargetHash().String(), sp.String()})
	})
}
// TestNomsCommitInvalidMetadataPaths expects a panic for a malformed
// --meta-p argument.
func (s *nomsCommitTestSuite) TestNomsCommitInvalidMetadataPaths() {
	sp, ref := s.setupDataset("commitTestMetadataPaths", true)
	defer sp.Close()
	s.Panics(func() {
		s.MustRun(main, []string{"commit", "--allow-dupe=1", "--meta-p=#beef", "#" + ref.TargetHash().String(), sp.String()})
	})
}
// TestNomsCommitInvalidMetadataFieldName expects a panic for a --meta key
// that is not a legal struct field name.
func (s *nomsCommitTestSuite) TestNomsCommitInvalidMetadataFieldName() {
	sp, ref := s.setupDataset("commitTestMetadataFields", true)
	defer sp.Close()
	s.Panics(func() {
		s.MustRun(main, []string{"commit", "--allow-dupe=1", "--meta=_foo=bar", "#" + ref.TargetHash().String(), sp.String()})
	})
}
-40
View File
@@ -1,40 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"os"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/config"
"github.com/attic-labs/noms/go/d"
flag "github.com/juju/gnuflag"
)
// nomsConfig describes the `noms config` subcommand, which prints the active
// configuration when a .nomsconfig file is present.
var nomsConfig = &util.Command{
	Run:       runConfig,
	UsageLine: "config ",
	Short:     "Display noms config info",
	Long:      "Prints the active configuration if a .nomsconfig file is present",
	Flags:     setupConfigFlags,
	Nargs:     0,
}

// setupConfigFlags returns the (empty) flag set for the config subcommand.
func setupConfigFlags() *flag.FlagSet {
	return flag.NewFlagSet("config", flag.ExitOnError)
}
// runConfig prints the active .nomsconfig, or a short note when none is
// found. Any other lookup error aborts via d.CheckError.
func runConfig(ctx context.Context, args []string) int {
	c, err := config.FindNomsConfig()
	switch {
	case err == config.ErrNoConfig:
		fmt.Fprintf(os.Stdout, "no config active\n")
	default:
		d.CheckError(err)
		fmt.Fprintf(os.Stdout, "%s\n", c.String())
	}
	return 0
}
-66
View File
@@ -1,66 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/config"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/diff"
"github.com/attic-labs/noms/go/util/outputpager"
"github.com/attic-labs/noms/go/util/verbose"
flag "github.com/juju/gnuflag"
)
// stat is set by the --stat flag; when true, only a summary of the changes is
// printed instead of the full diff.
var stat bool

// nomsDiff describes the `noms diff` subcommand, which compares two objects.
var nomsDiff = &util.Command{
	Run:       runDiff,
	UsageLine: "diff [--stat] <object1> <object2>",
	Short:     "Shows the difference between two objects",
	Long:      "See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the object arguments.",
	Flags:     setupDiffFlags,
	Nargs:     2,
}

// setupDiffFlags builds the flag set for the diff subcommand.
func setupDiffFlags() *flag.FlagSet {
	diffFlagSet := flag.NewFlagSet("diff", flag.ExitOnError)
	diffFlagSet.BoolVar(&stat, "stat", false, "Writes a summary of the changes instead")
	outputpager.RegisterOutputpagerFlags(diffFlagSet)
	verbose.RegisterVerboseFlags(diffFlagSet)
	return diffFlagSet
}
// runDiff resolves both object specs and prints either a change summary
// (--stat) or the full paged diff. Returns a process exit code.
func runDiff(ctx context.Context, args []string) int {
	cfg := config.NewResolver()

	leftDb, leftVal, err := cfg.GetPath(ctx, args[0])
	d.CheckErrorNoUsage(err)
	if leftVal == nil {
		d.CheckErrorNoUsage(fmt.Errorf("Object not found: %s", args[0]))
	}
	defer leftDb.Close()

	rightDb, rightVal, err := cfg.GetPath(ctx, args[1])
	d.CheckErrorNoUsage(err)
	if rightVal == nil {
		d.CheckErrorNoUsage(fmt.Errorf("Object not found: %s", args[1]))
	}
	defer rightDb.Close()

	if stat {
		diff.Summary(ctx, leftVal, rightVal)
	} else {
		pgr := outputpager.Start()
		defer pgr.Stop()
		diff.PrintDiff(ctx, pgr.Writer, leftVal, rightVal, false)
	}
	return 0
}
-76
View File
@@ -1,76 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"testing"
"strings"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/clienttest"
"github.com/stretchr/testify/suite"
)
// nomsDiffTestSuite exercises the `noms diff` subcommand.
type nomsDiffTestSuite struct {
	clienttest.ClientTestSuite
}

// TestNomsDiff runs the diff-command test suite.
func TestNomsDiff(t *testing.T) {
	suite.Run(t, &nomsDiffTestSuite{})
}
// TestNomsDiffOutputNotTruncated diffs two commits and checks the full diff
// output is written through to the trailing closing brace.
func (s *nomsDiffTestSuite) TestNomsDiffOutputNotTruncated() {
	sp, err := spec.ForDataset(spec.CreateValueSpecString("nbs", s.DBDir, "diffTest"))
	s.NoError(err)
	defer sp.Close()

	ds, err := addCommit(sp.GetDataset(context.Background()), "first commit")
	s.NoError(err)
	r1 := spec.CreateValueSpecString("nbs", s.DBDir, "#"+ds.HeadRef().TargetHash().String())

	ds, err = addCommit(ds, "second commit")
	s.NoError(err)
	r2 := spec.CreateValueSpecString("nbs", s.DBDir, "#"+ds.HeadRef().TargetHash().String())

	out, _ := s.MustRun(main, []string{"diff", r1, r2})
	s.True(strings.HasSuffix(out, "\"second commit\"\n }\n"), out)
}
// TestNomsDiffStat checks the --stat summary for commit-to-commit diffs and
// for plain value diffs (lists of different lengths).
func (s *nomsDiffTestSuite) TestNomsDiffStat() {
	sp, err := spec.ForDataset(spec.CreateValueSpecString("nbs", s.DBDir, "diffStatTest"))
	s.NoError(err)
	defer sp.Close()
	db := sp.GetDatabase(context.Background())

	ds, err := addCommit(sp.GetDataset(context.Background()), "first commit")
	s.NoError(err)
	r1 := spec.CreateHashSpecString("nbs", s.DBDir, ds.HeadRef().TargetHash())

	ds, err = addCommit(ds, "second commit")
	s.NoError(err)
	r2 := spec.CreateHashSpecString("nbs", s.DBDir, ds.HeadRef().TargetHash())

	// Commit specs get the "Comparing commit values" banner ...
	out, _ := s.MustRun(main, []string{"diff", "--stat", r1, r2})
	s.Contains(out, "Comparing commit values")
	s.Contains(out, "1 insertion (100.00%), 1 deletion (100.00%), 0 changes (0.00%), (1 value vs 1 value)")

	// ... while .value paths do not.
	out, _ = s.MustRun(main, []string{"diff", "--stat", r1 + ".value", r2 + ".value"})
	s.NotContains(out, "Comparing commit values")

	ds, err = db.CommitValue(context.Background(), ds, types.NewList(context.Background(), db, types.Float(1), types.Float(2), types.Float(3), types.Float(4)))
	s.NoError(err)
	r3 := spec.CreateHashSpecString("nbs", s.DBDir, ds.HeadRef().TargetHash()) + ".value"

	ds, err = db.CommitValue(context.Background(), ds, types.NewList(context.Background(), db, types.Float(1), types.Float(222), types.Float(4)))
	s.NoError(err)
	r4 := spec.CreateHashSpecString("nbs", s.DBDir, ds.HeadRef().TargetHash()) + ".value"

	out, _ = s.MustRun(main, []string{"diff", "--stat", r3, r4})
	s.Contains(out, "1 insertion (25.00%), 2 deletions (50.00%), 0 changes (0.00%), (4 values vs 3 values)")
}
-67
View File
@@ -1,67 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/config"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/verbose"
flag "github.com/juju/gnuflag"
)
// toDelete is set by the -d flag; when non-empty, the named dataset is
// deleted instead of listing datasets.
var toDelete string

// nomsDs describes the `noms ds` subcommand, which lists or deletes datasets.
var nomsDs = &util.Command{
	Run:       runDs,
	UsageLine: "ds [<database> | -d <dataset>]",
	Short:     "Noms dataset management",
	Long:      "See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the database and dataset arguments.",
	Flags:     setupDsFlags,
	Nargs:     0,
}

// setupDsFlags builds the flag set for the ds subcommand.
func setupDsFlags() *flag.FlagSet {
	dsFlagSet := flag.NewFlagSet("ds", flag.ExitOnError)
	dsFlagSet.StringVar(&toDelete, "d", "", "dataset to delete")
	verbose.RegisterVerboseFlags(dsFlagSet)
	return dsFlagSet
}
// runDs deletes the dataset named by -d, or lists the datasets in the given
// (or default) database. Returns a process exit code.
func runDs(ctx context.Context, args []string) int {
	cfg := config.NewResolver()

	if toDelete != "" {
		db, ds, err := cfg.GetDataset(ctx, toDelete)
		d.CheckError(err)
		defer db.Close()

		headRef, hasHead := ds.MaybeHeadRef()
		if !hasHead {
			d.CheckError(fmt.Errorf("Dataset %v not found", ds.ID()))
		}

		_, err = ds.Database().Delete(ctx, ds)
		d.CheckError(err)
		fmt.Printf("Deleted %v (was #%v)\n", toDelete, headRef.TargetHash().String())
		return 0
	}

	dbSpec := ""
	if len(args) >= 1 {
		dbSpec = args[0]
	}
	store, err := cfg.GetDatabase(ctx, dbSpec)
	d.CheckError(err)
	defer store.Close()

	// Print one dataset id per line.
	store.Datasets(ctx).IterAll(ctx, func(k, v types.Value) {
		fmt.Println(k)
	})
	return 0
}
-86
View File
@@ -1,86 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"testing"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/nbs"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/clienttest"
"github.com/stretchr/testify/suite"
)
// TestDs runs the ds-command test suite.
func TestDs(t *testing.T) {
	suite.Run(t, &nomsDsTestSuite{})
}

// nomsDsTestSuite exercises the `noms ds` subcommand.
type nomsDsTestSuite struct {
	clienttest.ClientTestSuite
}
// TestEmptyNomsDs checks that listing datasets in an empty database prints
// nothing.
func (s *nomsDsTestSuite) TestEmptyNomsDs() {
	dir := s.DBDir
	cs := nbs.NewLocalStore(context.Background(), dir, clienttest.DefaultMemTableSize)
	ds := datas.NewDatabase(cs)
	ds.Close()
	dbSpec := spec.CreateDatabaseSpecString("nbs", dir)
	rtnVal, _ := s.MustRun(main, []string{"ds", dbSpec})
	s.Equal("", rtnVal)
}
// TestNomsDs creates two datasets, lists them, and deletes them one at a time,
// checking output after each step.
// NOTE(review): the expected commit hashes below are pinned to the current
// serialization format — they will change if the encoding changes.
func (s *nomsDsTestSuite) TestNomsDs() {
	dir := s.DBDir
	cs := nbs.NewLocalStore(context.Background(), dir, clienttest.DefaultMemTableSize)
	db := datas.NewDatabase(cs)

	id := "testdataset"
	set := db.GetDataset(context.Background(), id)
	set, err := db.CommitValue(context.Background(), set, types.String("Commit Value"))
	s.NoError(err)

	id2 := "testdataset2"
	set2 := db.GetDataset(context.Background(), id2)
	set2, err = db.CommitValue(context.Background(), set2, types.String("Commit Value2"))
	s.NoError(err)

	err = db.Close()
	s.NoError(err)

	dbSpec := spec.CreateDatabaseSpecString("nbs", dir)
	datasetName := spec.CreateValueSpecString("nbs", dir, id)
	dataset2Name := spec.CreateValueSpecString("nbs", dir, id2)

	// both datasets show up
	rtnVal, _ := s.MustRun(main, []string{"ds", dbSpec})
	s.Equal(id+"\n"+id2+"\n", rtnVal)

	// both datasets again, to make sure printing doesn't change them
	rtnVal, _ = s.MustRun(main, []string{"ds", dbSpec})
	s.Equal(id+"\n"+id2+"\n", rtnVal)

	// delete one dataset, print message at delete
	rtnVal, _ = s.MustRun(main, []string{"ds", "-d", datasetName})
	s.Equal("Deleted "+datasetName+" (was #ld4fuj44sd4gu0pepn7h5hga72282v81)\n", rtnVal)

	// print datasets, just one left
	rtnVal, _ = s.MustRun(main, []string{"ds", dbSpec})
	s.Equal(id2+"\n", rtnVal)

	// delete the second dataset
	rtnVal, _ = s.MustRun(main, []string{"ds", "-d", dataset2Name})
	s.Equal("Deleted "+dataset2Name+" (was #43qqlvkiainn1jf53g705622nndu1bje)\n", rtnVal)

	// print datasets, none left
	rtnVal, _ = s.MustRun(main, []string{"ds", dbSpec})
	s.Equal("", rtnVal)
}
-121
View File
@@ -1,121 +0,0 @@
// Copyright 2018 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"github.com/attic-labs/kingpin"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/diff"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
)
// nomsList wires up the `noms list` command family (new/append/insert/del)
// and returns the clause plus a handler that dispatches on the parsed
// subcommand name.
func nomsList(ctx context.Context, noms *kingpin.Application) (*kingpin.CmdClause, util.KingpinHandler) {
	list := noms.Command("list", "interact with lists")

	// list new <database> [items...]
	listNew := list.Command("new", "creates a new list")
	newDb := listNew.Arg("database", "spec to db to create list within").Required().String()
	newEntries := listNew.Arg("items", "items to insert").Strings()

	// list append <spec> [items...]
	listAppend := list.Command("append", "appends one or more items to a list")
	appendSpec := listAppend.Arg("spec", "value spec for the list to edit").Required().String()
	appendEntries := listAppend.Arg("items", "items to insert").Strings()

	// list insert <pos> <spec> [items...]
	listInsert := list.Command("insert", "inserts one or more items into a list")
	insertAt := listInsert.Arg("pos", "position to insert new items at").Required().Uint64()
	insertSpec := listInsert.Arg("spec", "value spec for the list to edit").Required().String()
	insertEntries := listInsert.Arg("items", "items to insert").Strings()

	// list del <spec> <pos> <len>
	listDel := list.Command("del", "removes one or more items from the list")
	delSpec := listDel.Arg("spec", "value spec for the list to edit").Required().String()
	delPos := listDel.Arg("pos", "index to remove items at").Required().Uint64()
	delLen := listDel.Arg("len", "number of items to remove").Required().Uint64()

	return list, func(input string) int {
		switch input {
		case listNew.FullCommand():
			return nomsListNew(ctx, *newDb, *newEntries)
		case listAppend.FullCommand():
			return nomsListAppend(ctx, *appendSpec, *appendEntries)
		case listInsert.FullCommand():
			return nomsListInsert(ctx, *insertSpec, *insertAt, *insertEntries)
		case listDel.FullCommand():
			return nomsListDel(ctx, *delSpec, *delPos, *delLen)
		}
		d.Panic("notreached")
		return 1
	}
}
// nomsListNew creates an empty list in the database given by dbStr and
// inserts args starting at index 0. Returns a process exit code.
func nomsListNew(ctx context.Context, dbStr string, args []string) int {
	sp, err := spec.ForDatabase(dbStr)
	d.PanicIfError(err)
	db := sp.GetDatabase(ctx)
	applyListInserts(ctx, sp, types.NewList(ctx, db), nil, 0, args)
	return 0
}
// nomsListAppend inserts args at the end of the list addressed by specStr.
// Errors out if the spec does not resolve to a list.
func nomsListAppend(ctx context.Context, specStr string, args []string) int {
	sp, err := spec.ForPath(specStr)
	d.PanicIfError(err)
	rootVal, basePath := splitPath(ctx, sp)
	list, ok := rootVal.(types.List)
	if !ok {
		d.CheckErrorNoUsage(fmt.Errorf("%s is not a list", specStr))
		return 0
	}
	applyListInserts(ctx, sp, rootVal, basePath, list.Len(), args)
	return 0
}
// nomsListInsert inserts args into the list addressed by specStr, starting at
// position pos. Returns a process exit code.
func nomsListInsert(ctx context.Context, specStr string, pos uint64, args []string) int {
	sp, err := spec.ForPath(specStr)
	d.PanicIfError(err)
	root, base := splitPath(ctx, sp)
	applyListInserts(ctx, sp, root, base, pos, args)
	return 0
}
// nomsListDel removes count items from the list addressed by specStr,
// starting at index pos, by building and applying a removal patch.
// (Parameter renamed from `len`, which shadowed the builtin.)
func nomsListDel(ctx context.Context, specStr string, pos uint64, count uint64) int {
	sp, err := spec.ForPath(specStr)
	d.PanicIfError(err)

	rootVal, basePath := splitPath(ctx, sp)

	patch := diff.Patch{}
	// TODO: if count is large this will start to become problematic
	for i := pos; i < pos+count; i++ {
		// Copy basePath before extending it: appending to the shared slice in
		// a loop could otherwise make every patch entry alias the same
		// backing array, so later appends would clobber earlier paths.
		p := make(types.Path, len(basePath), len(basePath)+1)
		copy(p, basePath)
		patch = append(patch, diff.Difference{
			Path:       append(p, types.NewIndexPath(types.Float(i))),
			ChangeType: types.DiffChangeRemoved,
		})
	}

	// NOTE(review): appplyPatch (triple "p") is the helper's actual name
	// elsewhere in this file.
	appplyPatch(ctx, sp, rootVal, basePath, patch)
	return 0
}
// applyListInserts parses each of args into a value, builds an "added"
// diff.Patch that inserts them into the list under rootVal at basePath
// starting at index pos, and applies the patch.
func applyListInserts(ctx context.Context, sp spec.Spec, rootVal types.Value, basePath types.Path, pos uint64, args []string) {
	if rootVal == nil {
		d.CheckErrorNoUsage(fmt.Errorf("No value at: %s", sp.String()))
		return
	}
	db := sp.GetDatabase(ctx)
	patch := diff.Patch{}
	for i, arg := range args {
		vv, err := argumentToValue(ctx, arg, db)
		if err != nil {
			d.CheckError(fmt.Errorf("Invalid value: %s at position %d: %s", arg, i, err))
		}
		// Copy basePath before extending it: appending to the shared slice in
		// a loop could otherwise make every patch entry alias the same
		// backing array, so later appends would clobber earlier paths.
		p := make(types.Path, len(basePath), len(basePath)+1)
		copy(p, basePath)
		patch = append(patch, diff.Difference{
			Path:       append(p, types.NewIndexPath(types.Float(pos+uint64(i)))),
			ChangeType: types.DiffChangeAdded,
			NewValue:   vv,
		})
	}
	appplyPatch(ctx, sp, rootVal, basePath, patch)
}
-398
View File
@@ -1,398 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"bytes"
"context"
"errors"
"fmt"
"io"
"math"
"os"
"strings"
"time"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/config"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/diff"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/datetime"
"github.com/attic-labs/noms/go/util/functions"
"github.com/attic-labs/noms/go/util/outputpager"
"github.com/attic-labs/noms/go/util/verbose"
"github.com/attic-labs/noms/go/util/writers"
flag "github.com/juju/gnuflag"
"github.com/mgutz/ansi"
)
var (
	// useColor is computed at startup by shouldUseColor from the -color flag.
	useColor   = false
	color      int  // -color: 1 forces color on, 0 forces it off; anything else auto-detects
	maxLines   int  // -max-lines: max lines printed per commit (-1 for all)
	maxCommits int  // -n: max commits to display (0 for all)
	oneline    bool // -oneline: one summary line per commit
	showGraph  bool // -graph: draw ascii commit hierarchy on the left
	showValue  bool // -show-value: print commit value instead of diff info
)

// parallelism bounds how many commits runLog renders concurrently.
const parallelism = 16

// nomsLog describes the `noms log` subcommand, which displays the history of
// a path.
var nomsLog = &util.Command{
	Run:       runLog,
	UsageLine: "log [options] <path-spec>",
	Short:     "Displays the history of a path",
	Long:      "Displays the history of a path. See Spelling Values at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the <path-spec> parameter.",
	Flags:     setupLogFlags,
	Nargs:     1,
}
// setupLogFlags builds the flag set for the log subcommand.
func setupLogFlags() *flag.FlagSet {
	fs := flag.NewFlagSet("log", flag.ExitOnError)
	fs.IntVar(&color, "color", -1, "value of 1 forces color on, 0 forces color off")
	fs.IntVar(&maxLines, "max-lines", 9, "max number of lines to show per commit (-1 for all lines)")
	fs.IntVar(&maxCommits, "n", 0, "max number of commits to display (0 for all commits)")
	fs.BoolVar(&oneline, "oneline", false, "show a summary of each commit on a single line")
	fs.BoolVar(&showGraph, "graph", false, "show ascii-based commit hierarchy on left side of output")
	fs.BoolVar(&showValue, "show-value", false, "show commit value rather than diff information")
	fs.StringVar(&tzName, "tz", "local", "display formatted date comments in specified timezone, must be: local or utc")
	outputpager.RegisterOutputpagerFlags(fs)
	verbose.RegisterVerboseFlags(fs)
	return fs
}
// runLog prints the commit history reachable from the path-spec in args[0].
// Commits are rendered concurrently (bounded by parallelism) but emitted to
// the pager in iteration order. Returns a process exit code.
func runLog(ctx context.Context, args []string) int {
	useColor = shouldUseColor()
	cfg := config.NewResolver()
	tz, _ := locationFromTimezoneArg(tzName, nil)
	datetime.RegisterHRSCommenter(tz)

	resolved := cfg.ResolvePathSpec(args[0])
	sp, err := spec.ForPath(resolved)
	d.CheckErrorNoUsage(err)
	defer sp.Close()

	// Pin the spec so history is read from a fixed root hash.
	pinned, ok := sp.Pin(ctx)
	if !ok {
		fmt.Fprintf(os.Stderr, "Cannot resolve spec: %s\n", args[0])
		return 1
	}
	defer pinned.Close()
	database := pinned.GetDatabase(ctx)

	absPath := pinned.Path
	path := absPath.Path
	if len(path) == 0 {
		// Default to the commit's value when no sub-path is given.
		path = types.MustParsePath(".value")
	}

	origCommit, ok := database.ReadValue(ctx, absPath.Hash).(types.Struct)
	if !ok || !datas.IsCommit(origCommit) {
		d.CheckError(fmt.Errorf("%s does not reference a Commit object", args[0]))
	}

	iter := NewCommitIterator(database, origCommit)
	displayed := 0
	if maxCommits <= 0 {
		maxCommits = math.MaxInt32
	}

	// Producer: walk the commit graph; each commit is rendered into its own
	// channel so output order is preserved while rendering runs in parallel.
	bytesChan := make(chan chan []byte, parallelism)

	// NOTE(review): done is written by the consumer loop below and read by the
	// producer goroutine without synchronization — technically a data race;
	// verify with -race.
	var done = false

	go func() {
		for ln, ok := iter.Next(ctx); !done && ok && displayed < maxCommits; ln, ok = iter.Next(ctx) {
			ch := make(chan []byte)
			bytesChan <- ch

			go func(ch chan []byte, node LogNode) {
				buff := &bytes.Buffer{}
				printCommit(ctx, node, path, buff, database, tz)
				ch <- buff.Bytes()
			}(ch, ln)

			displayed++
		}
		close(bytesChan)
	}()

	pgr := outputpager.Start()
	defer pgr.Stop()

	// Consumer: copy rendered commits to the pager; on a write error (e.g.
	// the pager was closed), signal the producer to stop and drain the rest.
	for ch := range bytesChan {
		commitBuff := <-ch
		_, err := io.Copy(pgr.Writer, bytes.NewReader(commitBuff))
		if err != nil {
			done = true
			for range bytesChan {
				// drain the output
			}
		}
	}
	return 0
}
// Prints the information for one commit in the log, including ascii graph on left side of commits if
// -graph arg is true.
// Output: hash line, parent/merge line, meta fields, and then either the
// commit value (-show-value) or a diff against the first parent.
func printCommit(ctx context.Context, node LogNode, path types.Path, w io.Writer, db datas.Database, tz *time.Location) (err error) {
	// Widest meta field name, used to align the printed labels.
	maxMetaFieldNameLength := func(commit types.Struct) int {
		maxLen := 0
		if m, ok := commit.MaybeGet(datas.MetaField); ok {
			meta := m.(types.Struct)
			types.TypeOf(meta).Desc.(types.StructDesc).IterFields(func(name string, t *types.Type, optional bool) {
				maxLen = max(maxLen, len(name))
			})
		}
		return maxLen
	}

	hashStr := node.commit.Hash().String()
	if useColor {
		hashStr = ansi.Color("commit "+hashStr, "red+h")
	}

	maxFieldNameLen := maxMetaFieldNameLength(node.commit)

	// Label reads "Merge:" with all parent hashes when there is more than one
	// parent, otherwise "Parent:" with the single hash or "None".
	parentLabel := "Parent"
	parentValue := "None"
	parents := commitRefsFromSet(ctx, node.commit.Get(datas.ParentsField).(types.Set))
	if len(parents) > 1 {
		pstrings := make([]string, len(parents))
		for i, p := range parents {
			pstrings[i] = p.TargetHash().String()
		}
		parentLabel = "Merge"
		parentValue = strings.Join(pstrings, " ")
	} else if len(parents) == 1 {
		parentValue = parents[0].TargetHash().String()
	}

	if oneline {
		parentStr := fmt.Sprintf("%s %s", parentLabel+":", parentValue)
		fmt.Fprintf(w, "%s (%s)\n", hashStr, parentStr)
		return
	}

	maxFieldNameLen = max(maxFieldNameLen, len(parentLabel))
	parentStr := fmt.Sprintf("%-*s %s", maxFieldNameLen+1, parentLabel+":", parentValue)
	fmt.Fprintf(w, "%s%s\n", genGraph(node, 0), hashStr)
	fmt.Fprintf(w, "%s%s\n", genGraph(node, 1), parentStr)
	lineno := 1

	if maxLines != 0 {
		lineno, err = writeMetaLines(ctx, node, maxLines, lineno, maxFieldNameLen, w, tz)
		if err != nil && err != writers.MaxLinesErr {
			fmt.Fprintf(w, "error: %s\n", err)
			return
		}
		if showValue {
			_, err = writeCommitLines(ctx, node, path, maxLines, lineno, w, db)
		} else {
			_, err = writeDiffLines(ctx, node, path, db, maxLines, lineno, w)
		}
	}
	return
}
// Generates ascii graph chars to display on the left side of the commit info if -graph arg is true.
// lineno selects which row of the commit's multi-line rendering the prefix is
// for; row 0 carries the '*' marking the commit's own branch column.
func genGraph(node LogNode, lineno int) string {
	if !showGraph {
		return ""
	}

	// branchCount is the number of branches that we need to graph for this commit and determines the
	// length of prefix string. The string will change from line to line to indicate whether the new
	// branches are getting created or currently displayed branches need to be merged with other branches.
	// Normally we want the maximum number of branches so we have enough room to display them all, however
	// if node.Shrunk() is true, we only need to display the minimum number of branches.
	branchCount := max(node.startingColCount, node.endingColCount)
	if node.Shrunk() {
		branchCount = min(node.startingColCount, node.endingColCount)
	}

	// Create the basic prefix string indicating the number of branches that are being tracked.
	p := strings.Repeat("| ", max(branchCount, 1))
	buf := []rune(p)

	// The first line of a commit has a '*' in the graph to indicate what branch it resides in.
	if lineno == 0 {
		if node.Expanding() {
			buf[(branchCount-1)*2] = ' '
		}
		buf[node.col*2] = '*'
		return string(buf)
	}

	// If expanding, change all the '|' chars to '\' chars after the inserted branch
	if node.Expanding() && lineno == 1 {
		for i := node.newCols[0]; i < branchCount; i++ {
			buf[(i*2)-1] = '\\'
			buf[i*2] = ' '
		}
	}

	// if one branch is getting folded into another, show '/' where necessary to indicate that.
	if node.Shrinking() {
		foldingDistance := node.foldedCols[1] - node.foldedCols[0]
		ch := ' '
		if lineno < foldingDistance+1 {
			ch = '/'
		}
		for _, col := range node.foldedCols[1:] {
			buf[(col*2)-1] = ch
			buf[(col * 2)] = ' '
		}
	}
	return string(buf)
}
// writeMetaLines writes the commit's meta struct fields, one per line with
// labels padded to maxLabelLen, through a line-limited, graph-prefixed
// writer. Returns the next line number; the error may be writers.MaxLinesErr
// when the per-commit line budget is exhausted.
func writeMetaLines(ctx context.Context, node LogNode, maxLines, lineno, maxLabelLen int, w io.Writer, tz *time.Location) (int, error) {
	if m, ok := node.commit.MaybeGet(datas.MetaField); ok {
		genPrefix := func(w *writers.PrefixWriter) []byte {
			return []byte(genGraph(node, int(w.NumLines)))
		}
		meta := m.(types.Struct)
		mlw := &writers.MaxLineWriter{Dest: w, MaxLines: uint32(maxLines), NumLines: uint32(lineno)}
		pw := &writers.PrefixWriter{Dest: mlw, PrefixFunc: genPrefix, NeedsPrefix: true, NumLines: uint32(lineno)}
		err := d.Try(func() {
			types.TypeOf(meta).Desc.(types.StructDesc).IterFields(func(fieldName string, t *types.Type, optional bool) {
				v := meta.Get(fieldName)
				fmt.Fprintf(pw, "%-*s", maxLabelLen+2, strings.Title(fieldName)+":")
				// Encode dates as formatted string if this is a top-level meta
				// field of type datetime.DateTimeType
				if types.TypeOf(v).Equals(datetime.DateTimeType) {
					var dt datetime.DateTime
					// NOTE(review): UnmarshalNoms error is ignored here —
					// verify a malformed date can't reach this point.
					dt.UnmarshalNoms(ctx, v)
					fmt.Fprintln(pw, dt.In(tz).Format(time.RFC3339))
				} else {
					types.WriteEncodedValue(ctx, pw, v)
				}
				fmt.Fprintln(pw)
			})
		})
		return int(pw.NumLines), err
	}
	return lineno, nil
}
// writeCommitLines writes the commit's value at path (used with -show-value)
// through a line-limited, graph-prefixed writer; when the line budget is hit
// the output is truncated with "...". Returns the line count written.
func writeCommitLines(ctx context.Context, node LogNode, path types.Path, maxLines, lineno int, w io.Writer, db datas.Database) (lineCnt int, err error) {
	genPrefix := func(pw *writers.PrefixWriter) []byte {
		return []byte(genGraph(node, int(pw.NumLines)+1))
	}
	mlw := &writers.MaxLineWriter{Dest: w, MaxLines: uint32(maxLines), NumLines: uint32(lineno)}
	pw := &writers.PrefixWriter{Dest: mlw, PrefixFunc: genPrefix, NeedsPrefix: true, NumLines: uint32(lineno)}
	v := path.Resolve(ctx, node.commit, db)
	if v == nil {
		pw.Write([]byte("<nil>\n"))
	} else {
		err = types.WriteEncodedValue(ctx, pw, v)
		// Lift the line limit so the truncation marker below can be written.
		mlw.MaxLines = 0
		if err != nil {
			// NOTE(review): argument order here (sentinel first) differs from
			// the d.PanicIfNotType call in writeDiffLines — one of the two is
			// likely reversed; verify against d.PanicIfNotType's signature.
			d.PanicIfNotType(writers.MaxLinesErr, err)
			pw.NeedsPrefix = true
			pw.Write([]byte("...\n"))
			err = nil
		} else {
			pw.NeedsPrefix = false
			pw.Write([]byte("\n"))
		}
		if !node.lastCommit {
			pw.NeedsPrefix = true
			pw.Write([]byte("\n"))
		}
	}
	return int(pw.NumLines), err
}
// writeDiffLines writes the diff between this commit's value at path and its
// first parent's, through a line-limited, graph-prefixed writer; when the
// line budget is hit the output is truncated with "...". Returns the line
// count written.
func writeDiffLines(ctx context.Context, node LogNode, path types.Path, db datas.Database, maxLines, lineno int, w io.Writer) (lineCnt int, err error) {
	genPrefix := func(w *writers.PrefixWriter) []byte {
		return []byte(genGraph(node, int(w.NumLines)+1))
	}
	mlw := &writers.MaxLineWriter{Dest: w, MaxLines: uint32(maxLines), NumLines: uint32(lineno)}
	pw := &writers.PrefixWriter{Dest: mlw, PrefixFunc: genPrefix, NeedsPrefix: true, NumLines: uint32(lineno)}
	parents := node.commit.Get(datas.ParentsField).(types.Set)
	var parent types.Value
	if parents.Len() > 0 {
		parent = parents.First(ctx)
	}
	// Root commit: nothing to diff against.
	if parent == nil {
		_, err = fmt.Fprint(pw, "\n")
		return 1, err
	}

	parentCommit := parent.(types.Ref).TargetValue(ctx, db).(types.Struct)
	var old, neu types.Value
	// Resolve both sides concurrently.
	functions.All(
		func() { old = path.Resolve(ctx, parentCommit, db) },
		func() { neu = path.Resolve(ctx, node.commit, db) },
	)

	// TODO: It would be better to treat this as an add or remove, but that requires generalization
	// of some of the code in PrintDiff() because it cannot tolerate nil parameters.
	if neu == nil {
		fmt.Fprintf(pw, "new (#%s%s) not found\n", node.commit.Hash().String(), path.String())
	}
	if old == nil {
		fmt.Fprintf(pw, "old (#%s%s) not found\n", parentCommit.Hash().String(), path.String())
	}

	if old != nil && neu != nil {
		err = diff.PrintDiff(ctx, pw, old, neu, true)
		// Lift the line limit so the truncation marker below can be written.
		mlw.MaxLines = 0
		if err != nil {
			// NOTE(review): argument order here (err first) differs from the
			// d.PanicIfNotType call in writeCommitLines — one of the two is
			// likely reversed; verify against d.PanicIfNotType's signature.
			d.PanicIfNotType(err, writers.MaxLinesErr)
			pw.NeedsPrefix = true
			pw.Write([]byte("...\n"))
			err = nil
		}
	}
	if !node.lastCommit {
		pw.NeedsPrefix = true
		pw.Write([]byte("\n"))
	}
	return int(pw.NumLines), err
}
// shouldUseColor reports whether log output should be colorized: the color
// flag forces it on (1) or off (0); any other value falls back to detecting
// whether stdout is a TTY.
func shouldUseColor() bool {
	switch color {
	case 0:
		return false
	case 1:
		return true
	}
	return outputpager.IsStdoutTty()
}
// max returns the larger of i and j.
func max(i, j int) int {
	if j >= i {
		return j
	}
	return i
}
// min returns the smaller of i and j.
func min(i, j int) int {
	if j <= i {
		return j
	}
	return i
}
// locationFromTimezoneArg maps the --tz flag value to a *time.Location.
// The empty string yields defaultTZ; otherwise only "local" and "utc"
// (lowercase) are accepted.
func locationFromTimezoneArg(tz string, defaultTZ *time.Location) (*time.Location, error) {
	if tz == "" {
		return defaultTZ, nil
	}
	if tz == "local" {
		return time.Local, nil
	}
	if tz == "utc" {
		return time.UTC, nil
	}
	return nil, errors.New("value must be: local or utc")
}
-371
View File
@@ -1,371 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"testing"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/clienttest"
"github.com/attic-labs/noms/go/util/test"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
)
// TestNomsLog runs the `noms log` test suite.
func TestNomsLog(t *testing.T) {
	suite.Run(t, &nomsLogTestSuite{})
}
// nomsLogTestSuite exercises the `noms log` command against a temporary
// nbs database provided by clienttest.ClientTestSuite.
type nomsLogTestSuite struct {
	clienttest.ClientTestSuite
}
// testCommitInResults commits types.Float(i) to the dataset addressed by str
// and asserts that the new head commit's hash appears in `noms log` output.
func testCommitInResults(s *nomsLogTestSuite, str string, i int) {
	sp, err := spec.ForDataset(str)
	s.NoError(err)
	defer sp.Close()
	// Capture and check the commit error. The original discarded CommitValue's
	// return values and re-asserted the stale error from ForDataset above.
	_, err = sp.GetDatabase(context.Background()).CommitValue(context.Background(), sp.GetDataset(context.Background()), types.Float(i))
	s.NoError(err)
	commit := sp.GetDataset(context.Background()).Head()
	res, _ := s.MustRun(main, []string{"log", str})
	s.Contains(res, commit.Hash().String())
}
// TestNomsLog asserts that `noms log` panics on a dataset with no commits,
// and that subsequently committed values show up in the output.
func (s *nomsLogTestSuite) TestNomsLog() {
	sp, err := spec.ForDataset(spec.CreateValueSpecString("nbs", s.DBDir, "dsTest"))
	s.NoError(err)
	defer sp.Close()
	sp.GetDatabase(context.Background()) // create the database
	// Logging an empty dataset is expected to panic.
	s.Panics(func() { s.MustRun(main, []string{"log", sp.String()}) })
	testCommitInResults(s, sp.String(), 1)
	testCommitInResults(s, sp.String(), 2)
}
// TestNomsLogPath verifies `noms log` for a path spec (dsTest.value.bar),
// comparing --show-value and diff-mode output against golden strings
// (hashes ignored).
func (s *nomsLogTestSuite) TestNomsLogPath() {
	sp, err := spec.ForPath(spec.CreateValueSpecString("nbs", s.DBDir, "dsTest.value.bar"))
	s.NoError(err)
	defer sp.Close()
	db := sp.GetDatabase(context.Background())
	ds := sp.GetDataset(context.Background())
	// Three commits whose value is a struct with a "bar" field (0, 1, 2).
	for i := 0; i < 3; i++ {
		data := types.NewStruct("", types.StructData{
			"bar": types.Float(i),
		})
		ds, err = db.CommitValue(context.Background(), ds, data)
		s.NoError(err)
	}
	stdout, stderr := s.MustRun(main, []string{"log", "--show-value", sp.String()})
	s.Empty(stderr)
	test.EqualsIgnoreHashes(s.T(), pathValue, stdout)
	stdout, stderr = s.MustRun(main, []string{"log", sp.String()})
	s.Empty(stderr)
	test.EqualsIgnoreHashes(s.T(), pathDiff, stdout)
}
// addCommit commits the string v to ds and returns the updated dataset.
func addCommit(ds datas.Dataset, v string) (datas.Dataset, error) {
	return ds.Database().CommitValue(context.Background(), ds, types.String(v))
}
// addCommitWithValue commits an arbitrary value v to ds and returns the
// updated dataset.
func addCommitWithValue(ds datas.Dataset, v types.Value) (datas.Dataset, error) {
	return ds.Database().CommitValue(context.Background(), ds, v)
}
// addBranchedDataset commits v to newDs with parentDs's head as the sole
// parent, creating a branch point in the commit graph.
func addBranchedDataset(vrw types.ValueReadWriter, newDs, parentDs datas.Dataset, v string) (datas.Dataset, error) {
	p := types.NewSet(context.Background(), vrw, parentDs.HeadRef())
	return newDs.Database().Commit(context.Background(), newDs, types.String(v), datas.CommitOptions{Parents: p})
}
// mergeDatasets commits v to ds1 with the heads of both ds1 and ds2 as
// parents, producing a merge commit.
func mergeDatasets(vrw types.ValueReadWriter, ds1, ds2 datas.Dataset, v string) (datas.Dataset, error) {
	p := types.NewSet(context.Background(), vrw, ds1.HeadRef(), ds2.HeadRef())
	return ds1.Database().Commit(context.Background(), ds1, types.String(v), datas.CommitOptions{Parents: p})
}
// TestNArg verifies that -n limits the number of commits printed and that
// -n0 means "no limit", for both dataset specs and value (hash) specs.
func (s *nomsLogTestSuite) TestNArg() {
	dsName := "nArgTest"
	sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("nbs", s.DBDir))
	s.NoError(err)
	defer sp.Close()
	ds := sp.GetDatabase(context.Background()).GetDataset(context.Background(), dsName)
	ds, err = addCommit(ds, "1")
	// Check the commit error before dereferencing the head (the original read
	// ds.Head() first, which would panic on a failed commit before the
	// assertion fired; h2/h3 below already checked err first).
	s.NoError(err)
	h1 := ds.Head().Hash()
	ds, err = addCommit(ds, "2")
	s.NoError(err)
	h2 := ds.Head().Hash()
	ds, err = addCommit(ds, "3")
	s.NoError(err)
	h3 := ds.Head().Hash()
	dsSpec := spec.CreateValueSpecString("nbs", s.DBDir, dsName)
	// -n1 shows only the newest commit.
	res, _ := s.MustRun(main, []string{"log", "-n1", dsSpec})
	s.NotContains(res, h1.String())
	// -n0 disables the limit.
	res, _ = s.MustRun(main, []string{"log", "-n0", dsSpec})
	s.Contains(res, h3.String())
	s.Contains(res, h2.String())
	s.Contains(res, h1.String())
	// Same behavior when addressing the head commit by hash.
	vSpec := spec.CreateValueSpecString("nbs", s.DBDir, "#"+h3.String())
	res, _ = s.MustRun(main, []string{"log", "-n1", vSpec})
	s.NotContains(res, h1.String())
	res, _ = s.MustRun(main, []string{"log", "-n0", vSpec})
	s.Contains(res, h3.String())
	s.Contains(res, h2.String())
	s.Contains(res, h1.String())
}
// TestEmptyCommit checks log rendering of commits with and without a custom
// meta struct, in both full and --oneline modes (hashes ignored).
func (s *nomsLogTestSuite) TestEmptyCommit() {
	sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("nbs", s.DBDir))
	s.NoError(err)
	defer sp.Close()
	db := sp.GetDatabase(context.Background())
	ds := db.GetDataset(context.Background(), "ds1")
	// First commit carries meta fields; the second uses empty CommitOptions.
	meta := types.NewStruct("Meta", map[string]types.Value{
		"longNameForTest": types.String("Yoo"),
		"test2":           types.String("Hoo"),
	})
	ds, err = db.Commit(context.Background(), ds, types.String("1"), datas.CommitOptions{Meta: meta})
	s.NoError(err)
	ds, err = db.Commit(context.Background(), ds, types.String("2"), datas.CommitOptions{})
	s.NoError(err)
	dsSpec := spec.CreateValueSpecString("nbs", s.DBDir, "ds1")
	res, _ := s.MustRun(main, []string{"log", dsSpec})
	test.EqualsIgnoreHashes(s.T(), metaRes1, res)
	res, _ = s.MustRun(main, []string{"log", "--oneline", dsSpec})
	test.EqualsIgnoreHashes(s.T(), metaRes2, res)
}
// TestNomsGraph1 builds a three-branch history (b1, b2, b3) with two merge
// commits and compares `noms log --graph` output (value and diff modes)
// against golden strings. The exact commit order is significant: it
// determines the hashes embedded in graphRes1/diffRes1.
func (s *nomsLogTestSuite) TestNomsGraph1() {
	sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("nbs", s.DBDir))
	s.NoError(err)
	defer sp.Close()
	db := sp.GetDatabase(context.Background())
	// b1: 1 -> 2 -> 3
	b1 := db.GetDataset(context.Background(), "b1")
	b1, err = addCommit(b1, "1")
	s.NoError(err)
	b1, err = addCommit(b1, "2")
	s.NoError(err)
	b1, err = addCommit(b1, "3")
	s.NoError(err)
	// b2 branches off b1 at "3".
	b2 := db.GetDataset(context.Background(), "b2")
	b2, err = addBranchedDataset(db, b2, b1, "3.1")
	s.NoError(err)
	b1, err = addCommit(b1, "3.2")
	s.NoError(err)
	b1, err = addCommit(b1, "3.6")
	s.NoError(err)
	// b3 branches off b2, then merges back into b2 as "3.5".
	b3 := db.GetDataset(context.Background(), "b3")
	b3, err = addBranchedDataset(db, b3, b2, "3.1.3")
	s.NoError(err)
	b3, err = addCommit(b3, "3.1.5")
	s.NoError(err)
	b3, err = addCommit(b3, "3.1.7")
	s.NoError(err)
	b2, err = mergeDatasets(db, b2, b3, "3.5")
	s.NoError(err)
	b2, err = addCommit(b2, "3.7")
	s.NoError(err)
	// b2 merges back into b1 as "4", then b1 continues linearly.
	b1, err = mergeDatasets(db, b1, b2, "4")
	s.NoError(err)
	b1, err = addCommit(b1, "5")
	s.NoError(err)
	b1, err = addCommit(b1, "6")
	s.NoError(err)
	b1, err = addCommit(b1, "7")
	s.NoError(err)
	res, _ := s.MustRun(main, []string{"log", "--graph", "--show-value", spec.CreateValueSpecString("nbs", s.DBDir, "b1")})
	s.Equal(graphRes1, res)
	res, _ = s.MustRun(main, []string{"log", "--graph", spec.CreateValueSpecString("nbs", s.DBDir, "b1")})
	s.Equal(diffRes1, res)
}
// TestNomsGraph2 merges three independent root commits (ba, bb, bc) and
// checks the rendered graph against golden strings.
func (s *nomsLogTestSuite) TestNomsGraph2() {
	sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("nbs", s.DBDir))
	s.NoError(err)
	defer sp.Close()
	db := sp.GetDatabase(context.Background())
	ba := db.GetDataset(context.Background(), "ba")
	ba, err = addCommit(ba, "1")
	s.NoError(err)
	bb := db.GetDataset(context.Background(), "bb")
	bb, err = addCommit(bb, "10")
	s.NoError(err)
	bc := db.GetDataset(context.Background(), "bc")
	bc, err = addCommit(bc, "100")
	s.NoError(err)
	// Merge bb into ba, then bc into the result.
	ba, err = mergeDatasets(db, ba, bb, "11")
	s.NoError(err)
	_, err = mergeDatasets(db, ba, bc, "101")
	s.NoError(err)
	res, _ := s.MustRun(main, []string{"log", "--graph", "--show-value", spec.CreateValueSpecString("nbs", s.DBDir, "ba")})
	s.Equal(graphRes2, res)
	res, _ = s.MustRun(main, []string{"log", "--graph", spec.CreateValueSpecString("nbs", s.DBDir, "ba")})
	s.Equal(diffRes2, res)
}
// TestNomsGraph3 creates three branches (x, y, z) off w and merges them back
// in sequence, checking the rendered multi-branch graph against golden
// strings (hashes ignored).
func (s *nomsLogTestSuite) TestNomsGraph3() {
	sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("nbs", s.DBDir))
	s.NoError(err)
	defer sp.Close()
	db := sp.GetDatabase(context.Background())
	w := db.GetDataset(context.Background(), "w")
	w, err = addCommit(w, "1")
	s.NoError(err)
	w, err = addCommit(w, "2")
	s.NoError(err)
	// Three branches all rooted at w's "2".
	x := db.GetDataset(context.Background(), "x")
	x, err = addBranchedDataset(db, x, w, "20-x")
	s.NoError(err)
	y := db.GetDataset(context.Background(), "y")
	y, err = addBranchedDataset(db, y, w, "200-y")
	s.NoError(err)
	z := db.GetDataset(context.Background(), "z")
	z, err = addBranchedDataset(db, z, w, "2000-z")
	s.NoError(err)
	// Merge each branch back into w, one at a time.
	w, err = mergeDatasets(db, w, x, "22-wx")
	s.NoError(err)
	w, err = mergeDatasets(db, w, y, "222-wy")
	s.NoError(err)
	_, err = mergeDatasets(db, w, z, "2222-wz")
	s.NoError(err)
	res, _ := s.MustRun(main, []string{"log", "--graph", "--show-value", spec.CreateValueSpecString("nbs", s.DBDir, "w")})
	test.EqualsIgnoreHashes(s.T(), graphRes3, res)
	res, _ = s.MustRun(main, []string{"log", "--graph", spec.CreateValueSpecString("nbs", s.DBDir, "w")})
	test.EqualsIgnoreHashes(s.T(), diffRes3, res)
}
// TestTruncation checks --max-lines behavior on a long list value: the
// default limit truncates output with "...", -1 means unlimited, and 0
// suppresses the value/diff output entirely.
func (s *nomsLogTestSuite) TestTruncation() {
	sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("nbs", s.DBDir))
	s.NoError(err)
	defer sp.Close()
	db := sp.GetDatabase(context.Background())
	// toNomsList converts a []string into a noms List of Strings.
	toNomsList := func(l []string) types.List {
		nv := []types.Value{}
		for _, v := range l {
			nv = append(nv, types.String(v))
		}
		return types.NewList(context.Background(), db, nv...)
	}
	t := db.GetDataset(context.Background(), "truncate")
	t, err = addCommit(t, "the first line")
	s.NoError(err)
	// Eleven items: long enough to trip the default line limit.
	l := []string{"one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "eleven"}
	_, err = addCommitWithValue(t, toNomsList(l))
	s.NoError(err)
	dsSpec := spec.CreateValueSpecString("nbs", s.DBDir, "truncate")
	res, _ := s.MustRun(main, []string{"log", "--graph", "--show-value", dsSpec})
	test.EqualsIgnoreHashes(s.T(), truncRes1, res)
	res, _ = s.MustRun(main, []string{"log", "--graph", dsSpec})
	test.EqualsIgnoreHashes(s.T(), diffTrunc1, res)
	res, _ = s.MustRun(main, []string{"log", "--graph", "--show-value", "--max-lines=-1", dsSpec})
	test.EqualsIgnoreHashes(s.T(), truncRes2, res)
	res, _ = s.MustRun(main, []string{"log", "--graph", "--max-lines=-1", dsSpec})
	test.EqualsIgnoreHashes(s.T(), diffTrunc2, res)
	res, _ = s.MustRun(main, []string{"log", "--graph", "--show-value", "--max-lines=0", dsSpec})
	test.EqualsIgnoreHashes(s.T(), truncRes3, res)
	res, _ = s.MustRun(main, []string{"log", "--graph", "--max-lines=0", dsSpec})
	test.EqualsIgnoreHashes(s.T(), diffTrunc3, res)
}
// TestBranchlistSplice exercises branchList.Splice insertion and removal,
// plus bulk removal via RemoveBranches, on a small list.
func TestBranchlistSplice(t *testing.T) {
	a := assert.New(t)
	list := branchList{}
	// fill prepends four empty branches.
	fill := func() {
		for n := 0; n < 4; n++ {
			list = list.Splice(0, 0, branch{})
		}
	}
	fill()
	a.Equal(4, len(list))
	// Remove one element at a time, in mixed positions, until empty.
	list = list.Splice(3, 1)
	list = list.Splice(0, 1)
	list = list.Splice(1, 1)
	list = list.Splice(0, 1)
	a.Zero(len(list))
	fill()
	a.Equal(4, len(list))
	// Bulk-remove three of the four branches.
	list = list.RemoveBranches([]int{1, 2, 3})
	a.Equal(1, len(list))
}
const (
graphRes1 = "* cmvkhq582litl19dtf9rdr27lkmmjl5a\n| Parent: n34kv1pmaq511ej6hpvqf6nun0nfsva6\n| \"7\"\n| \n* n34kv1pmaq511ej6hpvqf6nun0nfsva6\n| Parent: j9nk6bv9r7ep1j459j0mv2clof6s7792\n| \"6\"\n| \n* j9nk6bv9r7ep1j459j0mv2clof6s7792\n| Parent: 8rkr9of92el4fvg2quhflro7615roouc\n| \"5\"\n| \n* 8rkr9of92el4fvg2quhflro7615roouc\n|\\ Merge: r4c42m0u4k8g08ivo005p1k3e9c8a3tq n05ujdtqd9enisrbcrifhc6n41anur3g\n| | \"4\"\n| | \n| * n05ujdtqd9enisrbcrifhc6n41anur3g\n| | Parent: 6eu8a3l1gunugn7uinotiv8saf783pok\n| | \"3.7\"\n| | \n| * 6eu8a3l1gunugn7uinotiv8saf783pok\n| |\\ Merge: 92u4ja5p42gr6galrq7o5ubad98qk3pp f464nqgept56er12l7ikaj9jhrdrckb7\n| | | \"3.5\"\n| | | \n| | * f464nqgept56er12l7ikaj9jhrdrckb7\n| | | Parent: gjggba0bhcjd10kmooemvbvr4gnokdcm\n| | | \"3.1.7\"\n| | | \n| | * gjggba0bhcjd10kmooemvbvr4gnokdcm\n| | | Parent: q7gc5legocg4gq4qfd2v0i41sm9q2p9d\n| | | \"3.1.5\"\n| | | \n* | | r4c42m0u4k8g08ivo005p1k3e9c8a3tq\n| | | Parent: 885bl3ggjtnf9e4h4d3cnidh576hm27u\n| | | \"3.6\"\n| | | \n| | * q7gc5legocg4gq4qfd2v0i41sm9q2p9d\n| | | Parent: 92u4ja5p42gr6galrq7o5ubad98qk3pp\n| | | \"3.1.3\"\n| | | \n* | | 885bl3ggjtnf9e4h4d3cnidh576hm27u\n| |/ Parent: 7f8hmd1okp98ovnph695kumm4lknuqcd\n| | \"3.2\"\n| | \n| * 92u4ja5p42gr6galrq7o5ubad98qk3pp\n|/ Parent: 7f8hmd1okp98ovnph695kumm4lknuqcd\n| \"3.1\"\n| \n* 7f8hmd1okp98ovnph695kumm4lknuqcd\n| Parent: 2r6g3brn1867i66rri1suk49fev2js7b\n| \"3\"\n| \n* 2r6g3brn1867i66rri1suk49fev2js7b\n| Parent: ppu3smo89vu0bdukubgooo8efeo1i7q3\n| \"2\"\n| \n* ppu3smo89vu0bdukubgooo8efeo1i7q3\n| Parent: None\n| \"1\"\n"
diffRes1 = "* cmvkhq582litl19dtf9rdr27lkmmjl5a\n| Parent: n34kv1pmaq511ej6hpvqf6nun0nfsva6\n| - \"6\"\n| + \"7\"\n| \n* n34kv1pmaq511ej6hpvqf6nun0nfsva6\n| Parent: j9nk6bv9r7ep1j459j0mv2clof6s7792\n| - \"5\"\n| + \"6\"\n| \n* j9nk6bv9r7ep1j459j0mv2clof6s7792\n| Parent: 8rkr9of92el4fvg2quhflro7615roouc\n| - \"4\"\n| + \"5\"\n| \n* 8rkr9of92el4fvg2quhflro7615roouc\n|\\ Merge: r4c42m0u4k8g08ivo005p1k3e9c8a3tq n05ujdtqd9enisrbcrifhc6n41anur3g\n| | - \"3.6\"\n| | + \"4\"\n| | \n| * n05ujdtqd9enisrbcrifhc6n41anur3g\n| | Parent: 6eu8a3l1gunugn7uinotiv8saf783pok\n| | - \"3.5\"\n| | + \"3.7\"\n| | \n| * 6eu8a3l1gunugn7uinotiv8saf783pok\n| |\\ Merge: 92u4ja5p42gr6galrq7o5ubad98qk3pp f464nqgept56er12l7ikaj9jhrdrckb7\n| | | - \"3.1\"\n| | | + \"3.5\"\n| | | \n| | * f464nqgept56er12l7ikaj9jhrdrckb7\n| | | Parent: gjggba0bhcjd10kmooemvbvr4gnokdcm\n| | | - \"3.1.5\"\n| | | + \"3.1.7\"\n| | | \n| | * gjggba0bhcjd10kmooemvbvr4gnokdcm\n| | | Parent: q7gc5legocg4gq4qfd2v0i41sm9q2p9d\n| | | - \"3.1.3\"\n| | | + \"3.1.5\"\n| | | \n* | | r4c42m0u4k8g08ivo005p1k3e9c8a3tq\n| | | Parent: 885bl3ggjtnf9e4h4d3cnidh576hm27u\n| | | - \"3.2\"\n| | | + \"3.6\"\n| | | \n| | * q7gc5legocg4gq4qfd2v0i41sm9q2p9d\n| | | Parent: 92u4ja5p42gr6galrq7o5ubad98qk3pp\n| | | - \"3.1\"\n| | | + \"3.1.3\"\n| | | \n* | | 885bl3ggjtnf9e4h4d3cnidh576hm27u\n| |/ Parent: 7f8hmd1okp98ovnph695kumm4lknuqcd\n| | - \"3\"\n| | + \"3.2\"\n| | \n| * 92u4ja5p42gr6galrq7o5ubad98qk3pp\n|/ Parent: 7f8hmd1okp98ovnph695kumm4lknuqcd\n| - \"3\"\n| + \"3.1\"\n| \n* 7f8hmd1okp98ovnph695kumm4lknuqcd\n| Parent: 2r6g3brn1867i66rri1suk49fev2js7b\n| - \"2\"\n| + \"3\"\n| \n* 2r6g3brn1867i66rri1suk49fev2js7b\n| Parent: ppu3smo89vu0bdukubgooo8efeo1i7q3\n| - \"1\"\n| + \"2\"\n| \n* ppu3smo89vu0bdukubgooo8efeo1i7q3\n| Parent: None\n| \n"
graphRes2 = "* 3vtsgav7kotnm4t0g441oah0fhc8f612\n|\\ Merge: mlfoejp9rhuq4mlmedvceq46ecofu4fm 5tfaqaak42q0iq8p8d5ej8ju1p1o18t6\n| | \"101\"\n| | \n| * 5tfaqaak42q0iq8p8d5ej8ju1p1o18t6\n| |\\ Merge: ppu3smo89vu0bdukubgooo8efeo1i7q3 7f0obrqkr8pge2o0r6tgkub7jikl0638\n| | | \"11\"\n| | | \n* | mlfoejp9rhuq4mlmedvceq46ecofu4fm\n| | Parent: None\n| | \"100\"\n| | \n* ppu3smo89vu0bdukubgooo8efeo1i7q3\n| Parent: None\n| \"1\"\n| \n* 7f0obrqkr8pge2o0r6tgkub7jikl0638\n| Parent: None\n| \"10\"\n"
diffRes2 = "* 3vtsgav7kotnm4t0g441oah0fhc8f612\n|\\ Merge: mlfoejp9rhuq4mlmedvceq46ecofu4fm 5tfaqaak42q0iq8p8d5ej8ju1p1o18t6\n| | - \"100\"\n| | + \"101\"\n| | \n| * 5tfaqaak42q0iq8p8d5ej8ju1p1o18t6\n| |\\ Merge: ppu3smo89vu0bdukubgooo8efeo1i7q3 7f0obrqkr8pge2o0r6tgkub7jikl0638\n| | | - \"1\"\n| | | + \"11\"\n| | | \n* | mlfoejp9rhuq4mlmedvceq46ecofu4fm\n| | Parent: None\n| | \n* ppu3smo89vu0bdukubgooo8efeo1i7q3\n| Parent: None\n| \n* 7f0obrqkr8pge2o0r6tgkub7jikl0638\n| Parent: None\n| \n"
graphRes3 = "* l2pilhhluk535j4620taktd87tr7bma3\n|\\ Merge: v4qkffjhedt7nmu1n8b95csd9g646iki mjneuuiveli2coui1qjm7rbr4acpaq7c\n| | \"2222-wz\"\n| | \n| * mjneuuiveli2coui1qjm7rbr4acpaq7c\n| |\\ Merge: p5pgg27dcgclv02mlde0qfmuud21mmlj mpagh8od3kpjigrl6pt3atj5bofs8tel\n| | | \"222-wy\"\n| | | \n| | * mpagh8od3kpjigrl6pt3atj5bofs8tel\n| | |\\ Merge: 2r6g3brn1867i66rri1suk49fev2js7b 5ve449uov4tl9f8gmgvf3jpj9cc32iu4\n| | | | \"22-wx\"\n| | | | \n* | | | v4qkffjhedt7nmu1n8b95csd9g646iki\n| | | | Parent: 2r6g3brn1867i66rri1suk49fev2js7b\n| | | | \"2000-z\"\n| | | | \n| * | | p5pgg27dcgclv02mlde0qfmuud21mmlj\n| | | | Parent: 2r6g3brn1867i66rri1suk49fev2js7b\n| | | | \"200-y\"\n| | | | \n| | | * 5ve449uov4tl9f8gmgvf3jpj9cc32iu4\n|/ / / Parent: 2r6g3brn1867i66rri1suk49fev2js7b\n| \"20-x\"\n| \n* 2r6g3brn1867i66rri1suk49fev2js7b\n| Parent: ppu3smo89vu0bdukubgooo8efeo1i7q3\n| \"2\"\n| \n* ppu3smo89vu0bdukubgooo8efeo1i7q3\n| Parent: None\n| \"1\"\n"
diffRes3 = "* l2pilhhluk535j4620taktd87tr7bma3\n|\\ Merge: v4qkffjhedt7nmu1n8b95csd9g646iki mjneuuiveli2coui1qjm7rbr4acpaq7c\n| | - \"2000-z\"\n| | + \"2222-wz\"\n| | \n| * mjneuuiveli2coui1qjm7rbr4acpaq7c\n| |\\ Merge: p5pgg27dcgclv02mlde0qfmuud21mmlj mpagh8od3kpjigrl6pt3atj5bofs8tel\n| | | - \"200-y\"\n| | | + \"222-wy\"\n| | | \n| | * mpagh8od3kpjigrl6pt3atj5bofs8tel\n| | |\\ Merge: 2r6g3brn1867i66rri1suk49fev2js7b 5ve449uov4tl9f8gmgvf3jpj9cc32iu4\n| | | | - \"2\"\n| | | | + \"22-wx\"\n| | | | \n* | | | v4qkffjhedt7nmu1n8b95csd9g646iki\n| | | | Parent: 2r6g3brn1867i66rri1suk49fev2js7b\n| | | | - \"2\"\n| | | | + \"2000-z\"\n| | | | \n| * | | p5pgg27dcgclv02mlde0qfmuud21mmlj\n| | | | Parent: 2r6g3brn1867i66rri1suk49fev2js7b\n| | | | - \"2\"\n| | | | + \"200-y\"\n| | | | \n| | | * 5ve449uov4tl9f8gmgvf3jpj9cc32iu4\n|/ / / Parent: 2r6g3brn1867i66rri1suk49fev2js7b\n| - \"2\"\n| + \"20-x\"\n| \n* 2r6g3brn1867i66rri1suk49fev2js7b\n| Parent: ppu3smo89vu0bdukubgooo8efeo1i7q3\n| - \"1\"\n| + \"2\"\n| \n* ppu3smo89vu0bdukubgooo8efeo1i7q3\n| Parent: None\n| \n"
truncRes1 = "* p1442asfqnhgv1ebg6rijhl3kb9n4vt3\n| Parent: 4tq9si4tk8n0pead7hovehcbuued45sa\n| [ // 11 items\n| \"one\",\n| \"two\",\n| \"three\",\n| \"four\",\n| \"five\",\n| \"six\",\n| \"seven\",\n| ...\n| \n* 4tq9si4tk8n0pead7hovehcbuued45sa\n| Parent: None\n| \"the first line\"\n"
diffTrunc1 = "* p1442asfqnhgv1ebg6rijhl3kb9n4vt3\n| Parent: 4tq9si4tk8n0pead7hovehcbuued45sa\n| - \"the first line\"\n| + [ // 11 items\n| + \"one\",\n| + \"two\",\n| + \"three\",\n| + \"four\",\n| + \"five\",\n| + \"six\",\n| ...\n| \n* 4tq9si4tk8n0pead7hovehcbuued45sa\n| Parent: None\n| \n"
truncRes2 = "* p1442asfqnhgv1ebg6rijhl3kb9n4vt3\n| Parent: 4tq9si4tk8n0pead7hovehcbuued45sa\n| [ // 11 items\n| \"one\",\n| \"two\",\n| \"three\",\n| \"four\",\n| \"five\",\n| \"six\",\n| \"seven\",\n| \"eight\",\n| \"nine\",\n| \"ten\",\n| \"eleven\",\n| ]\n| \n* 4tq9si4tk8n0pead7hovehcbuued45sa\n| Parent: None\n| \"the first line\"\n"
diffTrunc2 = "* p1442asfqnhgv1ebg6rijhl3kb9n4vt3\n| Parent: 4tq9si4tk8n0pead7hovehcbuued45sa\n| - \"the first line\"\n| + [ // 11 items\n| + \"one\",\n| + \"two\",\n| + \"three\",\n| + \"four\",\n| + \"five\",\n| + \"six\",\n| + \"seven\",\n| + \"eight\",\n| + \"nine\",\n| + \"ten\",\n| + \"eleven\",\n| + ]\n| \n* 4tq9si4tk8n0pead7hovehcbuued45sa\n| Parent: None\n| \n"
truncRes3 = "* p1442asfqnhgv1ebg6rijhl3kb9n4vt3\n| Parent: 4tq9si4tk8n0pead7hovehcbuued45sa\n* 4tq9si4tk8n0pead7hovehcbuued45sa\n| Parent: None\n"
diffTrunc3 = "* p1442asfqnhgv1ebg6rijhl3kb9n4vt3\n| Parent: 4tq9si4tk8n0pead7hovehcbuued45sa\n* 4tq9si4tk8n0pead7hovehcbuued45sa\n| Parent: None\n"
metaRes1 = "p7jmuh67vhfccnqk1bilnlovnms1m67o\nParent: f8gjiv5974ojir9tnrl2k393o4s1tf0r\n- \"1\"\n+ \"2\"\n\nf8gjiv5974ojir9tnrl2k393o4s1tf0r\nParent: None\nLongNameForTest: \"Yoo\"\nTest2: \"Hoo\"\n\n"
metaRes2 = "p7jmuh67vhfccnqk1bilnlovnms1m67o (Parent: f8gjiv5974ojir9tnrl2k393o4s1tf0r)\nf8gjiv5974ojir9tnrl2k393o4s1tf0r (Parent: None)\n"
pathValue = "oki4cv7vkh743rccese3r3omf6l6mao4\nParent: lca4vejkm0iqsk7ok5322pt61u4otn6q\n2\n\nlca4vejkm0iqsk7ok5322pt61u4otn6q\nParent: u42pi8ukgkvpoi6n7d46cklske41oguf\n1\n\nu42pi8ukgkvpoi6n7d46cklske41oguf\nParent: hgmlqmsnrb3sp9jqc6mas8kusa1trrs2\n0\n\nhgmlqmsnrb3sp9jqc6mas8kusa1trrs2\nParent: hffiuecdpoq622tamm3nvungeca99ohl\n<nil>\nhffiuecdpoq622tamm3nvungeca99ohl\nParent: None\n<nil>\n"
pathDiff = "oki4cv7vkh743rccese3r3omf6l6mao4\nParent: lca4vejkm0iqsk7ok5322pt61u4otn6q\n- 1\n+ 2\n\nlca4vejkm0iqsk7ok5322pt61u4otn6q\nParent: u42pi8ukgkvpoi6n7d46cklske41oguf\n- 0\n+ 1\n\nu42pi8ukgkvpoi6n7d46cklske41oguf\nParent: hgmlqmsnrb3sp9jqc6mas8kusa1trrs2\nold (#hgmlqmsnrb3sp9jqc6mas8kusa1trrs2.value.bar) not found\n\nhgmlqmsnrb3sp9jqc6mas8kusa1trrs2\nParent: hffiuecdpoq622tamm3nvungeca99ohl\nnew (#hgmlqmsnrb3sp9jqc6mas8kusa1trrs2.value.bar) not found\nold (#hffiuecdpoq622tamm3nvungeca99ohl.value.bar) not found\n\nhffiuecdpoq622tamm3nvungeca99ohl\nParent: None\n\n"
)
-128
View File
@@ -1,128 +0,0 @@
package main
import (
"context"
"fmt"
"math/big"
"os"
"path/filepath"
"sort"
"github.com/dustin/go-humanize"
"github.com/attic-labs/noms/go/nbs"
"github.com/attic-labs/noms/go/spec"
flag "github.com/juju/gnuflag"
"github.com/attic-labs/noms/cmd/util"
)
// nomsManifest is the `noms manifest` command: it prints the contents of an
// nbs database's manifest file (version, lock, root, and table files).
var nomsManifest = &util.Command{
	Run:       runManifest,
	UsageLine: "manifest <db-spec>",
	Short:     "Get or set the current root hash of the entire database",
	Long:      "See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the database argument.",
	Flags:     setupManifestFlags,
	Nargs:     1,
}
// NbsFile pairs a table spec from the manifest with the result of stat'ing
// the corresponding file on disk.
type NbsFile struct {
	manifestSpec nbs.TableSpecInfo
	fileInfo     os.FileInfo // valid only when fileInfoErr is nil
	fileInfoErr  error       // non-nil when the table file could not be stat'ed
}
// sizeStr renders the table file's size in humanized form, or "-" when the
// file could not be stat'ed.
func (f NbsFile) sizeStr() string {
	if f.fileInfoErr != nil {
		return "-"
	}
	var bi big.Int
	bi.SetInt64(f.fileInfo.Size())
	return humanize.BigBytes(&bi)
}
// modTimeStr renders the table file's modification time truncated to 22
// characters, or "-" when the file could not be stat'ed.
func (f NbsFile) modTimeStr() string {
	if f.fileInfoErr != nil {
		return "-"
	}
	return f.fileInfo.ModTime().String()[:22]
}
// setupManifestFlags returns the (currently empty) flag set for `noms manifest`.
func setupManifestFlags() *flag.FlagSet {
	flagSet := flag.NewFlagSet("manifest", flag.ExitOnError)
	return flagSet
}
// runManifest prints the parsed nbs manifest for the database at args[0]:
// version, lock, root hash, and the referenced table files sorted by
// modification time. It returns a process exit code (0 on success).
func runManifest(ctx context.Context, args []string) int {
	if len(args) < 1 {
		fmt.Fprintln(os.Stderr, "Not enough arguments")
		// Fixed: previously returned 0, wrongly signaling success on a usage
		// error; every other error path here returns 1.
		return 1
	}
	dbArg := args[0]
	spec, err := spec.ForDatabase(dbArg)
	if err != nil {
		fmt.Fprintln(os.Stderr, args[0]+" is not a valid database spec")
		return 1
	}
	if spec.Protocol != "nbs" {
		fmt.Fprintln(os.Stderr, spec.Protocol+" databases not supported by this command yet. Only nbs")
		return 1
	}
	manifestFile := filepath.Join(spec.DatabaseName, "manifest")
	manifestReader, err := os.Open(manifestFile)
	if err != nil {
		fmt.Fprintln(os.Stderr, "Could not read file", manifestFile, err)
		return 1
	}
	// Fixed: the file handle was previously leaked.
	defer manifestReader.Close()
	manifest := nbs.ParseManifest(manifestReader)
	numSpecs := manifest.NumTableSpecs()
	nbsFiles := make([]NbsFile, numSpecs)
	for i := 0; i < numSpecs; i++ {
		tableSpecInfo := manifest.GetTableSpecInfo(i)
		path := filepath.Join(spec.DatabaseName, tableSpecInfo.GetName())
		fileInfo, err := os.Stat(path)
		nbsFiles[i] = NbsFile{tableSpecInfo, fileInfo, err}
	}
	// Sorting by time stamp makes it much easier to see what happens over time
	// and understand what is going on as you run different operations. Files
	// that failed to stat sort first.
	sort.SliceStable(nbsFiles, func(i, j int) bool {
		f1Stat, err1 := nbsFiles[i].fileInfo, nbsFiles[i].fileInfoErr
		f2Stat, err2 := nbsFiles[j].fileInfo, nbsFiles[j].fileInfoErr
		if err1 != nil {
			return true
		} else if err2 != nil {
			return false
		}
		return f1Stat.ModTime().Before(f2Stat.ModTime())
	})
	fmt.Println(manifestFile + ":")
	fmt.Printf(" version: %s\n", manifest.GetVersion())
	fmt.Printf(" lock: %s\n", manifest.GetLock())
	fmt.Printf(" root: %s\n", manifest.GetRoot())
	fmt.Println(" referenced nbs files:")
	for _, nbsFile := range nbsFiles {
		name := nbsFile.manifestSpec.GetName()
		chunkCnt := nbsFile.manifestSpec.GetChunkCount()
		sizeStr := nbsFile.sizeStr()
		existsStr := nbsFile.fileInfoErr == nil
		modTimeStr := nbsFile.modTimeStr()
		fmt.Printf(" %s chunks: %2d exists: %-6t size: %7s modified: %10s\n", name, chunkCnt, existsStr, sizeStr, modTimeStr)
	}
	return 0
}
-121
View File
@@ -1,121 +0,0 @@
// Copyright 2018 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"github.com/attic-labs/kingpin"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/diff"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
)
// nomsMap registers the `noms map` command group (new/set/del) with kingpin
// and returns a handler that dispatches to the selected subcommand.
func nomsMap(ctx context.Context, noms *kingpin.Application) (*kingpin.CmdClause, util.KingpinHandler) {
	maap := noms.Command("map", "interact with maps")
	// map new <database> [<entries>...]
	mapNew := maap.Command("new", "creates a new map")
	newDb := mapNew.Arg("database", "spec to db to create map within").Required().String()
	newEntries := mapNew.Arg("entries", "key/value pairs for entries").Strings()
	// map set <spec> [<entries>...]
	mapSet := maap.Command("set", "sets one or more keys in a map")
	setSpec := mapSet.Arg("spec", "value spec for the map to edit").Required().String()
	setEntries := mapSet.Arg("entries", "key/value pairs for entries").Strings()
	// map del <spec> [<keys>...]
	mapDel := maap.Command("del", "removes one or more entries from a map")
	delSpec := mapDel.Arg("spec", "value spec for the map to edit").Required().String()
	delKeys := mapDel.Arg("keys", "keys for the entries to be removed").Strings()
	return maap, func(input string) int {
		switch input {
		case mapNew.FullCommand():
			return nomsMapNew(ctx, *newDb, *newEntries)
		case mapSet.FullCommand():
			return nomsMapSet(ctx, *setSpec, *setEntries)
		case mapDel.FullCommand():
			return nomsMapDel(ctx, *delSpec, *delKeys)
		}
		// kingpin guarantees input matches one of the registered subcommands.
		d.Panic("notreached")
		return 1
	}
}
// nomsMapNew implements `noms map new`: it creates an empty map in the
// database at dbStr and applies the key/value pairs in args as edits.
// Returns a process exit code.
func nomsMapNew(ctx context.Context, dbStr string, args []string) int {
	sp, err := spec.ForDatabase(dbStr)
	d.PanicIfError(err)
	applyMapEdits(ctx, sp, types.NewMap(ctx, sp.GetDatabase(ctx)), nil, args)
	return 0
}
// nomsMapSet implements `noms map set`: it applies the key/value pairs in
// args as edits to the map addressed by specStr. Returns a process exit code.
func nomsMapSet(ctx context.Context, specStr string, args []string) int {
	sp, err := spec.ForPath(specStr)
	d.PanicIfError(err)
	rootVal, basePath := splitPath(ctx, sp)
	applyMapEdits(ctx, sp, rootVal, basePath, args)
	return 0
}
// nomsMapDel implements `noms map del`: it removes the entries whose keys
// are listed in args from the map addressed by specStr. Returns a process
// exit code.
func nomsMapDel(ctx context.Context, specStr string, args []string) int {
	sp, err := spec.ForPath(specStr)
	d.PanicIfError(err)
	rootVal, basePath := splitPath(ctx, sp)
	// Build one removal Difference per key argument.
	var patch diff.Patch
	for i := range args {
		patch = append(patch, diff.Difference{
			Path:       append(basePath, parseKeyPart(args, i)),
			ChangeType: types.DiffChangeRemoved,
		})
	}
	// NOTE: the helper really is spelled "appplyPatch" elsewhere in this package.
	appplyPatch(ctx, sp, rootVal, basePath, patch)
	return 0
}
// applyMapEdits builds a diff.Patch of DiffChangeModified entries from the
// alternating key/value strings in args and applies it to rootVal at
// basePath. args must have even length; exits with a usage error otherwise.
func applyMapEdits(ctx context.Context, sp spec.Spec, rootVal types.Value, basePath types.Path, args []string) {
	if len(args)%2 != 0 {
		d.CheckError(fmt.Errorf("Must be an even number of key/value pairs"))
	}
	if rootVal == nil {
		d.CheckErrorNoUsage(fmt.Errorf("No value at: %s", sp.String()))
		return
	}
	db := sp.GetDatabase(ctx)
	patch := diff.Patch{}
	// args alternate key, value, key, value, ...
	for i := 0; i < len(args); i += 2 {
		kp := parseKeyPart(args, i)
		vv, err := argumentToValue(ctx, args[i+1], db)
		if err != nil {
			d.CheckError(fmt.Errorf("Invalid value: %s at position %d: %s", args[i+1], i+1, err))
		}
		patch = append(patch, diff.Difference{
			Path:       append(basePath, kp),
			ChangeType: types.DiffChangeModified,
			NewValue:   vv,
		})
	}
	// NOTE: the helper really is spelled "appplyPatch" elsewhere in this package.
	appplyPatch(ctx, sp, rootVal, basePath, patch)
}
// parseKeyPart parses args[i] as a map-key path component: an ordinary index
// path when the argument parses to a value, otherwise a hash index path.
// Exits with a usage error when the argument is not a valid path index.
func parseKeyPart(args []string, i int) (res types.PathPart) {
	idx, h, rem, err := types.ParsePathIndex(args[i])
	if rem != "" {
		// Trailing unparsed characters after the index.
		d.CheckError(fmt.Errorf("Invalid key: %s at position %d", args[i], i))
	}
	if err != nil {
		d.CheckError(fmt.Errorf("Invalid key: %s at position %d: %s", args[i], i, err))
	}
	if idx != nil {
		res = types.NewIndexPath(idx)
	} else {
		res = types.NewHashIndexPath(h)
	}
	return
}
-180
View File
@@ -1,180 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"io"
"os"
"regexp"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/config"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/merge"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/status"
"github.com/attic-labs/noms/go/util/verbose"
flag "github.com/juju/gnuflag"
)
var (
	// resolver holds the value of the --policy flag (see setupMergeFlags).
	resolver string

	// nomsMerge is the `noms merge` command: it three-way-merges the heads of
	// two datasets and commits the result to a third.
	nomsMerge = &util.Command{
		Run:       runMerge,
		UsageLine: "merge [options] <database> <left-dataset-name> <right-dataset-name> <output-dataset-name>",
		Short:     "Merges and commits the head values of two named datasets",
		// Fixed typo in user-facing help text: "Yu must provide" -> "You must provide".
		Long:  "See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the database argument.\nYou must provide a working database and the names of two Datasets you want to merge. The values at the heads of these Datasets will be merged, put into a new Commit object, and set as the Head of the third provided Dataset name.",
		Flags: setupMergeFlags,
		Nargs: 1, // runMerge itself enforces the full argument count (4)
	}

	// datasetRe anchors datas.DatasetRe so dataset-name arguments must match
	// in their entirety.
	datasetRe = regexp.MustCompile("^" + datas.DatasetRe.String() + "$")
)
// setupMergeFlags builds the flag set for `noms merge`, binding the --policy
// flag to the package-level resolver variable.
func setupMergeFlags() *flag.FlagSet {
	commitFlagSet := flag.NewFlagSet("merge", flag.ExitOnError)
	commitFlagSet.StringVar(&resolver, "policy", "n", "conflict resolution policy for merging. Defaults to 'n', which means no resolution strategy will be applied. Supported values are 'l' (left), 'r' (right) and 'p' (prompt). 'prompt' will bring up a simple command-line prompt allowing you to resolve conflicts by choosing between 'l' or 'r' on a case-by-case basis.")
	verbose.RegisterVerboseFlags(commitFlagSet)
	return commitFlagSet
}
// checkIfTrue aborts the process (via d.CheckErrorNoUsage) with the
// formatted message when b is true; otherwise it is a no-op.
func checkIfTrue(b bool, format string, args ...interface{}) {
	if b {
		d.CheckErrorNoUsage(fmt.Errorf(format, args...))
	}
}
// runMerge implements `noms merge`: it three-way-merges the head values of
// the datasets named args[1] and args[2] in the database args[0], using the
// policy selected by the --policy flag, and sets the merged commit as the
// head of args[3]. Returns a process exit code.
func runMerge(ctx context.Context, args []string) int {
	cfg := config.NewResolver()
	if len(args) != 4 {
		d.CheckErrorNoUsage(fmt.Errorf("Incorrect number of arguments"))
	}
	db, err := cfg.GetDatabase(ctx, args[0])
	d.CheckError(err)
	defer db.Close()
	leftDS, rightDS, outDS := resolveDatasets(ctx, db, args[1], args[2], args[3])
	left, right, ancestor := getMergeCandidates(ctx, db, leftDS, rightDS)
	policy := decidePolicy(resolver)
	pc := newMergeProgressChan()
	merged, err := policy(ctx, left, right, ancestor, db, pc)
	d.CheckErrorNoUsage(err)
	close(pc) // stop the progress goroutine
	// Commit the merged value with both source heads as parents and point the
	// output dataset at it.
	_, err = db.SetHead(ctx, outDS, db.WriteValue(ctx, datas.NewCommit(merged, types.NewSet(ctx, db, leftDS.HeadRef(), rightDS.HeadRef()), types.EmptyStruct)))
	d.PanicIfError(err)
	if !verbose.Quiet() {
		status.Printf("Done")
		status.Done()
	}
	return 0
}
// resolveDatasets looks up the left, right, and output datasets by name,
// aborting with a usage error if any name fails to match datasetRe.
func resolveDatasets(ctx context.Context, db datas.Database, leftName, rightName, outName string) (leftDS, rightDS, outDS datas.Dataset) {
	makeDS := func(dsName string) datas.Dataset {
		if !datasetRe.MatchString(dsName) {
			d.CheckErrorNoUsage(fmt.Errorf("Invalid dataset %s, must match %s", dsName, datas.DatasetRe.String()))
		}
		return db.GetDataset(ctx, dsName)
	}
	return makeDS(leftName), makeDS(rightName), makeDS(outName)
}
// getMergeCandidates returns the head values of leftDS and rightDS plus the
// value of their common ancestor commit, aborting with a message if either
// dataset is empty or no common ancestor exists.
func getMergeCandidates(ctx context.Context, db datas.Database, leftDS, rightDS datas.Dataset) (left, right, ancestor types.Value) {
	leftRef, ok := leftDS.MaybeHeadRef()
	checkIfTrue(!ok, "Dataset %s has no data", leftDS.ID())
	rightRef, ok := rightDS.MaybeHeadRef()
	checkIfTrue(!ok, "Dataset %s has no data", rightDS.ID())
	ancestorCommit, ok := getCommonAncestor(ctx, leftRef, rightRef, db)
	checkIfTrue(!ok, "Datasets %s and %s have no common ancestor", leftDS.ID(), rightDS.ID())
	return leftDS.HeadValue(), rightDS.HeadValue(), ancestorCommit.Get(datas.ValueField)
}
// getCommonAncestor finds the commit struct at the common ancestor of r1 and
// r2, if one exists. It panics if the ancestor's hash cannot be read back or
// the value read is not a commit (both indicate database corruption).
func getCommonAncestor(ctx context.Context, r1, r2 types.Ref, vr types.ValueReader) (a types.Struct, found bool) {
	aRef, found := datas.FindCommonAncestor(ctx, r1, r2, vr)
	if !found {
		return
	}
	v := vr.ReadValue(ctx, aRef.TargetHash())
	if v == nil {
		panic(aRef.TargetHash().String() + " not found")
	}
	if !datas.IsCommit(v) {
		panic("Not a commit: " + types.EncodedValueMaxLines(ctx, v, 10) + " ...")
	}
	return v.(types.Struct), true
}
// newMergeProgressChan returns a channel that the merge policy signals once
// per applied change; a background goroutine drains it and updates a status
// line. The caller must close the channel to stop the goroutine.
func newMergeProgressChan() chan struct{} {
	pc := make(chan struct{}, 128)
	go func() {
		count := 0
		for range pc {
			if !verbose.Quiet() {
				count++
				status.Printf("Applied %d changes...", count)
			}
		}
	}()
	return pc
}
// decidePolicy maps the --policy flag value to a merge.Policy. Supported
// values (case-insensitive): n (none), l (ours/left), r (theirs/right),
// p (interactive prompt via cliResolve). Any other value aborts with a
// usage error.
func decidePolicy(policy string) merge.Policy {
	var resolve merge.ResolveFunc
	switch policy {
	case "n", "N":
		resolve = merge.None
	case "l", "L":
		resolve = merge.Ours
	case "r", "R":
		resolve = merge.Theirs
	case "p", "P":
		resolve = func(aType, bType types.DiffChangeType, a, b types.Value, path types.Path) (change types.DiffChangeType, merged types.Value, ok bool) {
			return cliResolve(os.Stdin, os.Stdout, aType, bType, a, b, path)
		}
	default:
		// Fixed: the message previously advertised a nonexistent 'a' policy;
		// the valid choices (matching the cases above and the --policy help
		// text) are n, l, r and p.
		d.CheckErrorNoUsage(fmt.Errorf("Unsupported merge policy: %s. Choices are n, l, r and p.", policy))
	}
	return merge.NewThreeWay(resolve)
}
// cliResolve interactively resolves a single merge conflict by printing the
// two candidate values on out and reading a one-character choice ('l' or 'r')
// from in. Only Bool, Float and String values can be rendered; anything else
// returns ok=false so the caller treats the conflict as unresolved.
func cliResolve(in io.Reader, out io.Writer, aType, bType types.DiffChangeType, a, b types.Value, path types.Path) (change types.DiffChangeType, merged types.Value, ok bool) {
	render := func(v types.Value) (string, bool) {
		switch v.(type) {
		case types.Bool, types.Float, types.String:
			return fmt.Sprintf("%v", v), true
		default:
			return "", false
		}
	}
	leftStr, leftOk := render(a)
	rightStr, rightOk := render(b)
	if !(leftOk && rightOk) {
		return change, merged, false
	}
	// TODO: Handle removes as well.
	fmt.Fprintf(out, "\nConflict at: %s\n", path.String())
	fmt.Fprintf(out, "Left: %s\nRight: %s\n\n", leftStr, rightStr)
	// Loop until the user types a recognized choice.
	for {
		fmt.Fprintln(out, "Enter 'l' to accept the left value, 'r' to accept the right value")
		var choice rune
		_, err := fmt.Fscanf(in, "%c\n", &choice)
		d.PanicIfError(err)
		switch choice {
		case 'l', 'L':
			return aType, a, true
		case 'r', 'R':
			return bType, b, true
		}
	}
}
-253
View File
@@ -1,253 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"bytes"
"context"
"io/ioutil"
"os"
"testing"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/clienttest"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
)
// nomsMergeTestSuite exercises the `noms merge` command end to end, embedding
// clienttest.ClientTestSuite, which provides Run/MustRun and a scratch
// database directory (DBDir).
type nomsMergeTestSuite struct {
	clienttest.ClientTestSuite
}
// TestNomsMerge runs the noms merge test suite.
func TestNomsMerge(t *testing.T) {
	suite.Run(t, &nomsMergeTestSuite{})
}
// TearDownTest removes the scratch database directory after each test.
func (s *nomsMergeTestSuite) TearDownTest() {
	s.NoError(os.RemoveAll(s.DBDir))
}
// TestNomsMerge_Success merges two datasets that diverged from a common
// parent in non-conflicting ways ("str" changed on the left, "map" extended
// on the right) and verifies the merged output dataset combines both edits.
func (s *nomsMergeTestSuite) TestNomsMerge_Success() {
	left, right := "left", "right"
	parentSpec := s.spec("parent")
	defer parentSpec.Close()
	leftSpec := s.spec(left)
	defer leftSpec.Close()
	rightSpec := s.spec(right)
	defer rightSpec.Close()
	// p is the common-ancestor commit shared by both branches.
	p := s.setupMergeDataset(
		parentSpec,
		types.StructData{
			"num": types.Float(42),
			"str": types.String("foobar"),
			"lst": types.NewList(context.Background(), parentSpec.GetDatabase(context.Background()), types.Float(1), types.String("foo")),
			"map": types.NewMap(context.Background(), parentSpec.GetDatabase(context.Background()), types.Float(1), types.String("foo"),
				types.String("foo"), types.Float(1)),
		},
		types.NewSet(context.Background(), parentSpec.GetDatabase(context.Background())))
	// Left branch: only "str" differs from the parent.
	l := s.setupMergeDataset(
		leftSpec,
		types.StructData{
			"num": types.Float(42),
			"str": types.String("foobaz"),
			"lst": types.NewList(context.Background(), leftSpec.GetDatabase(context.Background()), types.Float(1), types.String("foo")),
			"map": types.NewMap(context.Background(), leftSpec.GetDatabase(context.Background()), types.Float(1), types.String("foo"),
				types.String("foo"), types.Float(1)),
		},
		types.NewSet(context.Background(), leftSpec.GetDatabase(context.Background()), p))
	// Right branch: only "map" gains an extra entry relative to the parent.
	r := s.setupMergeDataset(
		rightSpec,
		types.StructData{
			"num": types.Float(42),
			"str": types.String("foobar"),
			"lst": types.NewList(context.Background(), rightSpec.GetDatabase(context.Background()), types.Float(1), types.String("foo")),
			"map": types.NewMap(context.Background(), rightSpec.GetDatabase(context.Background()), types.Float(1), types.String("foo"),
				types.String("foo"), types.Float(1), types.Float(2), types.String("bar")),
		},
		types.NewSet(context.Background(), rightSpec.GetDatabase(context.Background()), p))
	// Expected merge: left's "str" change plus right's "map" addition.
	expected := types.NewStruct("", types.StructData{
		"num": types.Float(42),
		"str": types.String("foobaz"),
		"lst": types.NewList(context.Background(), parentSpec.GetDatabase(context.Background()), types.Float(1), types.String("foo")),
		"map": types.NewMap(context.Background(), parentSpec.GetDatabase(context.Background()), types.Float(1), types.String("foo"),
			types.String("foo"), types.Float(1), types.Float(2), types.String("bar")),
	})
	output := "output"
	stdout, stderr, err := s.Run(main, []string{"merge", s.DBDir, left, right, output})
	if err == nil {
		s.Equal("", stderr)
		s.validateDataset(output, expected, l, r)
	} else {
		s.Fail("Run failed", "err: %v\nstdout: %s\nstderr: %s\n", err, stdout, stderr)
	}
}
// spec builds an nbs dataset spec for name inside the suite's scratch
// database directory, failing the test if the spec string is invalid.
func (s *nomsMergeTestSuite) spec(name string) spec.Spec {
	specStr := spec.CreateValueSpecString("nbs", s.DBDir, name)
	sp, err := spec.ForDataset(specStr)
	s.NoError(err)
	return sp
}
// setupMergeDataset commits a struct built from data, with parent set p, to
// the dataset identified by sp and returns a ref to the new head commit.
func (s *nomsMergeTestSuite) setupMergeDataset(sp spec.Spec, data types.StructData, p types.Set) types.Ref {
	ds := sp.GetDataset(context.Background())
	db := sp.GetDatabase(context.Background())
	ds, err := db.Commit(context.Background(), ds, types.NewStruct("", data), datas.CommitOptions{Parents: p})
	s.NoError(err)
	return ds.HeadRef()
}
// validateDataset asserts that dataset name has the expected merged head
// value and that its head commit's parents are exactly the given refs.
func (s *nomsMergeTestSuite) validateDataset(name string, expected types.Struct, parents ...types.Value) {
	sp, err := spec.ForDataset(spec.CreateValueSpecString("nbs", s.DBDir, name))
	if s.NoError(err) {
		defer sp.Close()
		// Fix: only touch sp after the error check. The original fetched the
		// database from sp before verifying err, which is unsafe when
		// ForDataset fails.
		db := sp.GetDatabase(context.Background())
		commit := sp.GetDataset(context.Background()).Head()
		s.True(commit.Get(datas.ParentsField).Equals(types.NewSet(context.Background(), db, parents...)))
		merged := sp.GetDataset(context.Background()).HeadValue()
		s.True(expected.Equals(merged), "%s != %s", types.EncodedValue(context.Background(), expected), types.EncodedValue(context.Background(), merged))
	}
}
// TestNomsMerge_Left creates a conflicting edit ("num" changed on both sides)
// and verifies that --policy=l resolves in favor of the left value.
func (s *nomsMergeTestSuite) TestNomsMerge_Left() {
	left, right := "left", "right"
	parentSpec := s.spec("parent")
	defer parentSpec.Close()
	leftSpec := s.spec(left)
	defer leftSpec.Close()
	rightSpec := s.spec(right)
	defer rightSpec.Close()
	// Parent commit, then two children that both modify "num".
	p := s.setupMergeDataset(parentSpec, types.StructData{"num": types.Float(42)}, types.NewSet(context.Background(), parentSpec.GetDatabase(context.Background())))
	l := s.setupMergeDataset(leftSpec, types.StructData{"num": types.Float(43)}, types.NewSet(context.Background(), leftSpec.GetDatabase(context.Background()), p))
	r := s.setupMergeDataset(rightSpec, types.StructData{"num": types.Float(44)}, types.NewSet(context.Background(), rightSpec.GetDatabase(context.Background()), p))
	// Left policy wins: expect the left branch's value.
	expected := types.NewStruct("", types.StructData{"num": types.Float(43)})
	output := "output"
	stdout, stderr, err := s.Run(main, []string{"merge", "--policy=l", s.DBDir, left, right, output})
	if err == nil {
		s.Equal("", stderr)
		s.validateDataset(output, expected, l, r)
	} else {
		s.Fail("Run failed", "err: %v\nstdout: %s\nstderr: %s\n", err, stdout, stderr)
	}
}
// TestNomsMerge_Right mirrors TestNomsMerge_Left but verifies that
// --policy=r resolves the conflicting "num" edit in favor of the right value.
func (s *nomsMergeTestSuite) TestNomsMerge_Right() {
	left, right := "left", "right"
	parentSpec := s.spec("parent")
	defer parentSpec.Close()
	leftSpec := s.spec(left)
	defer leftSpec.Close()
	rightSpec := s.spec(right)
	defer rightSpec.Close()
	// Parent commit, then two children that both modify "num".
	p := s.setupMergeDataset(parentSpec, types.StructData{"num": types.Float(42)}, types.NewSet(context.Background(), parentSpec.GetDatabase(context.Background())))
	l := s.setupMergeDataset(leftSpec, types.StructData{"num": types.Float(43)}, types.NewSet(context.Background(), leftSpec.GetDatabase(context.Background()), p))
	r := s.setupMergeDataset(rightSpec, types.StructData{"num": types.Float(44)}, types.NewSet(context.Background(), rightSpec.GetDatabase(context.Background()), p))
	// Right policy wins: expect the right branch's value.
	expected := types.NewStruct("", types.StructData{"num": types.Float(44)})
	output := "output"
	stdout, stderr, err := s.Run(main, []string{"merge", "--policy=r", s.DBDir, left, right, output})
	if err == nil {
		s.Equal("", stderr)
		s.validateDataset(output, expected, l, r)
	} else {
		s.Fail("Run failed", "err: %v\nstdout: %s\nstderr: %s\n", err, stdout, stderr)
	}
}
// TestNomsMerge_Conflict verifies that merging two branches with a
// conflicting edit and no resolution policy panics (surfaces as a failed
// MustRun).
func (s *nomsMergeTestSuite) TestNomsMerge_Conflict() {
	left, right := "left", "right"
	parentSpec := s.spec("parent")
	defer parentSpec.Close()
	leftSpec := s.spec(left)
	defer leftSpec.Close()
	rightSpec := s.spec(right)
	defer rightSpec.Close()
	// Both branches change "num" to different values — an unresolvable
	// conflict under the default policy.
	p := s.setupMergeDataset(parentSpec, types.StructData{"num": types.Float(42)}, types.NewSet(context.Background(), parentSpec.GetDatabase(context.Background())))
	s.setupMergeDataset(leftSpec, types.StructData{"num": types.Float(43)}, types.NewSet(context.Background(), leftSpec.GetDatabase(context.Background()), p))
	s.setupMergeDataset(rightSpec, types.StructData{"num": types.Float(44)}, types.NewSet(context.Background(), rightSpec.GetDatabase(context.Background()), p))
	s.Panics(func() { s.MustRun(main, []string{"merge", s.DBDir, left, right, "output"}) })
}
// TestBadInput drives `noms merge` with malformed dataset names and empty
// datasets and asserts the exact stderr diagnostics and exit code 1.
func (s *nomsMergeTestSuite) TestBadInput() {
	sp, err := spec.ForDatabase(spec.CreateDatabaseSpecString("nbs", s.DBDir))
	s.NoError(err)
	defer sp.Close()
	l, r, o := "left", "right", "output"
	type c struct {
		args []string
		err  string
	}
	// Each case: CLI args and the expected stderr output.
	cases := []c{
		{[]string{sp.String(), l + "!!", r, o}, "error: Invalid dataset " + l + "!!, must match [a-zA-Z0-9\\-_/]+\n"},
		{[]string{sp.String(), l + "2", r, o}, "error: Dataset " + l + "2 has no data\n"},
		{[]string{sp.String(), l, r + "2", o}, "error: Dataset " + r + "2 has no data\n"},
		{[]string{sp.String(), l, r, "!invalid"}, "error: Invalid dataset !invalid, must match [a-zA-Z0-9\\-_/]+\n"},
	}
	db := sp.GetDatabase(context.Background())
	// Seed "left" and "right" so only the intentionally bad names fail.
	prep := func(dsName string) {
		ds := db.GetDataset(context.Background(), dsName)
		db.CommitValue(context.Background(), ds, types.NewMap(context.Background(), db, types.String("foo"), types.String("bar")))
	}
	prep(l)
	prep(r)
	for _, c := range cases {
		stdout, stderr, err := s.Run(main, append([]string{"merge"}, c.args...))
		s.Empty(stdout, "Expected empty stdout for case: %#v", c.args)
		if !s.NotNil(err, "Unexpected success for case: %#v\n", c.args) {
			continue
		}
		// Failures must surface as ExitError with code 1, not a raw panic.
		if mainErr, ok := err.(clienttest.ExitError); ok {
			s.Equal(1, mainErr.Code)
			s.Equal(c.err, stderr, "Unexpected output for case: %#v\n", c.args)
		} else {
			s.Fail("Run() recovered non-error panic", "err: %#v\nstdout: %s\nstderr: %s\n", err, stdout, stderr)
		}
	}
}
// TestNomsMergeCliResolve table-tests cliResolve: feeding "l\n" or "r\n" on
// stdin must pick the corresponding side's change type and value.
func TestNomsMergeCliResolve(t *testing.T) {
	type c struct {
		input            string
		aChange, bChange types.DiffChangeType
		aVal, bVal       types.Value
		expectedChange   types.DiffChangeType
		expected         types.Value
		success          bool
	}
	cases := []c{
		{"l\n", types.DiffChangeAdded, types.DiffChangeAdded, types.String("foo"), types.String("bar"), types.DiffChangeAdded, types.String("foo"), true},
		{"r\n", types.DiffChangeAdded, types.DiffChangeAdded, types.String("foo"), types.String("bar"), types.DiffChangeAdded, types.String("bar"), true},
		{"l\n", types.DiffChangeAdded, types.DiffChangeAdded, types.Float(7), types.String("bar"), types.DiffChangeAdded, types.Float(7), true},
		{"r\n", types.DiffChangeModified, types.DiffChangeModified, types.Float(7), types.String("bar"), types.DiffChangeModified, types.String("bar"), true},
	}
	for _, c := range cases {
		input := bytes.NewBufferString(c.input)
		// Output is discarded; only the returned resolution is checked.
		changeType, newVal, ok := cliResolve(input, ioutil.Discard, c.aChange, c.bChange, c.aVal, c.bVal, types.Path{})
		if !c.success {
			assert.False(t, ok)
		} else if assert.True(t, ok) {
			assert.Equal(t, c.expectedChange, changeType)
			assert.True(t, c.expected.Equals(newVal))
		}
	}
}
-114
View File
@@ -1,114 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"os"
"strings"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/config"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/hash"
"github.com/attic-labs/noms/go/types"
flag "github.com/juju/gnuflag"
)
// nomsRoot is the `noms root` command: prints the database's current root
// hash, or replaces it when --update is given (see setupRootFlags/runRoot).
var nomsRoot = &util.Command{
	Run:       runRoot,
	UsageLine: "root <db-spec>",
	Short:     "Get or set the current root hash of the entire database",
	Long:      "See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the database argument.",
	Flags:     setupRootFlags,
	Nargs:     1,
}
// updateRoot holds the value of the --update flag; empty means "print only".
var updateRoot = ""
// setupRootFlags registers the --update flag for the `noms root` command.
func setupRootFlags() *flag.FlagSet {
	fs := flag.NewFlagSet("root", flag.ExitOnError)
	fs.StringVar(&updateRoot, "update", "", "Replaces the entire database with the one with the given hash")
	return fs
}
// runRoot implements `noms root`. With no flags it prints the database's
// current root hash; with --update=<hash> it validates the target root and,
// after an interactive confirmation, atomically swaps the database root.
// Returns the process exit code.
func runRoot(ctx context.Context, args []string) int {
	if len(args) < 1 {
		fmt.Fprintln(os.Stderr, "Not enough arguments")
		// Fix: a usage error must exit non-zero (previously returned 0).
		return 1
	}
	cfg := config.NewResolver()
	cs, err := cfg.GetChunkStore(ctx, args[0])
	d.CheckErrorNoUsage(err)
	currRoot := cs.Root(ctx)
	if updateRoot == "" {
		fmt.Println(currRoot)
		return 0
	}
	// Accept hashes spelled with a leading '#', as printed by other commands.
	if updateRoot[0] == '#' {
		updateRoot = updateRoot[1:]
	}
	h, ok := hash.MaybeParse(updateRoot)
	if !ok {
		// Fix: report the string the user actually typed. On parse failure h
		// is the zero hash, so printing h.String() was useless.
		fmt.Fprintf(os.Stderr, "Invalid hash: %s\n", updateRoot)
		return 1
	}
	// If BUG 3407 is correct, we might be able to just take cs and make a Database directly from that.
	db, err := cfg.GetDatabase(ctx, args[0])
	d.CheckErrorNoUsage(err)
	defer db.Close()
	if !validate(ctx, db.ReadValue(ctx, h)) {
		return 1
	}
	fmt.Println(`WARNING
This operation replaces the entire database with the instance having the given
hash. The old database becomes eligible for GC.
ANYTHING NOT SAVED WILL BE LOST
Continue?`)
	var input string
	n, err := fmt.Scanln(&input)
	d.CheckErrorNoUsage(err)
	// Anything other than a single "y"/"Y" aborts without error.
	if n != 1 || strings.ToLower(input) != "y" {
		return 0
	}
	// Optimistic-concurrency swap: fails if the root moved since we read it.
	if !cs.Commit(ctx, h, currRoot) {
		fmt.Fprintln(os.Stderr, "Optimistic concurrency failure")
		return 1
	}
	fmt.Printf("Success. Previous root was: %s\n", currRoot)
	return 0
}
// validate returns true iff r has the shape of a database root: a
// Map<String, Ref<Commit>>. Diagnostics for failures go to stderr.
func validate(ctx context.Context, r types.Value) bool {
	rootType := types.MakeMapType(types.StringType, types.MakeRefType(types.ValueType))
	if !types.IsValueSubtypeOf(r, rootType) {
		fmt.Fprintf(os.Stderr, "Root of database must be %s, but you specified: %s\n", rootType.Describe(ctx), types.TypeOf(r).Describe(ctx))
		return false
	}
	// Every map value must be a ref-of-commit; Any short-circuits on the
	// first failure.
	return r.(types.Map).Any(ctx, func(k, v types.Value) bool {
		if !datas.IsRefOfCommitType(types.TypeOf(v)) {
			// Fix: add the trailing newline every other diagnostic here has.
			fmt.Fprintf(os.Stderr, "Invalid root map. Value for key '%s' is not a ref of commit.\n", string(k.(types.String)))
			return false
		}
		return true
	})
}
-44
View File
@@ -1,44 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"testing"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/clienttest"
"github.com/stretchr/testify/suite"
)
// TestNomsRoot runs the noms root test suite.
func TestNomsRoot(t *testing.T) {
	suite.Run(t, &nomsRootTestSuite{})
}
// nomsRootTestSuite exercises the `noms root` command against a scratch
// database directory provided by clienttest.ClientTestSuite.
type nomsRootTestSuite struct {
	clienttest.ClientTestSuite
}
// TestBasic commits two values and checks that `noms root` prints the
// expected (content-addressed, hence stable) root hash after each commit.
func (s *nomsRootTestSuite) TestBasic() {
	datasetName := "root-get"
	dsSpec := spec.CreateValueSpecString("nbs", s.DBDir, datasetName)
	sp, err := spec.ForDataset(dsSpec)
	s.NoError(err)
	defer sp.Close()
	ds := sp.GetDataset(context.Background())
	dbSpecStr := spec.CreateDatabaseSpecString("nbs", s.DBDir)
	// Commit errors are ignored here — presumably acceptable in this fixture,
	// but worth confirming.
	ds, _ = ds.Database().CommitValue(context.Background(), ds, types.String("hello!"))
	c1, _ := s.MustRun(main, []string{"root", dbSpecStr})
	s.Equal("5te45oue1g918rpcvmc3d2emqkse4fhq\n", c1)
	ds, _ = ds.Database().CommitValue(context.Background(), ds, types.String("goodbye"))
	c2, _ := s.MustRun(main, []string{"root", dbSpecStr})
	s.Equal("nm81pr21t66nec3v8jts5e37njg5ab1g\n", c2)
	// TODO: Would be good to test successful --update too, but requires changes to MustRun to allow
	// input because of prompt :(.
}
-146
View File
@@ -1,146 +0,0 @@
// Copyright 2018 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"bytes"
"context"
"fmt"
"strconv"
"github.com/attic-labs/kingpin"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/diff"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
)
// nomsSet registers the `noms set` subcommands (new, insert, del) on noms
// and returns a handler that dispatches the parsed command line to the
// matching implementation.
func nomsSet(ctx context.Context, noms *kingpin.Application) (*kingpin.CmdClause, util.KingpinHandler) {
	set := noms.Command("set", "interact with sets")

	newCmd := set.Command("new", "creates a new set")
	newDb := newCmd.Arg("database", "spec to db to create set within").Required().String()
	newEntries := newCmd.Arg("items", "items to insert").Strings()

	insertCmd := set.Command("insert", "inserts one or more items into a set")
	insertSpec := insertCmd.Arg("spec", "value spec for the set to edit").Required().String()
	insertEntries := insertCmd.Arg("items", "items to insert").Strings()

	delCmd := set.Command("del", "removes one or more items from a set")
	delSpec := delCmd.Arg("spec", "value spec for the set to edit").Required().String()
	delEntries := delCmd.Arg("items", "items to delete").Strings()

	handler := func(input string) int {
		switch input {
		case newCmd.FullCommand():
			return nomsSetNew(ctx, *newDb, *newEntries)
		case insertCmd.FullCommand():
			return nomsSetInsert(ctx, *insertSpec, *insertEntries)
		case delCmd.FullCommand():
			return nomsSetDel(ctx, *delSpec, *delEntries)
		}
		d.Panic("notreached")
		return 1
	}
	return set, handler
}
// nomsSetNew creates an empty set in the database at dbStr and inserts the
// given items into it, printing the spec of the result. Always returns 0.
func nomsSetNew(ctx context.Context, dbStr string, args []string) int {
	sp, err := spec.ForDatabase(dbStr)
	d.PanicIfError(err)
	empty := types.NewSet(ctx, sp.GetDatabase(ctx))
	applySetEdits(ctx, sp, empty, nil, types.DiffChangeAdded, args)
	return 0
}
// nomsSetInsert inserts the given items into the set addressed by specStr.
func nomsSetInsert(ctx context.Context, specStr string, args []string) int {
	sp, err := spec.ForPath(specStr)
	d.PanicIfError(err)
	root, base := splitPath(ctx, sp)
	applySetEdits(ctx, sp, root, base, types.DiffChangeAdded, args)
	return 0
}
// nomsSetDel removes the given items from the set addressed by specStr.
func nomsSetDel(ctx context.Context, specStr string, args []string) int {
	sp, err := spec.ForPath(specStr)
	d.PanicIfError(err)
	root, base := splitPath(ctx, sp)
	applySetEdits(ctx, sp, root, base, types.DiffChangeRemoved, args)
	return 0
}
// applySetEdits converts each command-line argument into a set insertion or
// removal (per ct) rooted at basePath, then applies the whole patch via
// appplyPatch.
func applySetEdits(ctx context.Context, sp spec.Spec, rootVal types.Value, basePath types.Path, ct types.DiffChangeType, args []string) {
	if rootVal == nil {
		d.CheckErrorNoUsage(fmt.Errorf("No value at: %s", sp.String()))
		return
	}
	db := sp.GetDatabase(ctx)
	patch := make(diff.Patch, 0, len(args))
	for i := 0; i < len(args); i++ {
		vv, err := argumentToValue(ctx, args[i], db)
		if err != nil {
			d.CheckErrorNoUsage(err)
		}
		// Values usable as path indices are addressed by value; everything
		// else is addressed by its hash.
		var pp types.PathPart
		if types.ValueCanBePathIndex(vv) {
			pp = types.NewIndexPath(vv)
		} else {
			pp = types.NewHashIndexPath(vv.Hash())
		}
		// Fix: clone basePath before extending it. Appending to the shared
		// slice inside the loop could alias every Difference's Path to one
		// backing array, making all entries end with the last element.
		// (Renamed the local from `d`, which shadowed the d package.)
		path := append(basePath[:len(basePath):len(basePath)], pp)
		edit := diff.Difference{
			Path: path,
		}
		if ct == types.DiffChangeAdded {
			edit.NewValue = vv
		} else {
			edit.OldValue = vv
		}
		patch = append(patch, edit)
	}
	appplyPatch(ctx, sp, rootVal, basePath, patch)
}
// argumentToValue converts a command-line argument into a noms value.
// Supported forms: "true"/"false" (Bool); a double-quoted string where only
// \\ and \" may be escaped (String); @<absolute-path>, resolved against db;
// a parseable number (Float); anything else becomes a raw String.
func argumentToValue(ctx context.Context, arg string, db datas.Database) (types.Value, error) {
	d.PanicIfTrue(arg == "")
	if arg == "true" {
		return types.Bool(true), nil
	}
	if arg == "false" {
		return types.Bool(false), nil
	}
	if arg[0] == '"' {
		buf := bytes.Buffer{}
		for i := 1; i < len(arg); i++ {
			c := arg[i]
			if c == '"' {
				// A closing quote is only valid as the final character.
				if i != len(arg)-1 {
					break
				}
				return types.String(buf.String()), nil
			}
			if c == '\\' {
				i++
				// Fix: a lone trailing backslash previously indexed past the
				// end of arg and panicked; report it as a bad argument.
				if i == len(arg) {
					return nil, fmt.Errorf("Invalid string argument: %s", arg)
				}
				c = arg[i]
				if c != '\\' && c != '"' {
					return nil, fmt.Errorf("Invalid string argument: %s: Only '\\' and '\"' can be escaped", arg)
				}
			}
			buf.WriteByte(c)
		}
		// No (valid) closing quote was found.
		return nil, fmt.Errorf("Invalid string argument: %s", arg)
	}
	if arg[0] == '@' {
		p, err := spec.NewAbsolutePath(arg[1:])
		d.PanicIfError(err)
		return p.Resolve(ctx, db), nil
	}
	if n, err := strconv.ParseFloat(arg, 64); err == nil {
		return types.Float(n), nil
	}
	return types.String(arg), nil
}
-94
View File
@@ -1,94 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"bytes"
"context"
"fmt"
"io"
"os"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/config"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/datetime"
"github.com/attic-labs/noms/go/util/outputpager"
"github.com/attic-labs/noms/go/util/verbose"
flag "github.com/juju/gnuflag"
)
// nomsShow is the `noms show` command: prints a serialization of a single
// Noms object (pretty-printed, raw bytes, or stats; see runShow).
var nomsShow = &util.Command{
	Run:       runShow,
	UsageLine: "show [flags] <object>",
	Short:     "Shows a serialization of a Noms object",
	Long:      "See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the object argument.",
	Flags:     setupShowFlags,
	Nargs:     1,
}
var (
	showRaw   = false // --raw: dump the raw binary chunk for the value
	showStats = false // --stats: print value statistics instead of the value
	showPages = false // --page: route output through a pager
	tzName    string  // --tz: timezone for formatted date comments
)
// setupShowFlags registers the flags for the `noms show` command.
func setupShowFlags() *flag.FlagSet {
	fs := flag.NewFlagSet("show", flag.ExitOnError)
	outputpager.RegisterOutputpagerFlags(fs)
	verbose.RegisterVerboseFlags(fs)
	fs.BoolVar(&showPages, "page", false, "If true output is shown in an output pager")
	fs.BoolVar(&showRaw, "raw", false, "If true, dumps the raw binary version of the data")
	fs.BoolVar(&showStats, "stats", false, "If true, reports statistics related to the value")
	fs.StringVar(&tzName, "tz", "local", "display formatted date comments in specified timezone, must be: local or utc")
	return fs
}
// runShow implements `noms show`: resolves args[0] to a value and prints it.
// --raw dumps the value's raw chunk bytes to stdout, --stats prints value
// statistics, and the default pretty-prints (optionally through a pager).
// Returns the process exit code.
func runShow(ctx context.Context, args []string) int {
	cfg := config.NewResolver()
	database, value, err := cfg.GetPath(ctx, args[0])
	d.CheckErrorNoUsage(err)
	defer database.Close()
	if value == nil {
		// Not-found deliberately exits 0; the test suite asserts this.
		fmt.Fprintf(os.Stderr, "Object not found: %s\n", args[0])
		return 0
	}
	if showRaw && showStats {
		fmt.Fprintln(os.Stderr, "--raw and --stats are mutually exclusive")
		// Fix: flag misuse is an error and must exit non-zero (was 0).
		return 1
	}
	if showRaw {
		ch := types.EncodeValue(value)
		buf := bytes.NewBuffer(ch.Data())
		_, err = io.Copy(os.Stdout, buf)
		d.CheckError(err)
		return 0
	}
	if showStats {
		types.WriteValueStats(ctx, os.Stdout, value, database)
		return 0
	}
	// NOTE(review): an error from an unrecognized --tz value is silently
	// dropped here — confirm whether falling back to the zero location is
	// intended.
	tz, _ := locationFromTimezoneArg(tzName, nil)
	datetime.RegisterHRSCommenter(tz)
	if showPages {
		pgr := outputpager.Start()
		defer pgr.Stop()
		types.WriteEncodedValue(ctx, pgr.Writer, value)
		fmt.Fprintln(pgr.Writer)
	} else {
		types.WriteEncodedValue(ctx, os.Stdout, value)
	}
	return 0
}
-129
View File
@@ -1,129 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"testing"
"github.com/attic-labs/noms/go/chunks"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/clienttest"
"github.com/attic-labs/noms/go/util/test"
"github.com/stretchr/testify/suite"
)
// TestNomsShow runs the noms show test suite.
func TestNomsShow(t *testing.T) {
	suite.Run(t, &nomsShowTestSuite{})
}
// nomsShowTestSuite exercises the `noms show` command against a scratch
// database directory provided by clienttest.ClientTestSuite.
type nomsShowTestSuite struct {
	clienttest.ClientTestSuite
}
// Expected `noms show` outputs. res3/res5 contain hashes that vary per run
// and are compared with test.EqualsIgnoreHashes; the rest are exact.
const (
	res1 = "struct Commit {\n  meta: struct {},\n  parents: set {},\n  value: #nl181uu1ioc2j6t7mt9paidjlhlcjtgj,\n}"
	res2 = "\"test string\""
	res3 = "struct Commit {\n  meta: struct {},\n  parents: set {\n    #4g7ggl6999v5mlucl4a507n7k3kvckiq,\n  },\n  value: #82adk7hfcudg8fktittm672to66t6qeu,\n}"
	res4 = "[\n  \"elem1\",\n  2,\n  \"elem3\",\n]"
	res5 = "struct Commit {\n  meta: struct {},\n  parents: set {\n    #3tmg89vabs2k6hotdock1kuo13j4lmqv,\n  },\n  value: #5cgfu2vk4nc21m1vjkjjpd2kvcm2df7q,\n}"
)
// spec parses str as a dataset spec, failing the test if it is invalid.
func (s *nomsShowTestSuite) spec(str string) spec.Spec {
	parsed, err := spec.ForDataset(str)
	s.NoError(err)
	return parsed
}
// writeTestData writes value into the database behind str, commits a ref to
// it on the spec's dataset, and returns that ref.
func (s *nomsShowTestSuite) writeTestData(str string, value types.Value) types.Ref {
	sp := s.spec(str)
	defer sp.Close()
	db := sp.GetDatabase(context.Background())
	ref := db.WriteValue(context.Background(), value)
	_, err := db.CommitValue(context.Background(), sp.GetDataset(context.Background()), ref)
	s.NoError(err)
	return ref
}
// TestNomsShow commits several values and checks `noms show` output for both
// dataset specs (commit struct) and direct hash specs (the value itself).
func (s *nomsShowTestSuite) TestNomsShow() {
	datasetName := "dsTest"
	str := spec.CreateValueSpecString("nbs", s.DBDir, datasetName)
	s1 := types.String("test string")
	r := s.writeTestData(str, s1)
	// Showing the dataset prints the commit struct.
	res, _ := s.MustRun(main, []string{"show", str})
	s.Equal(res1, res)
	// Showing the value's hash prints the value itself.
	str1 := spec.CreateValueSpecString("nbs", s.DBDir, "#"+r.TargetHash().String())
	res, _ = s.MustRun(main, []string{"show", str1})
	s.Equal(res2, res)
	sp := s.spec(str)
	defer sp.Close()
	list := types.NewList(context.Background(), sp.GetDatabase(context.Background()), types.String("elem1"), types.Float(2), types.String("elem3"))
	r = s.writeTestData(str, list)
	// Second commit: the commit struct now has a parent (hashes ignored).
	res, _ = s.MustRun(main, []string{"show", str})
	test.EqualsIgnoreHashes(s.T(), res3, res)
	str1 = spec.CreateValueSpecString("nbs", s.DBDir, "#"+r.TargetHash().String())
	res, _ = s.MustRun(main, []string{"show", str1})
	s.Equal(res4, res)
	_ = s.writeTestData(str, s1)
	res, _ = s.MustRun(main, []string{"show", str})
	test.EqualsIgnoreHashes(s.T(), res5, res)
}
// TestNomsShowNotFound verifies that showing a missing dataset prints a
// not-found message on stderr and still exits successfully (err is nil).
func (s *nomsShowTestSuite) TestNomsShowNotFound() {
	str := spec.CreateValueSpecString("nbs", s.DBDir, "not-there")
	stdout, stderr, err := s.Run(main, []string{"show", str})
	s.Equal("", stdout)
	s.Equal(fmt.Sprintf("Object not found: %s\n", str), stderr)
	s.Nil(err)
}
// TestNomsShowRaw round-trips values through `noms show --raw`: the raw chunk
// bytes printed for a value's hash must decode back to an equal value.
func (s *nomsShowTestSuite) TestNomsShowRaw() {
	datasetName := "showRaw"
	str := spec.CreateValueSpecString("nbs", s.DBDir, datasetName)
	sp, err := spec.ForDataset(str)
	s.NoError(err)
	defer sp.Close()
	db := sp.GetDatabase(context.Background())
	// Put a value into the db, get its raw serialization, then deserialize it and ensure it comes
	// out to same thing.
	test := func(in types.Value) {
		r1 := db.WriteValue(context.Background(), in)
		// NOTE(review): the CommitValue error is ignored here.
		db.CommitValue(context.Background(), sp.GetDataset(context.Background()), r1)
		res, _ := s.MustRun(main, []string{"show", "--raw",
			spec.CreateValueSpecString("nbs", s.DBDir, "#"+r1.TargetHash().String())})
		ch := chunks.NewChunk([]byte(res))
		out := types.DecodeValue(ch, db)
		s.True(out.Equals(in))
	}
	// Primitive value with no child chunks
	test(types.String("hello"))
	// Ref (one child chunk)
	test(db.WriteValue(context.Background(), types.Float(42)))
	// Prolly tree with multiple child chunks
	items := make([]types.Value, 10000)
	for i := 0; i < len(items); i++ {
		items[i] = types.Float(i)
	}
	l := types.NewList(context.Background(), db, items...)
	numChildChunks := 0
	l.WalkRefs(func(r types.Ref) {
		numChildChunks++
	})
	s.True(numChildChunks > 0)
	test(l)
}
-31
View File
@@ -1,31 +0,0 @@
// Copyright 2017 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"github.com/attic-labs/kingpin"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/config"
"github.com/attic-labs/noms/go/d"
)
// nomsStats registers the `noms stats` command, whose handler prints the
// stats summary of the given database and returns exit code 0.
func nomsStats(ctx context.Context, noms *kingpin.Application) (*kingpin.CmdClause, util.KingpinHandler) {
	stats := noms.Command("stats", "Shows stats summary for a Noms Database")
	dbArg := stats.Arg("database", "See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the database argument.").Required().String()
	handler := func(input string) int {
		cfg := config.NewResolver()
		store, err := cfg.GetDatabase(ctx, *dbArg)
		d.CheckError(err)
		defer store.Close()
		fmt.Println(store.StatsSummary())
		return 0
	}
	return stats, handler
}
-144
View File
@@ -1,144 +0,0 @@
// Copyright 2018 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"github.com/attic-labs/kingpin"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/diff"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
)
// nomsStruct registers the `noms struct` subcommands (new, set, del) on noms
// and returns a handler that dispatches to the matching implementation.
func nomsStruct(ctx context.Context, noms *kingpin.Application) (*kingpin.CmdClause, util.KingpinHandler) {
	strukt := noms.Command("struct", "interact with structs")

	newCmd := strukt.Command("new", "creates a new struct")
	newDb := newCmd.Arg("database", "spec to db to create struct within").Required().String()
	newName := newCmd.Flag("name", "name for new struct").String()
	newFields := newCmd.Arg("fields", "key/value pairs for field names and values").Strings()

	setCmd := strukt.Command("set", "sets one or more fields of a struct")
	setSpec := setCmd.Arg("spec", "value spec for the struct to edit").Required().String()
	setFields := setCmd.Arg("fields", "key/value pairs for field names and values").Strings()

	delCmd := strukt.Command("del", "removes one or more fields from a struct")
	delSpec := delCmd.Arg("spec", "value spec for the struct to edit").Required().String()
	delFields := delCmd.Arg("fields", "fields to be removed").Strings()

	handler := func(input string) int {
		switch input {
		case newCmd.FullCommand():
			return nomsStructNew(ctx, *newDb, *newName, *newFields)
		case setCmd.FullCommand():
			return nomsStructSet(ctx, *setSpec, *setFields)
		case delCmd.FullCommand():
			return nomsStructDel(ctx, *delSpec, *delFields)
		}
		d.Panic("notreached")
		return 1
	}
	return strukt, handler
}
// nomsStructNew creates a new (empty) struct named name in the database at
// dbStr and applies args as field edits, printing the result's spec.
func nomsStructNew(ctx context.Context, dbStr string, name string, args []string) int {
	sp, err := spec.ForDatabase(dbStr)
	d.PanicIfError(err)
	empty := types.NewStruct(name, nil)
	applyStructEdits(ctx, sp, empty, nil, args)
	return 0
}
// nomsStructSet sets one or more fields on the struct addressed by specStr.
func nomsStructSet(ctx context.Context, specStr string, args []string) int {
	sp, err := spec.ForPath(specStr)
	d.PanicIfError(err)
	root, base := splitPath(ctx, sp)
	applyStructEdits(ctx, sp, root, base, args)
	return 0
}
// nomsStructDel removes the named fields from the struct addressed by
// specStr, then applies the patch and prints the result's spec.
func nomsStructDel(ctx context.Context, specStr string, args []string) int {
	sp, err := spec.ForPath(specStr)
	d.PanicIfError(err)
	rootVal, basePath := splitPath(ctx, sp)
	patch := diff.Patch{}
	for i := 0; i < len(args); i++ {
		if !types.IsValidStructFieldName(args[i]) {
			d.CheckError(fmt.Errorf("Invalid field name: %s at position: %d", args[i], i))
		}
		// Fix: clone basePath before extending it. Appending to the shared
		// slice inside the loop could alias every Difference's Path to one
		// backing array, leaving all entries pointing at the last field.
		fieldPath := append(basePath[:len(basePath):len(basePath)], types.FieldPath{Name: args[i]})
		patch = append(patch, diff.Difference{
			Path:       fieldPath,
			ChangeType: types.DiffChangeRemoved,
		})
	}
	appplyPatch(ctx, sp, rootVal, basePath, patch)
	return 0
}
// splitPath resolves the root value referenced by sp and returns it together
// with the path relative to that root. On failure it reports the error and
// returns a nil rootVal.
func splitPath(ctx context.Context, sp spec.Spec) (rootVal types.Value, basePath types.Path) {
	db := sp.GetDatabase(ctx)
	// Resolve the spec with its relative path stripped to obtain the root.
	rootPath := sp.Path
	rootPath.Path = types.Path{}
	rootVal = rootPath.Resolve(ctx, db)
	if rootVal == nil {
		d.CheckError(fmt.Errorf("Invalid path: %s", sp.String()))
		return nil, nil
	}
	return rootVal, sp.Path.Path
}
// applyStructEdits parses args as alternating field-name/value pairs and
// applies them as field modifications to the struct at basePath within
// rootVal, then commits the patch and prints the result's spec.
func applyStructEdits(ctx context.Context, sp spec.Spec, rootVal types.Value, basePath types.Path, args []string) {
	if len(args)%2 != 0 {
		d.CheckError(fmt.Errorf("Must be an even number of key/value pairs"))
	}
	if rootVal == nil {
		d.CheckErrorNoUsage(fmt.Errorf("No value at: %s", sp.String()))
		return
	}
	db := sp.GetDatabase(ctx)
	patch := diff.Patch{}
	for i := 0; i < len(args); i += 2 {
		if !types.IsValidStructFieldName(args[i]) {
			d.CheckError(fmt.Errorf("Invalid field name: %s at position: %d", args[i], i))
		}
		nv, err := argumentToValue(ctx, args[i+1], db)
		if err != nil {
			d.CheckError(fmt.Errorf("Invalid field value: %s at position %d: %s", args[i+1], i+1, err))
		}
		// Fix: clone basePath before extending it. Appending to the shared
		// slice inside the loop could alias every Difference's Path to one
		// backing array, leaving all entries pointing at the last field.
		fieldPath := append(basePath[:len(basePath):len(basePath)], types.FieldPath{Name: args[i]})
		patch = append(patch, diff.Difference{
			Path:       fieldPath,
			ChangeType: types.DiffChangeModified,
			NewValue:   nv,
		})
	}
	appplyPatch(ctx, sp, rootVal, basePath, patch)
}
// appplyPatch applies patch to rootVal, writes the new root value to the
// database, and prints a hash-based spec addressing the result at basePath.
// NOTE(review): the triple-p spelling is a typo, but it is referenced by
// sibling functions in this file, so renaming it requires a coordinated
// change.
func appplyPatch(ctx context.Context, sp spec.Spec, rootVal types.Value, basePath types.Path, patch diff.Patch) {
	db := sp.GetDatabase(ctx)
	// Sanity-check that basePath still resolves within rootVal.
	baseVal := basePath.Resolve(ctx, rootVal, db)
	if baseVal == nil {
		d.CheckErrorNoUsage(fmt.Errorf("No value at: %s", sp.String()))
	}
	newRootVal := diff.Apply(ctx, rootVal, patch)
	d.Chk.NotNil(newRootVal)
	// Persist and flush the new root before printing its address.
	r := db.WriteValue(ctx, newRootVal)
	db.Flush(ctx)
	newAbsPath := spec.AbsolutePath{
		Hash: r.TargetHash(),
		Path: basePath,
	}
	newSpec := sp
	newSpec.Path = newAbsPath
	fmt.Println(newSpec.String())
}
-127
View File
@@ -1,127 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"log"
"time"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/config"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/profile"
"github.com/attic-labs/noms/go/util/status"
"github.com/attic-labs/noms/go/util/verbose"
humanize "github.com/dustin/go-humanize"
flag "github.com/juju/gnuflag"
)
var (
	p int // -p flag: sync parallelism (default 512)
)
// nomsSync is the `noms sync` command: pulls the chunks reachable from a
// source object into a destination dataset (see runSync).
var nomsSync = &util.Command{
	Run:       runSync,
	UsageLine: "sync [options] <source-object> <dest-dataset>",
	Short:     "Moves datasets between or within databases",
	Long:      "See Spelling Objects at https://github.com/attic-labs/noms/blob/master/doc/spelling.md for details on the object and dataset arguments.",
	Flags:     setupSyncFlags,
	Nargs:     2,
}
// setupSyncFlags registers the -p (parallelism), verbosity and profiling
// flags for the `noms sync` command.
func setupSyncFlags() *flag.FlagSet {
	fs := flag.NewFlagSet("sync", flag.ExitOnError)
	fs.IntVar(&p, "p", 512, "parallelism")
	verbose.RegisterVerboseFlags(fs)
	profile.RegisterProfileFlags(fs)
	return fs
}
// runSync implements 'noms sync': it pulls the object named by args[0] into
// the dataset named by args[1], fast-forwarding the sink when possible and
// force-setting its head otherwise. Progress is streamed to the terminal.
// Returns 0 on success; fatal errors exit the process via d.CheckError or
// log.Fatal.
func runSync(ctx context.Context, args []string) int {
	cfg := config.NewResolver()
	sourceStore, sourceObj, err := cfg.GetPath(ctx, args[0])
	d.CheckError(err)
	defer sourceStore.Close()
	if sourceObj == nil {
		d.CheckErrorNoUsage(fmt.Errorf("Object not found: %s", args[0]))
	}
	sinkDB, sinkDataset, err := cfg.GetDataset(ctx, args[1])
	d.CheckError(err)
	defer sinkDB.Close()
	start := time.Now()
	// The pull reports progress on progressCh; a dedicated goroutine renders
	// it and, once progressCh is closed, forwards the final sample on
	// lastProgressCh so the summary below can use it.
	progressCh := make(chan datas.PullProgress)
	lastProgressCh := make(chan datas.PullProgress)
	go func() {
		var last datas.PullProgress
		for info := range progressCh {
			last = info
			if info.KnownCount == 1 {
				// It's better to print "up to date" than "0% (0/1); 100% (1/1)".
				continue
			}
			if status.WillPrint() {
				pct := 100.0 * float64(info.DoneCount) / float64(info.KnownCount)
				status.Printf("Syncing - %.2f%% (%s/s)", pct, bytesPerSec(info.ApproxWrittenBytes, start))
			}
		}
		lastProgressCh <- last
	}()
	sourceRef := types.NewRef(sourceObj)
	// Capture the sink's current head before the pull so the non-fast-forward
	// message below can report what was abandoned.
	sinkRef, sinkExists := sinkDataset.MaybeHeadRef()
	nonFF := false
	err = d.Try(func() {
		defer profile.MaybeStartProfile().Stop()
		datas.Pull(ctx, sourceStore, sinkDB, sourceRef, progressCh)
		var err error
		sinkDataset, err = sinkDB.FastForward(ctx, sinkDataset, sourceRef)
		if err == datas.ErrMergeNeeded {
			// The histories diverge; force the head and remember that this
			// was not a fast-forward.
			sinkDataset, err = sinkDB.SetHead(ctx, sinkDataset, sourceRef)
			nonFF = true
		}
		d.PanicIfError(err)
	})
	if err != nil {
		log.Fatal(err)
	}
	// Closing progressCh ends the progress goroutine's range loop, which then
	// delivers the final progress sample on lastProgressCh.
	close(progressCh)
	if last := <-lastProgressCh; last.DoneCount > 0 {
		status.Printf("Done - Synced %s in %s (%s/s)",
			humanize.Bytes(last.ApproxWrittenBytes), since(start), bytesPerSec(last.ApproxWrittenBytes, start))
		status.Done()
	} else if !sinkExists {
		fmt.Printf("All chunks already exist at destination! Created new dataset %s.\n", args[1])
	} else if nonFF && !sourceRef.Equals(sinkRef) {
		fmt.Printf("Abandoning %s; new head is %s\n", sinkRef.TargetHash(), sourceRef.TargetHash())
	} else {
		fmt.Printf("Dataset %s is already up to date.\n", args[1])
	}
	return 0
}
// bytesPerSec formats the average throughput since start as a human-readable
// byte count per second (e.g. "12 MB"). If no measurable time has elapsed,
// it reports zero throughput: the Go spec leaves uint64(+Inf) undefined, so
// dividing by a zero elapsed time must be avoided.
func bytesPerSec(bytes uint64, start time.Time) string {
	elapsed := time.Since(start).Seconds() // already a float64
	if elapsed <= 0 {
		return humanize.Bytes(0)
	}
	return humanize.Bytes(uint64(float64(bytes) / elapsed))
}
func since(start time.Time) string {
round := time.Second / 100
now := time.Now().Round(round)
return now.Sub(start.Round(round)).String()
}
-156
View File
@@ -1,156 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"os"
"testing"
"github.com/attic-labs/noms/go/datas"
"github.com/attic-labs/noms/go/nbs"
"github.com/attic-labs/noms/go/spec"
"github.com/attic-labs/noms/go/types"
"github.com/attic-labs/noms/go/util/clienttest"
"github.com/stretchr/testify/suite"
)
func TestSync(t *testing.T) {
suite.Run(t, &nomsSyncTestSuite{})
}
// nomsSyncTestSuite groups the sync command tests. The embedded
// clienttest.ClientTestSuite supplies the DBDir/DBDir2 directories and the
// MustRun helper used by every test below.
type nomsSyncTestSuite struct {
	clienttest.ClientTestSuite
}
// TestSyncValidation checks that 'noms sync' exits with status 1 when the
// source spec contains a bare hash missing the leading '#'.
func (s *nomsSyncTestSuite) TestSyncValidation() {
	sourceDB := datas.NewDatabase(nbs.NewLocalStore(context.Background(), s.DBDir, clienttest.DefaultMemTableSize))
	source1 := sourceDB.GetDataset(context.Background(), "src")
	source1, err := sourceDB.CommitValue(context.Background(), source1, types.Float(42))
	s.NoError(err)
	source1HeadRef := source1.Head().Hash()
	source1.Database().Close()
	// Deliberately omit the '#' prefix on the hash portion of the spec.
	sourceSpecMissingHashSymbol := spec.CreateValueSpecString("nbs", s.DBDir, source1HeadRef.String())
	sinkDatasetSpec := spec.CreateValueSpecString("nbs", s.DBDir2, "dest")
	// The command is expected to exit non-zero, which MustRun surfaces as a
	// panic with clienttest.ExitError{1}; recover and assert on it.
	defer func() {
		err := recover()
		s.Equal(clienttest.ExitError{1}, err)
	}()
	s.MustRun(main, []string{"sync", sourceSpecMissingHashSymbol, sinkDatasetSpec})
}
// TestSync exercises the main sync flows end to end: pulling a hash into a
// fresh dataset, fast-forwarding an existing dataset, the already-up-to-date
// case, and syncing into a new dataset whose chunks already exist in the sink.
func (s *nomsSyncTestSuite) TestSync() {
	defer s.NoError(os.RemoveAll(s.DBDir2))
	sourceDB := datas.NewDatabase(nbs.NewLocalStore(context.Background(), s.DBDir, clienttest.DefaultMemTableSize))
	source1 := sourceDB.GetDataset(context.Background(), "src")
	source1, err := sourceDB.CommitValue(context.Background(), source1, types.Float(42))
	s.NoError(err)
	source1HeadRef := source1.Head().Hash() // Remember first head, so we can sync to it.
	source1, err = sourceDB.CommitValue(context.Background(), source1, types.Float(43))
	s.NoError(err)
	// Close before invoking the CLI, which reopens the store itself.
	sourceDB.Close()
	// Pull from a hash to a not-yet-existing dataset in a new DB
	sourceSpec := spec.CreateValueSpecString("nbs", s.DBDir, "#"+source1HeadRef.String())
	sinkDatasetSpec := spec.CreateValueSpecString("nbs", s.DBDir2, "dest")
	sout, _ := s.MustRun(main, []string{"sync", sourceSpec, sinkDatasetSpec})
	s.Regexp("Synced", sout)
	db := datas.NewDatabase(nbs.NewLocalStore(context.Background(), s.DBDir2, clienttest.DefaultMemTableSize))
	dest := db.GetDataset(context.Background(), "dest")
	s.True(types.Float(42).Equals(dest.HeadValue()))
	db.Close()
	// Pull from a dataset in one DB to an existing dataset in another
	sourceDataset := spec.CreateValueSpecString("nbs", s.DBDir, "src")
	sout, _ = s.MustRun(main, []string{"sync", sourceDataset, sinkDatasetSpec})
	s.Regexp("Synced", sout)
	db = datas.NewDatabase(nbs.NewLocalStore(context.Background(), s.DBDir2, clienttest.DefaultMemTableSize))
	dest = db.GetDataset(context.Background(), "dest")
	s.True(types.Float(43).Equals(dest.HeadValue()))
	db.Close()
	// Pull when sink dataset is already up to date
	sout, _ = s.MustRun(main, []string{"sync", sourceDataset, sinkDatasetSpec})
	s.Regexp("up to date", sout)
	// Pull from a source dataset to a not-yet-existing dataset in another DB, BUT all the needed chunks already exists in the sink.
	sinkDatasetSpec = spec.CreateValueSpecString("nbs", s.DBDir2, "dest2")
	sout, _ = s.MustRun(main, []string{"sync", sourceDataset, sinkDatasetSpec})
	s.Regexp("Created", sout)
	db = datas.NewDatabase(nbs.NewLocalStore(context.Background(), s.DBDir2, clienttest.DefaultMemTableSize))
	dest = db.GetDataset(context.Background(), "dest2")
	s.True(types.Float(43).Equals(dest.HeadValue()))
	db.Close()
}
// TestSync_Issue2598 is a regression test: syncing a second dataset from the
// same source database used to crash (noms issue #2598).
func (s *nomsSyncTestSuite) TestSync_Issue2598() {
	defer s.NoError(os.RemoveAll(s.DBDir2))
	sourceDB := datas.NewDatabase(nbs.NewLocalStore(context.Background(), s.DBDir, clienttest.DefaultMemTableSize))
	// Create dataset "src1", which has a lineage of two commits.
	source1 := sourceDB.GetDataset(context.Background(), "src1")
	source1, err := sourceDB.CommitValue(context.Background(), source1, types.Float(42))
	s.NoError(err)
	source1, err = sourceDB.CommitValue(context.Background(), source1, types.Float(43))
	s.NoError(err)
	// Create dataset "src2", with a lineage of one commit.
	source2 := sourceDB.GetDataset(context.Background(), "src2")
	source2, err = sourceDB.CommitValue(context.Background(), source2, types.Float(1))
	s.NoError(err)
	sourceDB.Close() // Close Database backing both Datasets
	// Sync over "src1"
	sourceDataset := spec.CreateValueSpecString("nbs", s.DBDir, "src1")
	sinkDatasetSpec := spec.CreateValueSpecString("nbs", s.DBDir2, "dest")
	sout, _ := s.MustRun(main, []string{"sync", sourceDataset, sinkDatasetSpec})
	db := datas.NewDatabase(nbs.NewLocalStore(context.Background(), s.DBDir2, clienttest.DefaultMemTableSize))
	dest := db.GetDataset(context.Background(), "dest")
	s.True(types.Float(43).Equals(dest.HeadValue()))
	db.Close()
	// Now, try syncing a second dataset. This crashed in issue #2598
	sourceDataset2 := spec.CreateValueSpecString("nbs", s.DBDir, "src2")
	sinkDatasetSpec2 := spec.CreateValueSpecString("nbs", s.DBDir2, "dest2")
	sout, _ = s.MustRun(main, []string{"sync", sourceDataset2, sinkDatasetSpec2})
	db = datas.NewDatabase(nbs.NewLocalStore(context.Background(), s.DBDir2, clienttest.DefaultMemTableSize))
	dest = db.GetDataset(context.Background(), "dest2")
	s.True(types.Float(1).Equals(dest.HeadValue()))
	db.Close()
	// Re-syncing the first dataset should now be a no-op.
	sout, _ = s.MustRun(main, []string{"sync", sourceDataset, sinkDatasetSpec})
	s.Regexp("up to date", sout)
}
// TestRewind syncs a dataset backwards to an earlier commit hash within the
// same database and verifies the head is reset to the earlier value.
func (s *nomsSyncTestSuite) TestRewind() {
	var err error
	sourceDB := datas.NewDatabase(nbs.NewLocalStore(context.Background(), s.DBDir, clienttest.DefaultMemTableSize))
	src := sourceDB.GetDataset(context.Background(), "foo")
	src, err = sourceDB.CommitValue(context.Background(), src, types.Float(42))
	s.NoError(err)
	// Remember the first commit's hash so we can rewind to it.
	rewindRef := src.HeadRef().TargetHash()
	src, err = sourceDB.CommitValue(context.Background(), src, types.Float(43))
	s.NoError(err)
	sourceDB.Close() // Close the backing Database before the CLI reopens it.
	sourceSpec := spec.CreateValueSpecString("nbs", s.DBDir, "#"+rewindRef.String())
	sinkDatasetSpec := spec.CreateValueSpecString("nbs", s.DBDir, "foo")
	s.MustRun(main, []string{"sync", sourceSpec, sinkDatasetSpec})
	db := datas.NewDatabase(nbs.NewLocalStore(context.Background(), s.DBDir, clienttest.DefaultMemTableSize))
	dest := db.GetDataset(context.Background(), "foo")
	s.True(types.Float(42).Equals(dest.HeadValue()))
	db.Close()
}
-34
View File
@@ -1,34 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"context"
"fmt"
"os"
"github.com/attic-labs/noms/cmd/util"
"github.com/attic-labs/noms/go/constants"
flag "github.com/juju/gnuflag"
)
// nomsVersion is the 'noms version' subcommand: it prints the data format
// version and the build identifier.
var nomsVersion = &util.Command{
	Run: runVersion,
	UsageLine: "version ",
	Short: "Display noms version",
	Long: "version prints the Noms data version and build identifier",
	Flags: setupVersionFlags,
	Nargs: 0,
}
// setupVersionFlags returns the (empty) flag set for the version command.
func setupVersionFlags() *flag.FlagSet {
	fs := flag.NewFlagSet("version", flag.ExitOnError)
	return fs
}
// runVersion prints the Noms data format version and the build SHA to
// stdout. It ignores args and always returns exit code 0.
func runVersion(ctx context.Context, args []string) int {
	fmt.Fprintf(os.Stdout, "format version: %v\nbuilt from %v\n", constants.NomsVersion, constants.NomsGitSHA)
	return 0
}
-28
View File
@@ -1,28 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"fmt"
"testing"
"github.com/attic-labs/noms/go/constants"
"github.com/attic-labs/noms/go/util/clienttest"
"github.com/stretchr/testify/suite"
)
func TestVersion(t *testing.T) {
suite.Run(t, &nomsVersionTestSuite{})
}
// nomsVersionTestSuite groups the version command tests; the embedded
// clienttest.ClientTestSuite supplies the MustRun helper used below.
type nomsVersionTestSuite struct {
	clienttest.ClientTestSuite
}
// TestVersion runs the version command and checks its output against the
// format/build values from the constants package.
func (s *nomsVersionTestSuite) TestVersion() {
	val, _ := s.MustRun(main, []string{"version"})
	expectedVal := fmt.Sprintf("format version: %v\nbuilt from %v\n", constants.NomsVersion, constants.NomsGitSHA)
	// testify's Equal takes (expected, actual); the original had them
	// swapped, which produced a misleading failure message.
	s.Equal(expectedVal, val)
}
-70
View File
@@ -1,70 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
// This is the Command struct used by the noms utility. It is packaged in a separate util package so that it can be used by other programs as well.
package util
import (
"context"
"fmt"
"os"
"strings"
flag "github.com/juju/gnuflag"
)
// Command describes a single top-level subcommand: how to run it, its usage
// text, its flag set, and the minimum argument count it requires.
type Command struct {
	// Run runs the command.
	// The args are the arguments after the command name.
	Run func(ctx context.Context, args []string) int

	// UsageLine is the one-line usage message.
	// The first word in the line is taken to be the command name.
	UsageLine string

	// Short is the short description shown in the 'help' output.
	Short string

	// Long is the long message shown in the 'help <this-command>' output.
	Long string

	// Flags returns the flag set specific to this command.
	Flags func() *flag.FlagSet

	// Nargs is the minimum number of arguments expected after flags, specific to this command.
	Nargs int
}
// Name returns the command's name: the first word of the usage line.
func (nc *Command) Name() string {
	usage := nc.UsageLine
	if idx := strings.Index(usage, " "); idx >= 0 {
		return usage[:idx]
	}
	return usage
}
func countFlags(flags *flag.FlagSet) int {
if flags == nil {
return 0
} else {
n := 0
flags.VisitAll(func(f *flag.Flag) {
n++
})
return n
}
}
// Usage prints the command's usage line, long description, and flag
// defaults (when it has any flags) to stderr, then exits with status 1.
func (nc *Command) Usage() {
	fmt.Fprintf(os.Stderr, "usage: %s\n\n", nc.UsageLine)
	fmt.Fprintf(os.Stderr, "%s\n", strings.TrimSpace(nc.Long))
	flags := nc.Flags()
	// Only print an options section when the command actually defines flags.
	if countFlags(flags) > 0 {
		fmt.Fprintf(os.Stderr, "\noptions:\n")
		flags.PrintDefaults()
	}
	os.Exit(1)
}
-114
View File
@@ -1,114 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
// This is the Help facility used by the noms utility. It is packaged in a separate util package so that it can be used by other programs as well.
package util
import (
"bufio"
"fmt"
"io"
"os"
"strings"
"text/template"
)
var usageTemplate = `{{.UsageLine}}
Usage:
{{.ProgName}} command [arguments]
The commands are:
{{range .Commands}}
{{.Name | printf "%-11s"}} {{.Short}}{{end}}
Use "{{.ProgName}} help [command]" for more information about a command.
`
var helpTemplate = `usage: {{.ProgName}} {{.Cmd.UsageLine}}
{{.Cmd.Long | trim}}
`
var (
	// commands is the full command list rendered by printUsage and searched
	// by Help; populated by InitHelp.
	commands = []*Command{}
	// usageLine is the top-line usage banner; populated by InitHelp.
	usageLine = ""
	// progName is the program name shown in usage output; populated by InitHelp.
	progName = ""
)
// InitHelp wires up the help system: name is the program name used in usage
// output, cmds is the full command list, and usage is the top-line usage
// banner.
func InitHelp(name string, cmds []*Command, usage string) {
	progName = name
	commands = cmds
	usageLine = usage
}
// tmpl executes the given template text on data, writing the result to w.
func tmpl(w io.Writer, text string, data interface{}) {
t := template.New("top")
t.Funcs(template.FuncMap{"trim": strings.TrimSpace})
template.Must(t.Parse(text))
if err := t.Execute(w, data); err != nil {
panic(err)
}
}
// printUsage renders the top-level usage template (program name, command
// table, usage banner) to w.
func printUsage(w io.Writer) {
	bw := bufio.NewWriter(w)
	data := struct {
		ProgName  string
		Commands  []*Command
		UsageLine string
	}{
		ProgName:  progName,
		Commands:  commands,
		UsageLine: usageLine,
	}
	tmpl(bw, usageTemplate, data)
	bw.Flush()
}
// Usage prints the top-level usage message to stderr and exits with
// status 1.
func Usage() {
	printUsage(os.Stderr)
	os.Exit(1)
}
// Help implements the 'help' command. With no arguments it prints the
// top-level usage to stdout; with a single command name it prints that
// command's long help and flag defaults; any other invocation is an error.
func Help(args []string) {
	if len(args) == 0 {
		printUsage(os.Stdout)
		// not exit 2: succeeded at 'help'.
		return
	}
	if len(args) != 1 {
		fmt.Fprintf(os.Stderr, "usage: %s help command\n\nToo many arguments given.\n", progName)
		os.Exit(1) // failed at 'help'
	}
	arg := args[0]
	// Look up the named command and render its help.
	for _, cmd := range commands {
		if cmd.Name() == arg {
			data := struct {
				ProgName string
				Cmd *Command
			}{
				progName,
				cmd,
			}
			tmpl(os.Stdout, helpTemplate, data)
			flags := cmd.Flags()
			// Only print an options section when the command defines flags.
			if countFlags(flags) > 0 {
				fmt.Fprintf(os.Stdout, "\noptions:\n")
				flags.PrintDefaults()
			}
			// not exit 2: succeeded at 'help cmd'.
			return
		}
	}
	fmt.Fprintf(os.Stderr, "Unknown help topic %#q\n", arg)
	Usage() // failed at 'help cmd'
}
-13
View File
@@ -1,13 +0,0 @@
// Copyright 2017 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package util
import (
"context"
"github.com/attic-labs/kingpin"
)
// KingpinHandler executes a selected kingpin command, given the full
// command input string, and returns the process exit code.
type KingpinHandler func(input string) (exitCode int)

// KingpinCommand registers a command on the given kingpin application and
// returns the created clause together with the handler that runs it.
type KingpinCommand func(context.Context, *kingpin.Application) (*kingpin.CmdClause, KingpinHandler)
-53
View File
@@ -1,53 +0,0 @@
module github.com/attic-labs/noms
require (
cloud.google.com/go v0.34.0
github.com/BurntSushi/toml v0.3.1
github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6 // indirect
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc // indirect
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf // indirect
github.com/attic-labs/graphql v0.0.0-20170223225357-917f92ca24a7
github.com/attic-labs/kingpin v0.0.0-20180312050558-442efcfac769
github.com/aws/aws-sdk-go v1.16.15
github.com/clbanning/mxj v1.8.3
github.com/codahale/blake2 v0.0.0-20150924215134-8d10d0420cbf
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dustin/go-humanize v1.0.0
github.com/edsrzf/mmap-go v1.0.0-20181222142022-904c4ced31cd
github.com/go-ole/go-ole v1.2.2 // indirect
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db
github.com/google/martian v2.1.0+incompatible // indirect
github.com/google/uuid v1.1.0
github.com/googleapis/gax-go v0.0.0-20181219185031-c8a15bac9b9f // indirect
github.com/hanwen/go-fuse v0.0.0-20170609101909-5690be47d614
github.com/ipfs/go-ipfs v0.4.18
github.com/jbenet/go-base58 v0.0.0-20150317085156-6237cf65f3a6
github.com/jpillora/backoff v0.0.0-20170918002102-8eab2debe79d
github.com/jroimartin/gocui v0.4.0
github.com/juju/fslock v0.0.0-20160525022230-4d5c94c67b4b
github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d
github.com/julienschmidt/httprouter v1.2.0
github.com/kch42/buzhash v0.0.0-20160816060738-9bdec3dec7c6
github.com/kr/pretty v0.1.0 // indirect
github.com/mattn/go-colorable v0.0.9 // indirect
github.com/mattn/go-isatty v0.0.4
github.com/mattn/go-runewidth v0.0.4 // indirect
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b
github.com/nsf/termbox-go v0.0.0-20190104133558-0938b5187e61 // indirect
github.com/onsi/ginkgo v1.8.0 // indirect
github.com/onsi/gomega v1.5.0 // indirect
github.com/pkg/errors v0.8.1
github.com/pkg/profile v1.3.0
github.com/shirou/gopsutil v2.18.12+incompatible
github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4 // indirect
github.com/skratchdot/open-golang v0.0.0-20190104022628-a2dfa6d0dab6
github.com/stretchr/testify v1.3.0
github.com/syndtr/goleveldb v0.0.0-20181128100959-b001fa50d6b2
golang.org/x/net v0.0.0-20190313220215-9f648a60d977
golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890
golang.org/x/sys v0.0.0-20190312061237-fead79001313 // indirect
google.golang.org/api v0.1.0
google.golang.org/appengine v1.4.0 // indirect
google.golang.org/genproto v0.0.0-20181221175505-bd9b4fb69e2f // indirect
gopkg.in/alecthomas/kingpin.v2 v2.2.6
)
-174
View File
@@ -1,174 +0,0 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0 h1:eOI3/cP2VTU6uZLDYAoic+eyzzB9YyGmJ7eIjl8rOPg=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6 h1:fLjPD/aNc3UIOA6tDi6QXUemppXK3P9BI7mr2hd6gx8=
github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc h1:cAKDfWh5VpdgMhJosfJnn5/FoN2SRZ4p7fJNX58YPaU=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf h1:qet1QNfXsQxTZqLG4oE62mJzwPIB8+Tee4RNCL9ulrY=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/attic-labs/graphql v0.0.0-20170223225357-917f92ca24a7 h1:Pic9T6o6p3evQEbD1+rE3JV9dGr+c4PG5yYHGe7FVd8=
github.com/attic-labs/graphql v0.0.0-20170223225357-917f92ca24a7/go.mod h1:ReZxnaSh4yvjWxdlmiYnHZTpHIXxd6t1TZMWHVJyzek=
github.com/attic-labs/kingpin v0.0.0-20180312050558-442efcfac769 h1:UXEVwJER4LBEnmUjg5FQ+5/r17sHYHv1dMwP16We/78=
github.com/attic-labs/kingpin v0.0.0-20180312050558-442efcfac769/go.mod h1:Cp18FeDCvsK+cD2QAGkqerGjrgSXLiJWnjHeY2mneBc=
github.com/aws/aws-sdk-go v1.16.15 h1:kQyxfRyjAwIYjf0225sn/pn+WAlncKyI8dmT3+ItMFE=
github.com/aws/aws-sdk-go v1.16.15/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/clbanning/mxj v1.8.3 h1:2r/KCJi52w2MRz+K+UMa/1d7DdCjnLqYJfnbr7dYNWI=
github.com/clbanning/mxj v1.8.3/go.mod h1:BVjHeAH+rl9rs6f+QIpeRl0tfu10SXn1pUSa5PVGJng=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/codahale/blake2 v0.0.0-20150924215134-8d10d0420cbf h1:5ZeQB3mThuz5C2MSER6T5GdtXTF9CMMk42F9BOyRsEQ=
github.com/codahale/blake2 v0.0.0-20150924215134-8d10d0420cbf/go.mod h1:BO2rLUAZMrpgh6GBVKi0Gjdqw2MgCtJrtmUdDeZRKjY=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/edsrzf/mmap-go v1.0.0-20181222142022-904c4ced31cd h1:d4FJbBWRop8iXtFbG/kPJztSXAg1fv52hOLwARc38TY=
github.com/edsrzf/mmap-go v1.0.0-20181222142022-904c4ced31cd/go.mod h1:W3m91qexYIu40kcj8TLXNUSTCKprH8UQ3GgH5/Xyfc0=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-ole/go-ole v1.2.2 h1:QNWhweRd9D5Py2rRVboZ2L4SEoW/dyraWJCc8bgS8kE=
github.com/go-ole/go-ole v1.2.2/go.mod h1:pnvuG7BrDMZ8ifMurTQmxwhQM/odqm9sSqNe5BUI7v4=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db h1:woRePGFeVFfLKN/pOkfl+p/TAqKOfFu+7KPlMVpok/w=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/uuid v1.1.0 h1:Jf4mxPC/ziBnoPIdpQdPJ9OeiomAUHLvxmPRSPH9m4s=
github.com/google/uuid v1.1.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go v0.0.0-20181219185031-c8a15bac9b9f h1:UagDZv2cTLFNxuFiG5WvbbxtKTwbLWec8QUnFNqS6Ho=
github.com/googleapis/gax-go v0.0.0-20181219185031-c8a15bac9b9f/go.mod h1:5VvnLYVimBt+hOVlFtJDkYQHVmk4K27qHHioZjPbYAI=
github.com/googleapis/gax-go/v2 v2.0.2 h1:/rNgUniLy2vDXiK2xyJOcirGpC3G99dtK1NWx26WZ8Y=
github.com/googleapis/gax-go/v2 v2.0.2/go.mod h1:LLvjysVCY1JZeum8Z6l8qUty8fiNwE08qbEPm1M08qg=
github.com/grpc-ecosystem/grpc-gateway v1.5.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
github.com/hanwen/go-fuse v0.0.0-20170609101909-5690be47d614 h1:757rta3DGvtCMtsI72I+5GHPTCTqcubSyR3g/Yan1dM=
github.com/hanwen/go-fuse v0.0.0-20170609101909-5690be47d614/go.mod h1:4ZJ05v9yt5k/mcFkGvSPKJB5T8G/6nuumL63ZqlrPvI=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/ipfs/go-ipfs v0.4.18 h1:QBpaJj4emf63RWnqi/PFRQrwGvanlbNqPHHHfVz8IiU=
github.com/ipfs/go-ipfs v0.4.18/go.mod h1:iXzbK+Wa6eePj3jQg/uY6Uoq5iOwY+GToD/bgaRadto=
github.com/jbenet/go-base58 v0.0.0-20150317085156-6237cf65f3a6 h1:4zOlv2my+vf98jT1nQt4bT/yKWUImevYPJ2H344CloE=
github.com/jbenet/go-base58 v0.0.0-20150317085156-6237cf65f3a6/go.mod h1:r/8JmuR0qjuCiEhAolkfvdZgmPiHTnJaG0UXCSeR1Zo=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jpillora/backoff v0.0.0-20170918002102-8eab2debe79d h1:ix3WmphUvN0GDd0DO9MH0v6/5xTv+Xm1bPN+1UJn58k=
github.com/jpillora/backoff v0.0.0-20170918002102-8eab2debe79d/go.mod h1:2iMrUgbbvHEiQClaW2NsSzMyGHqN+rDFqY705q49KG0=
github.com/jroimartin/gocui v0.4.0 h1:52jnalstgmc25FmtGcWqa0tcbMEWS6RpFLsOIO+I+E8=
github.com/jroimartin/gocui v0.4.0/go.mod h1:7i7bbj99OgFHzo7kB2zPb8pXLqMBSQegY7azfqXMkyY=
github.com/juju/fslock v0.0.0-20160525022230-4d5c94c67b4b h1:FQ7+9fxhyp82ks9vAuyPzG0/vVbWwMwLJ+P6yJI5FN8=
github.com/juju/fslock v0.0.0-20160525022230-4d5c94c67b4b/go.mod h1:HMcgvsgd0Fjj4XXDkbjdmlbI505rUPBs6WBMYg2pXks=
github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d h1:c93kUJDtVAXFEhsCh5jSxyOJmFHuzcihnslQiX8Urwo=
github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPciBPrBUjwbNvtwB6RQlve+hkpll6QSNmOE=
github.com/julienschmidt/httprouter v1.2.0 h1:TDTW5Yz1mjftljbcKqRcrYhd4XeOoI98t+9HbQbYf7g=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kch42/buzhash v0.0.0-20160816060738-9bdec3dec7c6 h1:l6Y3mFnF46A+CeZsTrT8kVIuhayq1266oxWpDKE7hnQ=
github.com/kch42/buzhash v0.0.0-20160816060738-9bdec3dec7c6/go.mod h1:UtDV9qK925GVmbdjR+e1unqoo+wGWNHHC6XB1Eu6wpE=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/mattn/go-colorable v0.0.9 h1:UVL0vNpWh04HeJXV0KLcaT7r06gOH2l4OW6ddYRUIY4=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-runewidth v0.0.4 h1:2BvfKmzob6Bmd4YsL0zygOqfdFnK7GR4QL06Do4/p7Y=
github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/nsf/termbox-go v0.0.0-20190104133558-0938b5187e61 h1:pEzZYac/uQ4cgaN1Q/UYZg+ZtCSWz2HQ3rvl8MeN9MA=
github.com/nsf/termbox-go v0.0.0-20190104133558-0938b5187e61/go.mod h1:IuKpRQcYE1Tfu+oAQqaLisqDeXgjyyltCfsaoYN18NQ=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.8.0 h1:VkHVNpR4iVnU8XQR6DBm8BqYjN7CRzw+xKUbVVbbW9w=
github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.5.0 h1:izbySO9zDPmjJ8rDjLvkA2zJHIo+HkYXHnf7eN7SSyo=
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/openzipkin/zipkin-go v0.1.1/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.3.0 h1:OQIvuDgm00gWVWGTf4m4mCt6W1/0YqU7Ntg0mySWgaI=
github.com/pkg/profile v1.3.0/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.8.0/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/shirou/gopsutil v2.18.12+incompatible h1:1eaJvGomDnH74/5cF4CTmTbLHAriGFsTZppLXDX93OM=
github.com/shirou/gopsutil v2.18.12+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4 h1:udFKJ0aHUL60LboW/A+DfgoHVedieIzIXE8uylPue0U=
github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc=
github.com/skratchdot/open-golang v0.0.0-20190104022628-a2dfa6d0dab6 h1:cGT4dcuEyBwwu/v6tosyqcDp2yoIo/LwjMGixUvg3nU=
github.com/skratchdot/open-golang v0.0.0-20190104022628-a2dfa6d0dab6/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/syndtr/goleveldb v0.0.0-20181128100959-b001fa50d6b2 h1:GnOzE5fEFN3b2zDhJJABEofdb51uMRNb8eqIVtdducs=
github.com/syndtr/goleveldb v0.0.0-20181128100959-b001fa50d6b2/go.mod h1:Z4AUp2Km+PwemOoO/VB5AOx9XSsIItzFjoJlOSiYmn0=
go.opencensus.io v0.18.0 h1:Mk5rgZcggtbvtAun5aJzAtjKKN/t0R3jJPlWILlv938=
go.opencensus.io v0.18.0/go.mod h1:vKdFvxhtzZ9onBp9VKHK8z/sRpBMnKAsufL7wlDrCOA=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181106065722-10aee1819953/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190313220215-9f648a60d977 h1:actzWV6iWn3GLqN8dZjzsB+CLt+gaV2+wsxroxiQI8I=
golang.org/x/net v0.0.0-20190313220215-9f648a60d977/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890 h1:uESlIz09WIHT2I+pasSXcpLYqYK8wHcdCetU3VuMBJE=
golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f h1:Bl/8QSvNqXvPGPGXa2z5xUTmV7VDcZyvRZ+QQXkXTZQ=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181221143128-b4a75ba826a6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313 h1:pczuHS43Cp2ktBEEmLwScxgjWsBSzdaQiKzUyf3DTTc=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.1.0 h1:K6z2u68e86TPdSdefXdzvXgR1zEMa+459vBSfWYAZkI=
google.golang.org/api v0.1.0/go.mod h1:UGEZY7KEX120AnNLIHFMKIo4obdJhkp2tPbaPlQx13Y=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20181202183823-bd91e49a0898/go.mod h1:7Ep/1NZk928CDR8SjdVbjWNpdIf6nzjE3BTgJDr2Atg=
google.golang.org/genproto v0.0.0-20181221175505-bd9b4fb69e2f h1:eT3B0O2ghdSPzjAOznr3oOLyN1HFeYUncYl7FRwg4VI=
google.golang.org/genproto v0.0.0-20181221175505-bd9b4fb69e2f/go.mod h1:7Ep/1NZk928CDR8SjdVbjWNpdIf6nzjE3BTgJDr2Atg=
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio=
google.golang.org/grpc v1.17.0 h1:TRJYBgMclJvGYn2rIMjj+h9KtMt5r1Ij7ODVRIZkwhk=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
Generated
+472
View File
@@ -0,0 +1,472 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
digest = "1:197df5f5c8728853faed5c09293181a8f69cffffa5b6610ba6e0acf21b51f6bd"
name = "cloud.google.com/go"
packages = [
"compute/metadata",
"iam",
"internal",
"internal/optional",
"internal/trace",
"internal/version",
"storage",
]
pruneopts = "UT"
revision = "6cb29e61d96723a38dcac44d4c15c36744d96d07"
version = "v0.29.0"
[[projects]]
branch = "master"
digest = "1:f6e569e4a0c5d9c7fab4a9613cf55ac0e2160c17cc1eae1c96b78b842619c64a"
name = "github.com/attic-labs/graphql"
packages = [
".",
"gqlerrors",
"language/ast",
"language/kinds",
"language/lexer",
"language/location",
"language/parser",
"language/printer",
"language/source",
"language/typeInfo",
"language/visitor",
]
pruneopts = "UT"
revision = "917f92ca24a759a0e3bfd1b135850f9b0c04682e"
[[projects]]
branch = "master"
digest = "1:83a39a9a1da06870e069dc6f0b51d9bb799dac2330ccfe3055c57d9f2338ec6d"
name = "github.com/attic-labs/noms"
packages = [
"go/blobstore",
"go/chunks",
"go/constants",
"go/d",
"go/datas",
"go/diff",
"go/hash",
"go/marshal",
"go/merge",
"go/metrics",
"go/nbs",
"go/ngql",
"go/nomdl",
"go/sloppy",
"go/spec",
"go/types",
"go/util/exit",
"go/util/functions",
"go/util/random",
"go/util/sizecache",
"go/util/status",
"go/util/verbose",
"go/util/writers",
]
pruneopts = "UT"
revision = "599ff4e45090e6117908737a4d821b155ead9780"
source = "git@github.com:liquidata-inc/noms.git"
[[projects]]
digest = "1:a7e9d148fc9d6a1b19336e0524e0ebe0f3486e215390f2947d80a19a91884c34"
name = "github.com/aws/aws-sdk-go"
packages = [
"aws",
"aws/awserr",
"aws/awsutil",
"aws/client",
"aws/client/metadata",
"aws/corehandlers",
"aws/credentials",
"aws/credentials/ec2rolecreds",
"aws/credentials/endpointcreds",
"aws/credentials/stscreds",
"aws/csm",
"aws/defaults",
"aws/ec2metadata",
"aws/endpoints",
"aws/request",
"aws/session",
"aws/signer/v4",
"internal/sdkio",
"internal/sdkrand",
"internal/sdkuri",
"internal/shareddefaults",
"private/protocol",
"private/protocol/eventstream",
"private/protocol/eventstream/eventstreamapi",
"private/protocol/json/jsonutil",
"private/protocol/jsonrpc",
"private/protocol/query",
"private/protocol/query/queryutil",
"private/protocol/rest",
"private/protocol/restxml",
"private/protocol/xml/xmlutil",
"service/dynamodb",
"service/s3",
"service/sts",
]
pruneopts = "UT"
revision = "a5158a897b80d434ed8704bb42fed46ff9bf8b91"
version = "v1.15.44"
[[projects]]
digest = "1:ffe9824d294da03b391f44e1ae8281281b4afc1bdaa9588c9097785e3af10cec"
name = "github.com/davecgh/go-spew"
packages = ["spew"]
pruneopts = "UT"
revision = "8991bc29aa16c548c550c7ff78260e27b9ab7c73"
version = "v1.1.1"
[[projects]]
branch = "master"
digest = "1:6f9339c912bbdda81302633ad7e99a28dfa5a639c864061f1929510a9a64aa74"
name = "github.com/dustin/go-humanize"
packages = ["."]
pruneopts = "UT"
revision = "9f541cc9db5d55bce703bd99987c9d5cb8eea45e"
[[projects]]
digest = "1:865079840386857c809b72ce300be7580cb50d3d3129ce11bf9aa6ca2bc1934a"
name = "github.com/fatih/color"
packages = ["."]
pruneopts = "UT"
revision = "5b77d2a35fb0ede96d138fc9a99f5c9b6aef11b4"
version = "v1.7.0"
[[projects]]
digest = "1:5abd6a22805b1919f6a6bca0ae58b13cef1f3412812f38569978f43ef02743d4"
name = "github.com/go-ini/ini"
packages = ["."]
pruneopts = "UT"
revision = "5cf292cae48347c2490ac1a58fe36735fb78df7e"
version = "v1.38.2"
[[projects]]
digest = "1:5d1b5a25486fc7d4e133646d834f6fca7ba1cef9903d40e7aa786c41b89e9e91"
name = "github.com/golang/protobuf"
packages = [
"proto",
"protoc-gen-go/descriptor",
"ptypes",
"ptypes/any",
"ptypes/duration",
"ptypes/timestamp",
]
pruneopts = "UT"
revision = "aa810b61a9c79d51363740d207bb46cf8e620ed5"
version = "v1.2.0"
[[projects]]
branch = "master"
digest = "1:4a0c6bb4805508a6287675fac876be2ac1182539ca8a32468d8128882e9d5009"
name = "github.com/golang/snappy"
packages = ["."]
pruneopts = "UT"
revision = "2e65f85255dbc3072edf28d6b5b8efc472979f5a"
[[projects]]
digest = "1:3a26588bc48b96825977c1b3df964f8fd842cd6860cc26370588d3563433cf11"
name = "github.com/google/uuid"
packages = ["."]
pruneopts = "UT"
revision = "d460ce9f8df2e77fb1ba55ca87fafed96c607494"
version = "v1.0.0"
[[projects]]
digest = "1:e145e9710a10bc114a6d3e2738aadf8de146adaa031854ffdf7bbfe15da85e63"
name = "github.com/googleapis/gax-go"
packages = ["."]
pruneopts = "UT"
revision = "317e0006254c44a0ac427cc52a0e083ff0b9622f"
version = "v2.0.0"
[[projects]]
digest = "1:e22af8c7518e1eab6f2eab2b7d7558927f816262586cd6ed9f349c97a6c285c4"
name = "github.com/jmespath/go-jmespath"
packages = ["."]
pruneopts = "UT"
revision = "0b12d6b5"
[[projects]]
digest = "1:b6bbd2f9e0724bd81890c8644259f920c6d61c08453978faff0bebd25f3e7d3e"
name = "github.com/jpillora/backoff"
packages = ["."]
pruneopts = "UT"
revision = "8eab2debe79d12b7bd3d10653910df25fa9552ba"
version = "1.0.0"
[[projects]]
branch = "master"
digest = "1:b8d72d48e77c5a93e09f82d57cd05a30c302ff0835388b0b7745f4f9cf3e0652"
name = "github.com/juju/gnuflag"
packages = ["."]
pruneopts = "UT"
revision = "2ce1bb71843d6d179b3f1c1c9cb4a72cd067fc65"
[[projects]]
digest = "1:ccfa094742ce1c97fd3f6481e1bf98f3d9862510eee2bc0eb56e2745396bd330"
name = "github.com/julienschmidt/httprouter"
packages = ["."]
pruneopts = "UT"
revision = "8c199fb6259ffc1af525cc3ad52ee60ba8359669"
version = "v1.1"
[[projects]]
branch = "master"
digest = "1:975079ef1a4b94c23122af1c18891ef9518b47f9fa30e8905b34802c5d7c7adc"
name = "github.com/kch42/buzhash"
packages = ["."]
pruneopts = "UT"
revision = "9bdec3dec7c611fa97beadc374d75bdf02cd880e"
[[projects]]
digest = "1:c658e84ad3916da105a761660dcaeb01e63416c8ec7bc62256a9b411a05fcd67"
name = "github.com/mattn/go-colorable"
packages = ["."]
pruneopts = "UT"
revision = "167de6bfdfba052fa6b2d3664c8f5272e23c9072"
version = "v0.0.9"
[[projects]]
digest = "1:0981502f9816113c9c8c4ac301583841855c8cf4da8c72f696b3ebedf6d0e4e5"
name = "github.com/mattn/go-isatty"
packages = ["."]
pruneopts = "UT"
revision = "6ca4dbf54d38eea1a992b3c722a76a5d1c4cb25c"
version = "v0.0.4"
[[projects]]
digest = "1:40e195917a951a8bf867cd05de2a46aaf1806c50cf92eebf4c16f78cd196f747"
name = "github.com/pkg/errors"
packages = ["."]
pruneopts = "UT"
revision = "645ef00459ed84a119197bfb8d8205042c6df63d"
version = "v0.8.0"
[[projects]]
digest = "1:0028cb19b2e4c3112225cd871870f2d9cf49b9b4276531f03438a88e94be86fe"
name = "github.com/pmezard/go-difflib"
packages = ["difflib"]
pruneopts = "UT"
revision = "792786c7400a136282c1664665ae0a8db921c6c2"
version = "v1.0.0"
[[projects]]
digest = "1:18752d0b95816a1b777505a97f71c7467a8445b8ffb55631a7bf779f6ba4fa83"
name = "github.com/stretchr/testify"
packages = ["assert"]
pruneopts = "UT"
revision = "f35b8ab0b5a2cef36673838d662e249dd9c94686"
version = "v1.2.2"
[[projects]]
branch = "master"
digest = "1:f2ffd421680b0a3f7887501b3c6974bcf19217ecd301d0e2c9b681940ec363d5"
name = "github.com/syndtr/goleveldb"
packages = [
"leveldb",
"leveldb/cache",
"leveldb/comparer",
"leveldb/errors",
"leveldb/filter",
"leveldb/iterator",
"leveldb/journal",
"leveldb/memdb",
"leveldb/opt",
"leveldb/storage",
"leveldb/",
"leveldb/util",
]
pruneopts = "UT"
revision = "ae2bd5eed72d46b28834ec3f60db3a3ebedd8dbd"
[[projects]]
digest = "1:ac5cb21cbe4f095b6e5f1ae5102a85dfd598d39b5ad0d64df3d41ee046586f30"
name = "go.opencensus.io"
packages = [
".",
"internal",
"internal/tagencoding",
"plugin/ochttp",
"plugin/ochttp/propagation/b3",
"stats",
"stats/internal",
"stats/view",
"tag",
"trace",
"trace/internal",
"trace/propagation",
"trace/tracestate",
]
pruneopts = "UT"
revision = "79993219becaa7e29e3b60cb67f5b8e82dee11d6"
version = "v0.17.0"
[[projects]]
branch = "master"
digest = "1:1c14517b2f106c61d75006199b46a46576058661d469658cb0f90739919641d2"
name = "golang.org/x/net"
packages = [
"context",
"context/ctxhttp",
"http/httpguts",
"http2",
"http2/hpack",
"idna",
"internal/timeseries",
"trace",
]
pruneopts = "UT"
revision = "4bcd98cce591d8c7061bf313d7a3cbad05b58549"
[[projects]]
branch = "master"
digest = "1:f645667d687fc8bf228865a2c5455824ef05bad08841e673673ef2bb89ac5b90"
name = "golang.org/x/oauth2"
packages = [
".",
"google",
"internal",
"jws",
"jwt",
]
pruneopts = "UT"
revision = "d2e6202438beef2727060aa7cabdd924d92ebfd9"
[[projects]]
branch = "master"
digest = "1:19f92ce03256cc8a4467054842ec81f081985becd92bbc443e7604dfe801e6a8"
name = "golang.org/x/sys"
packages = ["unix"]
pruneopts = "UT"
revision = "4910a1d54f876d7b22162a85f4d066d3ee649450"
[[projects]]
digest = "1:a2ab62866c75542dd18d2b069fec854577a20211d7c0ea6ae746072a1dccdd18"
name = "golang.org/x/text"
packages = [
"collate",
"collate/build",
"internal/colltab",
"internal/gen",
"internal/tag",
"internal/triegen",
"internal/ucd",
"language",
"secure/bidirule",
"transform",
"unicode/bidi",
"unicode/cldr",
"unicode/norm",
"unicode/rangetable",
]
pruneopts = "UT"
revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0"
version = "v0.3.0"
[[projects]]
branch = "master"
digest = "1:c26e3951bc86029bc6f272f2afd5408de994fa3115ea1d3e0fe144e3d786b6d2"
name = "google.golang.org/api"
packages = [
"gensupport",
"googleapi",
"googleapi/internal/uritemplates",
"googleapi/transport",
"internal",
"iterator",
"option",
"storage/v1",
"transport/http",
"transport/http/internal/propagation",
]
pruneopts = "UT"
revision = "c21459d81882ee61fcd6631fb94dbd9a29bd4377"
[[projects]]
digest = "1:c8907869850adaa8bd7631887948d0684f3787d0912f1c01ab72581a6c34432e"
name = "google.golang.org/appengine"
packages = [
".",
"internal",
"internal/app_identity",
"internal/base",
"internal/datastore",
"internal/log",
"internal/modules",
"internal/remote_api",
"internal/urlfetch",
"urlfetch",
]
pruneopts = "UT"
revision = "b1f26356af11148e710935ed1ac8a7f5702c7612"
version = "v1.1.0"
[[projects]]
branch = "master"
digest = "1:a7d48ca460ca1b4f6ccd8c95502443afa05df88aee84de7dbeb667a8754e8fa6"
name = "google.golang.org/genproto"
packages = [
"googleapis/api/annotations",
"googleapis/iam/v1",
"googleapis/rpc/code",
"googleapis/rpc/status",
]
pruneopts = "UT"
revision = "c66870c02cf823ceb633bcd05be3c7cda29976f4"
[[projects]]
digest = "1:3dd7996ce6bf52dec6a2f69fa43e7c4cefea1d4dfa3c8ab7a5f8a9f7434e239d"
name = "google.golang.org/grpc"
packages = [
".",
"balancer",
"balancer/base",
"balancer/roundrobin",
"codes",
"connectivity",
"credentials",
"encoding",
"encoding/proto",
"grpclog",
"internal",
"internal/backoff",
"internal/channelz",
"internal/envconfig",
"internal/grpcrand",
"internal/transport",
"keepalive",
"metadata",
"naming",
"peer",
"resolver",
"resolver/dns",
"resolver/passthrough",
"stats",
"status",
"tap",
]
pruneopts = "UT"
revision = "32fb0ac620c32ba40a4626ddf94d90d12cce3455"
version = "v1.14.0"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
input-imports = [
"github.com/attic-labs/noms/go/datas",
"github.com/attic-labs/noms/go/diff",
"github.com/attic-labs/noms/go/hash",
"github.com/attic-labs/noms/go/marshal",
"github.com/attic-labs/noms/go/spec",
"github.com/attic-labs/noms/go/types",
"github.com/fatih/color",
"github.com/google/uuid",
"github.com/pkg/errors",
]
solver-name = "gps-cdcl"
solver-version = 1
+39
View File
@@ -0,0 +1,39 @@
# Gopkg.toml example
#
# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"
#
# [prune]
# non-go = false
# go-tests = true
# unused-packages = true
[prune]
go-tests = true
unused-packages = true
[[constraint]]
name = "github.com/attic-labs/noms"
# source = "https://github.com/liquidata-inc/noms.git"
source = "git@github.com:liquidata-inc/noms.git"
[[override]]
name = "github.com/attic-labs/graphql"
branch = "master"
-48
View File
@@ -1,48 +0,0 @@
package blobstore
import (
"bytes"
"context"
"io"
"io/ioutil"
)
// Blobstore is an interface for storing and retrieving blobs of data by key
type Blobstore interface {
	// Exists reports whether a blob is stored under key.
	Exists(ctx context.Context, key string) (bool, error)

	// Get returns a reader over the requested range of the blob stored at
	// key, along with the blob's current version token.
	Get(ctx context.Context, key string, br BlobRange) (io.ReadCloser, string, error)

	// Put stores the contents of reader under key and returns the new
	// version token.
	Put(ctx context.Context, key string, reader io.Reader) (string, error)

	// CheckAndPut stores the contents of reader under key only if the
	// blob's current version matches expectedVersion, returning the new
	// version token on success.
	CheckAndPut(ctx context.Context, expectedVersion, key string, reader io.Reader) (string, error)
}
// GetBytes is a utility function that calls bs.Get, fully reads the returned
// io.ReadCloser, closes it, and returns the blob's bytes and version token.
func GetBytes(ctx context.Context, bs Blobstore, key string, br BlobRange) ([]byte, string, error) {
	rc, ver, err := bs.Get(ctx, key, br)

	if rc == nil || err != nil {
		return nil, ver, err
	}
	defer rc.Close()

	var data []byte
	if data, err = ioutil.ReadAll(rc); err != nil {
		return nil, "", err
	}

	return data, ver, nil
}
// PutBytes is a utility function that calls bs.Put, wrapping the supplied
// byte slice in an in-memory io.Reader.
func PutBytes(ctx context.Context, bs Blobstore, key string, data []byte) (string, error) {
	return bs.Put(ctx, key, bytes.NewReader(data))
}
// CheckAndPutBytes is a utility function that calls bs.CheckAndPut, wrapping
// the supplied byte slice in an in-memory io.Reader.
func CheckAndPutBytes(ctx context.Context, bs Blobstore, expectedVersion, key string, data []byte) (string, error) {
	return bs.CheckAndPut(ctx, expectedVersion, key, bytes.NewReader(data))
}
-352
View File
@@ -1,352 +0,0 @@
package blobstore
import (
"bytes"
"context"
"encoding/binary"
"fmt"
"io/ioutil"
"log"
"math/rand"
"reflect"
"testing"
"cloud.google.com/go/storage"
"github.com/google/uuid"
)
// Shared fixture configuration for the blobstore tests.
const (
	key           = "test" // blob key used by most single-key tests
	rmwRetries    = 5      // max consecutive IO failures tolerated in read-modify-write loops
	testGCSBucket = ""     // set to a bucket name to enable the GCS-backed tests
)

var (
	ctx    context.Context       // background context used to build the GCS fixture
	bucket *storage.BucketHandle // handle to testGCSBucket; nil when GCS tests are disabled
)

// init wires up the GCS client, but only when a test bucket has been
// configured above; otherwise the GCS-backed tests are skipped entirely.
func init() {
	if testGCSBucket != "" {
		ctx = context.Background()
		gcs, err := storage.NewClient(ctx)

		if err != nil {
			panic("Could not create GCSBlobstore")
		}

		bucket = gcs.Bucket(testGCSBucket)
	}
}
// BlobstoreTest pairs a Blobstore implementation with the concurrency and
// iteration counts used when exercising it in the read-modify-write tests.
type BlobstoreTest struct {
	bs             Blobstore
	rmwConcurrency int // number of goroutines in the concurrent CheckAndPut test
	rmwIterations  int // successful updates each goroutine must complete
}

// appendGCSTest adds a GCS-backed test case, but only when testGCSBucket is
// configured.  The store is namespaced under a fresh UUID prefix so separate
// runs don't collide.
func appendGCSTest(tests []BlobstoreTest) []BlobstoreTest {
	if testGCSBucket != "" {
		gcsTest := BlobstoreTest{&GCSBlobstore{bucket, uuid.New().String() + "/"}, 4, 4}
		tests = append(tests, gcsTest)
	}

	return tests
}

// appendLocalTest adds a filesystem-backed test case rooted in a fresh temp
// directory.
// NOTE(review): the temp dir is never removed after the tests run.
func appendLocalTest(tests []BlobstoreTest) []BlobstoreTest {
	dir, err := ioutil.TempDir("", uuid.New().String())

	if err != nil {
		panic("Could not create temp dir")
	}

	return append(tests, BlobstoreTest{NewLocalBlobstore(dir), 10, 20})
}

// newBlobStoreTests builds the matrix of Blobstore implementations the suite
// runs against: in-memory, local filesystem, and (optionally) GCS.
func newBlobStoreTests() []BlobstoreTest {
	var tests []BlobstoreTest
	tests = append(tests, BlobstoreTest{NewInMemoryBlobstore(), 10, 20})
	tests = appendLocalTest(tests)
	tests = appendGCSTest(tests)

	return tests
}
// randBytes returns a slice of size pseudo-random bytes drawn from the
// default math/rand source.
func randBytes(size int) []byte {
	// Renamed from "bytes", which shadowed the imported bytes package.
	data := make([]byte, size)
	// math/rand's Read is documented to always return len(p), nil, so the
	// error is safely ignored here.
	rand.Read(data)
	return data
}
// testPutAndGetBack writes random data under key, reads it back, and checks
// that both the version tokens and the payload round-trip intact.
func testPutAndGetBack(t *testing.T, bs Blobstore) {
	testData := randBytes(32)
	ver, err := PutBytes(context.Background(), bs, key, testData)

	if err != nil {
		t.Errorf("Put failed %v.", err)
	}

	retrieved, retVer, err := GetBytes(context.Background(), bs, key, BlobRange{})

	if err != nil {
		t.Errorf("Get failed: %v.", err)
	}

	// The version returned by Get must match the one Put reported.
	if ver != retVer {
		t.Errorf("Version doesn't match. Expected: %s Actual: %s.", ver, retVer)
	}

	if !reflect.DeepEqual(retrieved, testData) {
		t.Errorf("Data mismatch.")
	}
}

// TestPutAndGetBack runs the put/get round-trip against every configured
// Blobstore implementation.
func TestPutAndGetBack(t *testing.T) {
	for _, bsTest := range newBlobStoreTests() {
		testPutAndGetBack(t, bsTest.bs)
	}
}
// testGetMissing asserts that reading a key that was never written yields a
// not-found error rather than success or some other failure.
func testGetMissing(t *testing.T, bs Blobstore) {
	_, _, err := GetBytes(context.Background(), bs, key, BlobRange{})

	if err == nil || !IsNotFoundError(err) {
		t.Errorf("Key should be missing.")
	}
}

// TestGetMissing runs the missing-key check against every configured
// Blobstore implementation.
func TestGetMissing(t *testing.T) {
	for _, bsTest := range newBlobStoreTests() {
		testGetMissing(t, bsTest.bs)
	}
}
// testCheckAndPutError verifies that CheckAndPut with a version that does not
// match the stored blob fails with a CheckAndPutError carrying the key and
// the expected version that was supplied.
func testCheckAndPutError(t *testing.T, bs Blobstore) {
	testData := randBytes(32)
	badVersion := "bad" //has to be valid hex
	_, err := CheckAndPutBytes(context.Background(), bs, badVersion, key, testData)

	if err == nil {
		// This branch means the write unexpectedly succeeded; the previous
		// message ("Key should be missing.") described the wrong condition.
		t.Errorf("CheckAndPut with a mismatched version should have failed.")
		return
	} else if !IsCheckAndPutError(err) {
		t.Errorf("Should have failed due to version mismatch.")
		return
	}

	cpe, ok := err.(CheckAndPutError)

	if !ok {
		t.Errorf("Error is not of the expected type")
	} else if cpe.Key != key || cpe.ExpectedVersion != badVersion {
		t.Errorf("CheckAndPutError does not have expected values - " + cpe.Error())
	}
}

// TestCheckAndPutError runs the version-mismatch check against every
// configured Blobstore implementation.
func TestCheckAndPutError(t *testing.T) {
	for _, bsTest := range newBlobStoreTests() {
		testCheckAndPutError(t, bsTest.bs)
	}
}
// testCheckAndPut walks the happy path of optimistic concurrency: an initial
// write supplying the empty version, then two successive writes each
// supplying the version returned by the previous call.
func testCheckAndPut(t *testing.T, bs Blobstore) {
	ver, err := CheckAndPutBytes(context.Background(), bs, "", key, randBytes(32))

	if err != nil {
		t.Errorf("Failed CheckAndPut.")
	}

	newVer, err := CheckAndPutBytes(context.Background(), bs, ver, key, randBytes(32))

	if err != nil {
		t.Errorf("Failed CheckAndPut.")
	}

	_, err = CheckAndPutBytes(context.Background(), bs, newVer, key, randBytes(32))

	if err != nil {
		t.Errorf("Failed CheckAndPut.")
	}
}

// TestCheckAndPut runs the sequential CheckAndPut chain against every
// configured Blobstore implementation.
func TestCheckAndPut(t *testing.T) {
	for _, bsTest := range newBlobStoreTests() {
		testCheckAndPut(t, bsTest.bs)
	}
}
// readModifyWrite performs `iterations` successful read-modify-write cycles
// against bs under key.  Each cycle reads the current blob, appends one byte
// whose value is the blob's previous length, and writes it back with
// CheckAndPut using the version it read.  Version-mismatch failures (another
// goroutine won the race) are counted and retried; rmwRetries consecutive
// non-mismatch IO failures abort via panic.  The number of version-mismatch
// collisions observed is sent on doneChan when finished.
func readModifyWrite(bs Blobstore, key string, iterations int, doneChan chan int) {
	concurrentWrites := 0
	for updates, failures := 0, 0; updates < iterations; {
		if failures >= rmwRetries {
			panic("Having io issues.")
		}

		data, ver, err := GetBytes(context.Background(), bs, key, BlobRange{})

		// A not-found error is expected before the first successful write.
		if err != nil && !IsNotFoundError(err) {
			log.Println(err)
			failures++
			continue
		}

		dataSize := len(data)
		newData := make([]byte, dataSize+1)
		copy(newData, data)
		newData[dataSize] = byte(dataSize)

		_, err = CheckAndPutBytes(context.Background(), bs, ver, key, newData)

		if err == nil {
			updates++
			failures = 0 // only consecutive failures count toward the abort limit
		} else if !IsCheckAndPutError(err) {
			log.Println(err)
			failures++
		} else {
			concurrentWrites++
		}
	}

	doneChan <- concurrentWrites
}
// testConcurrentCheckAndPuts launches rmwConcurrency goroutines that each
// perform rmwIterations read-modify-write updates on the same key, then
// verifies the final blob contains exactly one byte per update with byte i
// holding value i — which only happens if every CheckAndPut was properly
// serialized.
func testConcurrentCheckAndPuts(t *testing.T, bsTest BlobstoreTest, key string) {
	doneChan := make(chan int)
	for n := 0; n < bsTest.rmwConcurrency; n++ {
		go readModifyWrite(bsTest.bs, key, bsTest.rmwIterations, doneChan)
	}

	totalConcurrentWrites := 0
	for n := 0; n < bsTest.rmwConcurrency; n++ {
		totalConcurrentWrites += <-doneChan
	}

	// If no collisions occurred the test exercised no real contention, so
	// print the count to make the run's coverage visible.
	fmt.Println(totalConcurrentWrites, "concurrent writes occurred")

	var data []byte
	var err error
	for i := 0; i < rmwRetries; i++ {
		data, _, err = GetBytes(context.Background(), bsTest.bs, key, BlobRange{})

		if err == nil {
			break
		}
	}

	if err != nil {
		t.Errorf("Having IO issues testing concurrent blobstore CheckAndPuts")
		return
	}

	if len(data) != bsTest.rmwIterations*bsTest.rmwConcurrency {
		t.Errorf("Output data is not of the correct size. This is caused by bad synchronization where a read/read/write/write has occurred.")
	}

	for i, v := range data {
		if i != int(v) {
			t.Errorf("Data does not match the expected output.")
		}
	}
}

// TestConcurrentCheckAndPuts runs the contention check against every
// configured Blobstore implementation.
func TestConcurrentCheckAndPuts(t *testing.T) {
	for _, bsTest := range newBlobStoreTests() {
		if bsTest.rmwIterations*bsTest.rmwConcurrency > 255 {
			// Each update's index is stored as a single byte, so the total
			// update count must fit in one byte.  (Typo "epects" fixed.)
			panic("Test expects less than 255 total updates or it won't work as is.")
		}

		testConcurrentCheckAndPuts(t, bsTest, uuid.New().String())
	}
}
// setupRangeTest seeds the store with data under the shared test key,
// aborting the test immediately if the write fails.
func setupRangeTest(t *testing.T, bs Blobstore, data []byte) {
	_, err := PutBytes(context.Background(), bs, key, data)

	if err != nil {
		t.FailNow()
	}
}

// testGetRange reads the blob back through the given BlobRange and verifies
// the returned bytes match expected exactly, in both length and content.
func testGetRange(t *testing.T, bs Blobstore, br BlobRange, expected []byte) {
	retrieved, _, err := GetBytes(context.Background(), bs, key, br)

	if err != nil {
		t.Errorf("Get failed: %v.", err)
	}

	if len(retrieved) != len(expected) {
		t.Errorf("Range results are not the right size")
		return
	}

	for i := 0; i < len(expected); i++ {
		if retrieved[i] != expected[i] {
			t.Errorf("Bad Value")
			return
		}
	}
}
// rangeData returns the big-endian 2-byte encodings of the int16 values
// min, min+1, ..., max-1 concatenated in order; the result has length
// 2*(max-min).  Panics if max <= min.
//
// The previous version wrote through a bytes.Buffer aliased over a
// pre-made slice and returned the original slice, which only worked
// because exactly 2*(max-min) bytes were written; returning b.Bytes()
// removes that fragile aliasing.
func rangeData(min, max int64) []byte {
	if max <= min {
		panic("rangeData: max must be greater than min")
	}

	size := max - min
	b := bytes.NewBuffer(make([]byte, 0, 2*size))
	for i := int16(min); i < int16(max); i++ {
		binary.Write(b, binary.BigEndian, i)
	}

	return b.Bytes()
}
// TestGetRange seeds each store with the big-endian int16 sequence 0..16383
// (32KiB) and exercises BlobRange reads: full range, prefix, interior, and
// negative (from-the-end) offsets.
func TestGetRange(t *testing.T) {
	maxValue := int64(16 * 1024)
	testData := rangeData(0, maxValue)
	tests := newBlobStoreTests()

	for _, bsTest := range tests {
		setupRangeTest(t, bsTest.bs, testData)
	}

	// test full range
	for _, bsTest := range tests {
		testGetRange(t, bsTest.bs, AllRange, rangeData(0, maxValue))
	}

	// test first 2048 bytes (1024 shorts)
	for _, bsTest := range tests {
		testGetRange(t, bsTest.bs, NewBlobRange(0, 2048), rangeData(0, 1024))
	}

	// test range of values from 1024 to 2048 stored in bytes 2048 to 4096 of the original testData
	for _, bsTest := range tests {
		testGetRange(t, bsTest.bs, NewBlobRange(2*1024, 2*1024), rangeData(1024, 2048))
	}

	// test the last 2048 bytes of data which will be the last 1024 shorts
	for _, bsTest := range tests {
		testGetRange(t, bsTest.bs, NewBlobRange(-2*1024, 0), rangeData(maxValue-1024, maxValue))
	}

	// test the range beginning 2048 bytes from the end of size 512 which will be shorts 1024 from the end til 768 from the end
	for _, bsTest := range tests {
		testGetRange(t, bsTest.bs, NewBlobRange(-2*1024, 512), rangeData(maxValue-1024, maxValue-768))
	}
}

// TestPanicOnNegativeRangeLength verifies that constructing a BlobRange with
// a negative length panics.
func TestPanicOnNegativeRangeLength(t *testing.T) {
	defer func() {
		if r := recover(); r == nil {
			t.Errorf("The code did not panic")
		}
	}()

	NewBlobRange(0, -1)
}
-45
View File
@@ -1,45 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package chunks
import (
"bytes"
"testing"
"github.com/stretchr/testify/assert"
)
// TestSerializeRoundTrip serializes two chunks into one buffer, deserializes
// the stream onto a channel, and verifies the chunks come back in write
// order.
func TestSerializeRoundTrip(t *testing.T) {
	assert := assert.New(t)
	inputs := [][]byte{[]byte("abc"), []byte("def")}
	chnx := make([]Chunk, len(inputs))
	for i, data := range inputs {
		chnx[i] = NewChunk(data)
	}

	buf := &bytes.Buffer{}
	Serialize(chnx[0], buf)
	Serialize(chnx[1], buf)

	chunkChan := make(chan *Chunk)
	go func() {
		defer close(chunkChan)
		err := Deserialize(bytes.NewReader(buf.Bytes()), chunkChan)
		assert.NoError(err)
	}()

	// Each received chunk must match the head of the remaining expected
	// list; consuming the list as we go also checks ordering.
	for c := range chunkChan {
		assert.Equal(chnx[0].Hash(), c.Hash())
		chnx = chnx[1:]
	}
	assert.Len(chnx, 0)
}

// TestBadSerialization feeds Deserialize a stream too short to contain even
// the first length header and expects an error.
func TestBadSerialization(t *testing.T) {
	bad := []byte{0, 1} // Not enough bytes to read first length
	ch := make(chan *Chunk)
	defer close(ch)
	assert.Error(t, Deserialize(bytes.NewReader(bad), ch))
}
-99
View File
@@ -1,99 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package chunks
import (
"context"
"github.com/stretchr/testify/suite"
"github.com/attic-labs/noms/go/constants"
"github.com/attic-labs/noms/go/hash"
)
// ChunkStoreTestSuite is a reusable testify suite exercising the ChunkStore
// contract; concrete store packages embed it and supply a Factory during
// their own SetupTest.
type ChunkStoreTestSuite struct {
	suite.Suite
	Factory Factory
}

// TestChunkStorePut verifies a chunk written with Put is immediately readable
// back through the same store.
func (suite *ChunkStoreTestSuite) TestChunkStorePut() {
	store := suite.Factory.CreateStore(context.Background(), "ns")
	input := "abc"
	c := NewChunk([]byte(input))
	store.Put(context.Background(), c)
	h := c.Hash()

	// Reading it via the API should work.
	assertInputInStore(input, h, store, suite.Assert())
}

// TestChunkStoreRoot verifies optimistic root updates: Commit with a stale
// "last" root fails, while Commit with the current root succeeds.
func (suite *ChunkStoreTestSuite) TestChunkStoreRoot() {
	store := suite.Factory.CreateStore(context.Background(), "ns")
	oldRoot := store.Root(context.Background())
	suite.True(oldRoot.IsEmpty())

	bogusRoot := hash.Parse("8habda5skfek1265pc5d5l1orptn5dr0")
	newRoot := hash.Parse("8la6qjbh81v85r6q67lqbfrkmpds14lg")

	// Try to update root with bogus oldRoot
	result := store.Commit(context.Background(), newRoot, bogusRoot)
	suite.False(result)

	// Now do a valid root update
	result = store.Commit(context.Background(), newRoot, oldRoot)
	suite.True(result)
}

// TestChunkStoreCommitPut verifies that Put-written chunks only become
// visible to freshly-created store instances after a Commit.
func (suite *ChunkStoreTestSuite) TestChunkStoreCommitPut() {
	name := "ns"
	store := suite.Factory.CreateStore(context.Background(), name)
	input := "abc"
	c := NewChunk([]byte(input))
	store.Put(context.Background(), c)
	h := c.Hash()

	// Reading it via the API should work...
	assertInputInStore(input, h, store, suite.Assert())
	// ...but it shouldn't be persisted yet
	assertInputNotInStore(input, h, suite.Factory.CreateStore(context.Background(), name), suite.Assert())

	store.Commit(context.Background(), h, store.Root(context.Background())) // Commit persists Chunks
	assertInputInStore(input, h, store, suite.Assert())
	assertInputInStore(input, h, suite.Factory.CreateStore(context.Background(), name), suite.Assert())
}

// TestChunkStoreGetNonExisting verifies Get on an absent hash returns the
// empty chunk rather than failing.
func (suite *ChunkStoreTestSuite) TestChunkStoreGetNonExisting() {
	store := suite.Factory.CreateStore(context.Background(), "ns")
	h := hash.Parse("11111111111111111111111111111111")
	c := store.Get(context.Background(), h)
	suite.True(c.IsEmpty())
}

// TestChunkStoreVersion verifies a store reports the current NomsVersion,
// including after its root has been moved.
func (suite *ChunkStoreTestSuite) TestChunkStoreVersion() {
	store := suite.Factory.CreateStore(context.Background(), "ns")
	oldRoot := store.Root(context.Background())
	suite.True(oldRoot.IsEmpty())
	newRoot := hash.Parse("11111222223333344444555556666677")
	suite.True(store.Commit(context.Background(), newRoot, oldRoot))

	suite.Equal(constants.NomsVersion, store.Version())
}

// TestChunkStoreCommitUnchangedRoot verifies that committing with an
// unchanged root still persists pending chunks, and that a second store sees
// them after Rebase.
func (suite *ChunkStoreTestSuite) TestChunkStoreCommitUnchangedRoot() {
	store1, store2 := suite.Factory.CreateStore(context.Background(), "ns"), suite.Factory.CreateStore(context.Background(), "ns")
	input := "abc"
	c := NewChunk([]byte(input))
	store1.Put(context.Background(), c)
	h := c.Hash()

	// Reading c from store1 via the API should work...
	assertInputInStore(input, h, store1, suite.Assert())
	// ...but not store2.
	assertInputNotInStore(input, h, store2, suite.Assert())

	store1.Commit(context.Background(), store1.Root(context.Background()), store1.Root(context.Background()))
	store2.Rebase(context.Background())

	// Now, reading c from store2 via the API should work...
	assertInputInStore(input, h, store2, suite.Assert())
}
-51
View File
@@ -1,51 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package chunks
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestChunk checks that hashing a known input produces the expected digest
// string.
func TestChunk(t *testing.T) {
	c := NewChunk([]byte("abc"))
	h := c.Hash()
	// See http://www.di-mgt.com.au/sha_testvectors.html
	assert.Equal(t, "rmnjb8cjc5tblj21ed4qs821649eduie", h.String())
}

// TestChunkWriteAfterCloseFails verifies a ChunkWriter rejects (panics on)
// writes once it has been closed.
func TestChunkWriteAfterCloseFails(t *testing.T) {
	assert := assert.New(t)
	input := "abc"
	w := NewChunkWriter()
	_, err := w.Write([]byte(input))
	assert.NoError(err)

	assert.NoError(w.Close())
	assert.Panics(func() { w.Write([]byte(input)) }, "Write() after Close() should barf!")
}

// TestChunkWriteAfterChunkFails verifies a ChunkWriter rejects (panics on)
// writes once Chunk() has been called to finalize it.
func TestChunkWriteAfterChunkFails(t *testing.T) {
	assert := assert.New(t)
	input := "abc"
	w := NewChunkWriter()
	_, err := w.Write([]byte(input))
	assert.NoError(err)

	_ = w.Chunk()
	assert.Panics(func() { w.Write([]byte(input)) }, "Write() after Chunk() should barf!")
}
// TestChunkChunkCloses verifies that calling Chunk() finalizes the writer:
// any Write after Chunk() must panic.  (The assertion message previously said
// "after Close()", copied from the close test; this test exercises Chunk().)
func TestChunkChunkCloses(t *testing.T) {
	assert := assert.New(t)
	input := "abc"
	w := NewChunkWriter()
	_, err := w.Write([]byte(input))
	assert.NoError(err)

	w.Chunk()
	assert.Panics(func() { w.Write([]byte(input)) }, "Write() after Chunk() should barf!")
}
-27
View File
@@ -1,27 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package chunks
import (
"testing"
"github.com/stretchr/testify/suite"
)
// TestMemoryStoreTestSuite runs the shared ChunkStore contract suite against
// the in-memory store implementation.
func TestMemoryStoreTestSuite(t *testing.T) {
	suite.Run(t, &MemoryStoreTestSuite{})
}

// MemoryStoreTestSuite specializes the shared ChunkStoreTestSuite with a
// memory-backed store factory.
type MemoryStoreTestSuite struct {
	ChunkStoreTestSuite
}

// SetupTest gives each test a fresh memory store factory.
func (suite *MemoryStoreTestSuite) SetupTest() {
	suite.Factory = NewMemoryStoreFactory()
}

// TearDownTest shuts the factory down after each test.
func (suite *MemoryStoreTestSuite) TearDownTest() {
	suite.Factory.Shutter()
}
-159
View File
@@ -1,159 +0,0 @@
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package chunks
import (
"sync"
"testing"
"github.com/attic-labs/noms/go/hash"
"github.com/stretchr/testify/assert"
)
// TestGetRequestBatch builds a ReadBatch containing both Absent- and
// Get-style outstanding requests for three hashes, satisfies two of them
// from a goroutine, then closes the batch.  Per the assertions below,
// requests for the satisfied hashes deliver their chunks / report presence
// (false on the absent channel), while requests for the never-found hash h0
// are resolved by Close: absent -> true, get -> EmptyChunk.
func TestGetRequestBatch(t *testing.T) {
	assert := assert.New(t)
	h0 := hash.Parse("00000000000000000000000000000000")
	c1 := NewChunk([]byte("abc"))
	h1 := c1.Hash()
	c2 := NewChunk([]byte("123"))
	h2 := c2.Hash()

	// tally counts boolean responses so the assertions can check exactly how
	// many true/false answers each request channel produced.
	tally := func(b bool, trueCnt, falseCnt *int) {
		if b {
			*trueCnt++
		} else {
			*falseCnt++
		}
	}

	req0chan := make(chan bool, 1)
	req1chan := make(chan *Chunk, 1)
	req2chan := make(chan bool, 1)
	req3chan := make(chan bool, 1)
	req4chan := make(chan *Chunk, 1)

	defer func() { close(req0chan); close(req1chan); close(req2chan); close(req3chan); close(req4chan) }()

	batch := ReadBatch{
		h0: []OutstandingRequest{OutstandingAbsent(req0chan), OutstandingGet(req1chan)},
		h1: []OutstandingRequest{OutstandingAbsent(req2chan)},
		h2: []OutstandingRequest{OutstandingAbsent(req3chan), OutstandingGet(req4chan)},
	}

	go func() {
		// Satisfy h1 and h2, leaving h0 for batch.Close() to resolve.
		// Deleting entries while ranging over a map is safe in Go.
		for requestedHash, reqs := range batch {
			for _, req := range reqs {
				if requestedHash == h1 {
					req.Satisfy(h1, &c1)
					delete(batch, h1)
				} else if requestedHash == h2 {
					req.Satisfy(h2, &c2)
					delete(batch, h2)
				}
			}
		}
		batch.Close()
	}()

	var r0True, r0False, r2True, r2False, r3True, r3False int

	b := <-req0chan
	tally(b, &r0True, &r0False)
	c := <-req1chan
	// h0 was never found, so its Get request yields the empty chunk.
	assert.EqualValues(EmptyChunk.Hash(), c.Hash())

	b = <-req2chan
	tally(b, &r2True, &r2False)
	b = <-req3chan
	tally(b, &r3True, &r3False)
	c = <-req4chan
	assert.EqualValues(c2.Hash(), c.Hash())

	assert.Equal(1, r0True)
	assert.Equal(0, r0False)
	assert.Equal(0, r2True)
	assert.Equal(1, r2False)
	assert.Equal(0, r3True)
	assert.Equal(1, r3False)
}
// TestGetManyRequestBatch issues one GetManyRequest covering three hashes,
// satisfies two of them via a ReadBatch goroutine, and verifies that once
// every outstanding request is settled (closing the chunk channel) only the
// never-satisfied hash h0 remains in the set.
func TestGetManyRequestBatch(t *testing.T) {
	assert := assert.New(t)
	h0 := hash.Parse("00000000000000000000000000000000")
	c1 := NewChunk([]byte("abc"))
	h1 := c1.Hash()
	c2 := NewChunk([]byte("123"))
	h2 := c2.Hash()

	chunks := make(chan *Chunk)
	hashes := hash.NewHashSet(h0, h1, h2)
	wg := &sync.WaitGroup{}
	wg.Add(len(hashes))
	// Close the results channel once every request has been settled.
	go func() { wg.Wait(); close(chunks) }()
	req := NewGetManyRequest(hashes, wg, chunks)

	batch := ReadBatch{
		h0: {req.Outstanding()},
		h1: {req.Outstanding()},
		h2: {req.Outstanding()},
	}

	go func() {
		// Satisfy h1 and h2, leaving h0 for batch.Close() to settle.
		for reqHash, reqs := range batch {
			for _, req := range reqs {
				if reqHash == h1 {
					req.Satisfy(h1, &c1)
					delete(batch, h1)
				} else if reqHash == h2 {
					req.Satisfy(h2, &c2)
					delete(batch, h2)
				}
			}
		}
		batch.Close()
	}()

	// Remove each delivered chunk's hash; the loop ends when chunks closes.
	for c := range chunks {
		hashes.Remove(c.Hash())
	}

	assert.Len(hashes, 1)
	assert.True(hashes.Has(h0))
}
// TestAbsentManyRequestBatch mirrors TestGetManyRequestBatch for absence
// checks: two hashes are satisfied with EmptyChunk and get reported on the
// found channel, while only the hash left for batch.Close() (h0) remains in
// the set afterwards.
func TestAbsentManyRequestBatch(t *testing.T) {
	assert := assert.New(t)
	h0 := hash.Parse("00000000000000000000000000000000")
	c1 := NewChunk([]byte("abc"))
	h1 := c1.Hash()
	c2 := NewChunk([]byte("123"))
	h2 := c2.Hash()

	found := make(chan hash.Hash)
	hashes := hash.NewHashSet(h0, h1, h2)
	wg := &sync.WaitGroup{}
	wg.Add(len(hashes))
	// Close the results channel once every request has been settled.
	go func() { wg.Wait(); close(found) }()
	req := NewAbsentManyRequest(hashes, wg, found)

	batch := ReadBatch{}
	for h := range req.Hashes() {
		batch[h] = []OutstandingRequest{req.Outstanding()}
	}

	go func() {
		// Satisfy h1 and h2 with EmptyChunk, leaving h0 for batch.Close().
		for reqHash, reqs := range batch {
			for _, req := range reqs {
				if reqHash == h1 {
					req.Satisfy(h1, &EmptyChunk)
					delete(batch, h1)
				} else if reqHash == h2 {
					req.Satisfy(h2, &EmptyChunk)
					delete(batch, h2)
				}
			}
		}
		batch.Close()
	}()

	for h := range found {
		hashes.Remove(h)
	}

	assert.Len(hashes, 1)
	assert.True(hashes.Has(h0))
}
+81
View File
@@ -0,0 +1,81 @@
package cli
import (
"flag"
"github.com/liquidata-inc/ld/dolt/go/libraries/set"
"strings"
)
// BoolFlagMap holds a map of flag names to their value pointers which should be filled in by a call to
// flag.Flagset.Parse(args)
type BoolFlagMap struct {
flags map[string]*bool
}
// NewBoolFlagMap iterates over all the argument name and argument description pairs provided in the nameToDesc map
// and creates a bool flag using the flagset. The pointer to the value is stored in an internal map which lives
// within the instance that is returned. After Parse(args) is called on the flag.FlagSet the values of the flags within
// the map will be updated and can be retrieved using GetEqualTo
func NewBoolFlagMap(fs *flag.FlagSet, nameToDesc map[string]string) *BoolFlagMap {
flags := make(map[string]*bool)
for k, v := range nameToDesc {
flags[k] = fs.Bool(k, false, v)
}
return &BoolFlagMap{flags}
}
// GetEqualTo returns a slice of all the names of the flags whose value is equal to the testVal provided.
func (bfm *BoolFlagMap) GetEqualTo(testVal bool) *set.StrSet {
names := make([]string, 0, len(bfm.flags))
for k, v := range bfm.flags {
if *v == testVal {
names = append(names, k)
}
}
return set.NewStrSet(names)
}
// Get reports the parsed value of the named flag. Panics if flagName was never registered.
func (bfm *BoolFlagMap) Get(flagName string) bool {
	valPtr := bfm.flags[flagName]
	return *valPtr
}
// StrArgMap holds a map of string argument names to their value pointers, filled in by
// flag.FlagSet.Parse. Update must be called after parsing to trim values and record
// which arguments were left empty.
type StrArgMap struct {
	// args maps each argument name to the *string registered on the flag.FlagSet.
	args map[string]*string
	// emptyArgs is the set of names whose trimmed value was empty when Update was
	// last called; nil until Update runs.
	emptyArgs *set.StrSet
}
// NewStrArgMap registers a string flag on fs for every name/description pair in
// nameToDesc and returns a StrArgMap tracking the value pointers. Call Update after
// fs.Parse to normalize the values.
func NewStrArgMap(fs *flag.FlagSet, nameToDesc map[string]string) *StrArgMap {
	tracked := make(map[string]*string)
	for name, desc := range nameToDesc {
		tracked[name] = fs.String(name, "", desc)
	}
	return &StrArgMap{tracked, nil}
}
// Update trims whitespace from every parsed argument value and records the names of
// arguments whose resulting value is empty. Call once after flag.FlagSet.Parse.
func (sfm *StrArgMap) Update() {
	sfm.emptyArgs = set.NewStrSet([]string{})
	for name, valPtr := range sfm.args {
		trimmed := ""
		if valPtr != nil {
			trimmed = strings.TrimSpace(*valPtr)
		}
		sfm.args[name] = &trimmed
		if trimmed == "" {
			sfm.emptyArgs.Add(name)
		}
	}
}
// GetEmpty returns the set of argument names that were empty (after trimming) when
// Update was last called. Returns nil if Update has not been called.
func (sfm *StrArgMap) GetEmpty() *set.StrSet {
	return sfm.emptyArgs
}
// Get returns the parsed (and, after Update, trimmed) value of the named argument.
// Panics if param was never registered.
func (sfm *StrArgMap) Get(param string) string {
	return *sfm.args[param]
}
+65
View File
@@ -0,0 +1,65 @@
package cli
import (
"flag"
"testing"
)
// initBFMTest builds a BoolFlagMap over five test flags and parses args into it.
func initBFMTest(args []string) *BoolFlagMap {
	descs := map[string]string{
		"key1": "desc1",
		"key2": "desc2",
		"key3": "desc3",
		"key4": "desc4",
		"key5": "desc5",
	}
	fs := flag.NewFlagSet("app", flag.ExitOnError)
	bfm := NewBoolFlagMap(fs, descs)
	fs.Parse(args)
	return bfm
}
// TestNewBoolFlagMap verifies that flags passed on the command line parse to true and
// all remaining flags stay false.
func TestNewBoolFlagMap(t *testing.T) {
	bfm := initBFMTest([]string{"-key1", "-key3"})

	wantTrue := []string{"key1", "key3"}
	if actual := bfm.GetEqualTo(true); !actual.ContainsAll(wantTrue) {
		t.Error("expected trues:", wantTrue, "actual trues:", actual.AsSlice())
	}

	wantFalse := []string{"key2", "key4", "key5"}
	if actual := bfm.GetEqualTo(false); !actual.ContainsAll(wantFalse) {
		t.Error("expected falses:", wantFalse, "actual falses:", actual.AsSlice())
	}
}
// initSAMTest builds a StrArgMap over five test args, parses args, and normalizes it.
func initSAMTest(args []string) *StrArgMap {
	descs := map[string]string{
		"key1": "desc1",
		"key2": "desc2",
		"key3": "desc3",
		"key4": "desc4",
		"key5": "desc5",
	}
	fs := flag.NewFlagSet("app", flag.ExitOnError)
	sam := NewStrArgMap(fs, descs)
	fs.Parse(args)
	sam.Update()
	return sam
}
// TestNewArgMap verifies that unsupplied string args are reported as empty and supplied
// ones retain their values.
func TestNewArgMap(t *testing.T) {
	sam := initSAMTest([]string{"-key1", "val1", "-key3", "val3"})

	wantEmpty := []string{"key2", "key4", "key5"}
	empty := sam.GetEmpty()
	if empty.Size() != len(wantEmpty) || !empty.ContainsAll(wantEmpty) {
		t.Error("Unexpected empty set contents")
	}
	if sam.Get("key1") != "val1" {
		t.Error("Unexpected value for key1")
	}
}
+73
View File
@@ -0,0 +1,73 @@
package cli
import (
"fmt"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"os"
"strings"
)
// CommandFunc specifies the signature of the functions that will be called based on the command line being executed.
// The first argument is the full command string executed so far (e.g. "app child sub"), the
// second is the remaining unparsed arguments, and the returned int is the exit code (0 = success).
type CommandFunc func(string, []string, *env.DoltCLIEnv) int
// Command represents either a command to be run, or a command that is a parent of a subcommand.
// Parent commands get a Func produced by GenSubCommandHandler that dispatches to their children.
type Command struct {
	// Name is what the user will type on the command line in order to execute this command
	Name string

	// Desc is a short description of the command
	Desc string

	// Func is the CommandFunc that gets called when the user executes this command
	Func CommandFunc

	// ReqRepo says whether the command must be executed in an initialized dolt data repository directory. This should
	// always be set to false for non leaf commands.
	ReqRepo bool
}
// MapCommands takes a list of commands and maps them based on the commands name,
// lowercased so lookups are case-insensitive.
func MapCommands(commands []*Command) map[string]*Command {
	byName := make(map[string]*Command, len(commands))
	for _, cmd := range commands {
		byName[strings.ToLower(cmd.Name)] = cmd
	}
	return byName
}
// GenSubCommandHandler returns a handler function that will handle subcommand processing.
// The returned CommandFunc resolves args[0] (case-insensitively) against commands,
// enforces the ReqRepo requirement, and dispatches to the matched command's Func.
// It returns 1 for a missing or unknown subcommand and 2 when a repository is required
// but the current directory has none.
func GenSubCommandHandler(commands []*Command) CommandFunc {
	byName := MapCommands(commands)

	return func(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
		if len(args) == 0 {
			printUsage(commandStr, commands)
			return 1
		}

		subCommandStr := strings.ToLower(strings.TrimSpace(args[0]))
		command, ok := byName[subCommandStr]
		if !ok {
			fmt.Fprintln(os.Stderr, color.RedString("Unknown Command "+subCommandStr))
			printUsage(commandStr, commands)
			return 1
		}

		if command.ReqRepo && !cliEnv.HasLDDir() {
			fmt.Fprintln(os.Stderr, color.RedString("The current directory is not a valid dolt repository."))
			fmt.Fprintln(os.Stderr, "run: dolt init before trying to run this command")
			return 2
		}

		return command.Func(commandStr+" "+subCommandStr, args[1:], cliEnv)
	}
}
// printUsage writes the list of valid subcommands for commandStr to stdout.
func printUsage(commandStr string, commands []*Command) {
	fmt.Println("Valid commands for", commandStr, "are")
	for _, cmd := range commands {
		fmt.Printf(" %16s - %s\n", cmd.Name, cmd.Desc)
	}
}
+91
View File
@@ -0,0 +1,91 @@
package cli
import (
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"reflect"
"strings"
"testing"
)
const (
	// appName is the root command name runCommand expects at the start of every test
	// command line.
	appName = "app"
)
// trackedCommandFunc records whether and how its commandFunc method was invoked so
// tests can assert on command dispatch.
type trackedCommandFunc struct {
	called bool     // set once commandFunc runs
	cmdStr string   // the command string passed to commandFunc
	args   []string // the args passed to commandFunc
}
// wasCalled reports whether commandFunc has been invoked.
func (tf *trackedCommandFunc) wasCalled() bool {
	return tf.called
}
// commandFunc is a cli.CommandFunc that records its invocation and always succeeds.
func (tf *trackedCommandFunc) commandFunc(cmdStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	tf.called = true
	tf.cmdStr = cmdStr
	tf.args = args
	return 0
}
// equalsState reports whether the recorded invocation state matches the expected
// called flag, command string, and args.
func (tf *trackedCommandFunc) equalsState(called bool, cmdStr string, args []string) bool {
	return called == tf.called && cmdStr == tf.cmdStr && reflect.DeepEqual(args, tf.args)
}
// TestCommands exercises subcommand dispatch through a two-level command tree, checking
// that exactly the addressed leaf command runs and receives the expanded command string
// plus the remaining args.
// NOTE(review): the `res` values from the three dispatch calls below are assigned but
// never checked — consider asserting on them.
func TestCommands(t *testing.T) {
	child1 := &trackedCommandFunc{}
	grandChild1 := &trackedCommandFunc{}
	commands := &Command{appName, "test application", GenSubCommandHandler([]*Command{
		{"child1", "first child command", child1.commandFunc, false},
		{"child2", "second child command", GenSubCommandHandler([]*Command{
			{"grandchild1", "child2's first child", grandChild1.commandFunc, false},
		}), false},
	}), false}

	// Missing subcommand -> usage + non-zero exit.
	res := runCommand(commands, "app")
	if res == 0 {
		t.Error("bad return should be non-zero")
	}

	// Unknown subcommand -> non-zero exit.
	res = runCommand(commands, "app invalid")
	if res == 0 {
		t.Error("bad return. should be non-zero")
	}

	if !child1.equalsState(false, "", nil) || !grandChild1.equalsState(false, "", nil) {
		t.Fatal("Bad initial state")
	}

	// Dispatch to a direct child; it sees the combined command string and the args.
	res = runCommand(commands, "app child1 -flag -param=value arg0 arg1")
	if !child1.equalsState(true, "app child1", []string{"-flag", "-param=value", "arg0", "arg1"}) ||
		!grandChild1.equalsState(false, "", nil) {
		t.Fatal("Bad state after running child1")
	}

	// child2 is a parent command; "-flag" is an unknown subcommand, so no tracked state changes.
	res = runCommand(commands, "app child2 -flag -param=value arg0 arg1")
	if !child1.equalsState(true, "app child1", []string{"-flag", "-param=value", "arg0", "arg1"}) ||
		!grandChild1.equalsState(false, "", nil) {
		t.Fatal("Bad state before running grandChild1")
	}

	// Dispatch through both levels to the grandchild.
	res = runCommand(commands, "app child2 grandchild1 -flag -param=value arg0 arg1")
	if !child1.equalsState(true, "app child1", []string{"-flag", "-param=value", "arg0", "arg1"}) ||
		!grandChild1.equalsState(true, "app child2 grandchild1", []string{"-flag", "-param=value", "arg0", "arg1"}) {
		t.Fatal("Bad state after running grandchild1")
	}
}
// runCommand splits a space-separated command line, verifies it starts with appName,
// and dispatches the remaining tokens to root's handler, returning its exit code.
// Panics if the command line does not start with appName (a test-authoring error).
func runCommand(root *Command, commandLine string) int {
	tokens := strings.Split(commandLine, " ")

	if tokens[0] != appName {
		// Fixed typo in the panic message ("commandh" -> "command").
		panic("Invalid test command line")
	}

	return root.Func(appName, tokens[1:], nil)
}
+7
View File
@@ -0,0 +1,7 @@
// Package cli is intended to help in the development of hierarchical multi command, command line applications.
// These typically have command lines that look like:
// app command [<options>]
// app command subcommand [<options>]
// app command subcommand1 subcommand2 [<options>]
// etc.
package cli
+101
View File
@@ -0,0 +1,101 @@
package commands
import (
"flag"
"fmt"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"github.com/liquidata-inc/ld/dolt/go/libraries/set"
"os"
"strings"
)
// addUsage returns a usage-printing function for the add command's flag set.
func addUsage(fs *flag.FlagSet) func() {
	return func() { fs.PrintDefaults() }
}
// Add implements `dolt add`: stages the named tables (or every table for "."). Tables
// must exist in either the staged or working root. Returns 0 on success, 1 on failure.
func Add(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = addUsage(fs)
	fs.Parse(args)

	stagedRoot, workingRoot, verr := getStagedAndWorking(cliEnv)

	if verr == nil {
		tbls := fs.Args()

		if fs.NArg() == 0 {
			// NOTE(review): after printing this hint the function still falls through
			// and stages an empty table list instead of returning — confirm intended.
			fmt.Println("Nothing specified, nothing added.\n Maybe you wanted to say 'dolt add .'?")
		} else if fs.NArg() == 1 && fs.Arg(0) == "." {
			// "dolt add ." stages every table known to either root.
			tbls = allTables(stagedRoot, workingRoot)
		}

		verr = validateTables(tbls, stagedRoot, workingRoot)

		if verr == nil {
			verr = updateStaged(cliEnv, tbls, stagedRoot, workingRoot)

			if verr == nil {
				return 0
			}
		}
	}

	fmt.Fprintln(os.Stderr, verr.Verbose())
	return 1
}
// updateStaged copies the named tables from the working root into the staged root and
// persists the updated staged root via cliEnv.
func updateStaged(cliEnv *env.DoltCLIEnv, tbls []string, staged, working *doltdb.RootValue) errhand.VerboseError {
	newStaged := staged.UpdateTablesFromOther(tbls, working)
	return cliEnv.UpdateStagedRoot(newStaged)
}
// getStagedAndWorking fetches the staged and working root values from the repository,
// wrapping any failure in a verbose error.
func getStagedAndWorking(cliEnv *env.DoltCLIEnv) (*doltdb.RootValue, *doltdb.RootValue, errhand.VerboseError) {
	staged, err := cliEnv.StagedRoot()
	if err != nil {
		return nil, nil, errhand.BuildDError("Unable to get staged.").AddCause(err).Build()
	}

	working, err := cliEnv.WorkingRoot()
	if err != nil {
		return nil, nil, errhand.BuildDError("Unable to get working.").AddCause(err).Build()
	}

	return staged, working, nil
}
// validateTables checks that every name in tbls exists in at least one of the given
// roots, returning a verbose error listing the missing tables otherwise.
func validateTables(tbls []string, roots ...*doltdb.RootValue) errhand.VerboseError {
	var missing []string

	for _, name := range tbls {
		exists := false
		for _, root := range roots {
			if root.HasTable(name) {
				exists = true
				break
			}
		}
		if !exists {
			missing = append(missing, name)
		}
	}

	if len(missing) > 0 {
		return errhand.BuildDError("Unknown table(s): %s", strings.Join(missing, " ")).Build()
	}

	return nil
}
// allTables returns the deduplicated union of table names from the staged and working roots.
func allTables(stagedRoot, workingRoot *doltdb.RootValue) []string {
	names := make([]string, 0, 16)
	names = append(names, stagedRoot.GetTableNames()...)
	names = append(names, workingRoot.GetTableNames()...)
	return set.Unique(names)
}
@@ -0,0 +1,52 @@
package commands
import (
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/commands/edit"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/dtestutils"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/typed/nbf"
"testing"
)
// Fixture constants for the command smoke test below.
const (
	dataNbfFile = "data.nbf" // in-memory file the test data table is written to
	table1      = "tbl1"
	table2      = "tbl2"
)
// TestAddResetCommitRmCommands is a smoke test that drives the add/reset/commit/rm
// command flow against an in-memory repository seeded with one imported table.
// NOTE(review): all command return codes are ignored, so an individual command failure
// would not fail this test — consider asserting on them.
func TestAddResetCommitRmCommands(t *testing.T) {
	cliEnv := dtestutils.CreateTestEnv()

	// Write a test data table to an .nbf file in the in-memory filesystem so that
	// `edit create` has something to import.
	imt, sch := dtestutils.CreateTestDataTable(true)
	imtRd := table.NewInMemTableReader(imt)
	fOut, _ := cliEnv.FS.OpenForWrite(dataNbfFile)
	nbfWr, _ := nbf.NewNBFWriter(fOut, sch)
	table.PipeRows(imtRd, nbfWr, false)
	nbfWr.Close()
	imtRd.Close()

	// Exercise the command surface end to end: create a table, inspect it, stage and
	// commit it, then remove a row and finally the table itself.
	Version("test")("dolt version", []string{}, cliEnv)
	edit.Create("dolt edit create", []string{"-table", table1, dataNbfFile}, cliEnv)
	Diff("dolt diff", []string{"-table", table1}, cliEnv)
	Status("dolt status", []string{}, cliEnv)
	Ls("dolt ls", []string{}, cliEnv)
	Add("dolt add", []string{table1}, cliEnv)
	Commit("dolt commit", []string{"-m", "Added table"}, cliEnv)
	Log("dolt log", []string{}, cliEnv)
	edit.RmRow("dolt rm-row", []string{"-table", table1, "id:00000000-0000-0000-0000-000000000001"}, cliEnv)
	Add("dolt add", []string{table1}, cliEnv)
	Reset("dolt reset", []string{table1}, cliEnv)
	Rm("dolt rm", []string{table1}, cliEnv)
}
+1
View File
@@ -0,0 +1 @@
package commands
+1
View File
@@ -0,0 +1 @@
package commands
+50
View File
@@ -0,0 +1,50 @@
package commands
import (
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/filesys"
"path/filepath"
"testing"
)
const (
	// testHomeDir is the fake user home directory used by the in-memory filesystem.
	testHomeDir = "/user/bheni"
	// workingDir is the fake repository working directory.
	workingDir = "/user/bheni/datasets/addresses"
)
// testHomeDirFunc always reports testHomeDir as the user's home directory.
// NOTE(review): presumably matches the home-dir resolver signature env.Load expects — confirm.
func testHomeDirFunc() (string, error) {
	return testHomeDir, nil
}
// createTestEnv builds an in-memory dolt environment whose working directory contains
// an initialized repository (the dolt dir exists).
func createTestEnv() *env.DoltCLIEnv {
	dirs := []string{testHomeDir, filepath.Join(workingDir, env.DoltDir)}
	memFS := filesys.NewInMemFS(dirs, nil, workingDir)
	return env.Load(testHomeDirFunc, memFS, doltdb.InMemDoltDB)
}
// createUninitializedEnv builds an in-memory dolt environment whose working directory
// has NOT been initialized as a repository (no dolt dir).
func createUninitializedEnv() *env.DoltCLIEnv {
	dirs := []string{testHomeDir, workingDir}
	memFS := filesys.NewInMemFS(dirs, nil, workingDir)
	return env.Load(testHomeDirFunc, memFS, doltdb.InMemDoltDB)
}
// TestCommandsRequireInitializedDir verifies that commands needing repository state
// fail (return non-zero) when run against an uninitialized directory.
func TestCommandsRequireInitializedDir(t *testing.T) {
	tests := []struct {
		cmdStr   string
		args     []string
		commFunc cli.CommandFunc
	}{
		{"dolt config", []string{"-local", "-list"}, Config},
	}

	cliEnv := createUninitializedEnv()

	for _, test := range tests {
		// Fix: the return code was previously discarded, so this test could never
		// fail regardless of command behavior. Assert the expected failure.
		if res := test.commFunc(test.cmdStr, test.args, cliEnv); res == 0 {
			t.Error(test.cmdStr, "should fail when run outside an initialized dolt repository")
		}
	}
}
+82
View File
@@ -0,0 +1,82 @@
package commands
import (
"flag"
"fmt"
"github.com/attic-labs/noms/go/hash"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/config"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"os"
)
// commitUsage returns a usage-printing function for the commit command's flag set.
func commitUsage(fs *flag.FlagSet) func() {
	return func() { fs.PrintDefaults() }
}
// Commit implements `dolt commit`: requires a -m message and writes a commit of the
// currently staged root. Returns 0 on success, 1 on failure.
func Commit(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = commitUsage(fs)
	msg := fs.String("m", "", "The commit message")
	fs.Parse(args)

	if len(*msg) == 0 {
		fmt.Fprintln(os.Stderr, color.RedString("Missing required parameter -m"))
		fs.Usage()
		return 1
	}

	return processCommit(*msg, cliEnv)
}
// processCommit resolves the committer's name and email from config and commits the
// staged root, reporting success or the verbose error to the user.
func processCommit(msg string, cliEnv *env.DoltCLIEnv) int {
	name, email, verr := getNameAndEmail(cliEnv.Config)
	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	meta := doltdb.NewCommitMeta(name, email, msg)
	if verr = commitStaged(cliEnv, meta); verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	fmt.Println(color.CyanString("Commit completed successfully."))
	return 0
}
// getNameAndEmail reads the committer's name and email from the config hierarchy,
// returning a verbose error with remediation instructions if either is unset.
// NOTE(review): errors other than ErrConfigParamNotFound are silently ignored, which
// would yield empty name/email values — confirm that is acceptable.
func getNameAndEmail(cfg *env.DoltCliConfig) (string, string, errhand.VerboseError) {
	name, nmErr := cfg.GetString(env.UserNameKey)
	email, emErr := cfg.GetString(env.UserEmailKey)

	if nmErr == config.ErrConfigParamNotFound {
		bdr := errhand.BuildDError("Could not determine %s.", env.UserNameKey)
		bdr.AddDetails("dolt config [-global|local] -set %[1]s:\"FIRST LAST\"", env.UserNameKey)
		return "", "", bdr.Build()
	} else if emErr == config.ErrConfigParamNotFound {
		bdr := errhand.BuildDError("Could not determine %s.", env.UserEmailKey)
		bdr.AddDetails("dolt config [-global|local] -set %[1]s:\"EMAIL_ADDRESS\"", env.UserEmailKey)
		return "", "", bdr.Build()
	}

	return name, email, nil
}
// commitStaged commits the staged root hash recorded in the repo state to the current
// branch with the given metadata.
func commitStaged(cliEnv *env.DoltCLIEnv, meta *doltdb.CommitMeta) errhand.VerboseError {
	stagedHash := hash.Parse(cliEnv.RepoState.Staged)

	if _, err := cliEnv.DoltDB.Commit(stagedHash, cliEnv.RepoState.Branch, meta); err != nil {
		return errhand.BuildDError("Unable to write commit.").AddCause(err).Build()
	}

	return nil
}
+208
View File
@@ -0,0 +1,208 @@
package commands
import (
"errors"
"flag"
"fmt"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/config"
"github.com/liquidata-inc/ld/dolt/go/libraries/funcitr"
"os"
"strings"
)
// Flag and operation names accepted by the config command.
const (
	globalParamName = "global" // operate on the user's global config
	localParamName  = "local"  // operate on the repository-local config

	setOperationStr   = "set"
	listOperationStr  = "list"
	getOperationStr   = "get"
	unsetOperationStr = "unset"
)
// configUsage returns a usage-printing function for the config command's flag set.
func configUsage(fs *flag.FlagSet) func() {
	return func() { fs.PrintDefaults() }
}
// Config is used by the config command to allow users to view / edit their global and repository local configurations.
// Exactly one of -global/-local and exactly one of -set/-get/-unset/-list must be
// given; any other combination prints an error and returns 1.
func Config(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = configUsage(fs)

	cfgTypeFlagMap := cli.NewBoolFlagMap(fs, map[string]string{
		globalParamName: "Use global config file.",
		localParamName:  "Use repository config file.",
	})
	opFlagMap := cli.NewBoolFlagMap(fs, map[string]string{
		setOperationStr:  "Set the value of one or more config parameters",
		listOperationStr: "List the values of all config parameters.",
		getOperationStr:  "Get the value of one or more config parameters.",
		// Fixed typo in the user-visible description ("paramaters" -> "parameters").
		unsetOperationStr: "Unset the value of one or more config parameters.",
	})

	fs.Parse(args)
	cfgTypes := cfgTypeFlagMap.GetEqualTo(true)
	ops := opFlagMap.GetEqualTo(true)

	// Exactly one scope flag and exactly one operation flag must be present.
	switch cfgTypes.Size() {
	case 2:
		fmt.Fprintln(os.Stderr, color.RedString("Specifying both -local and -global is not valid. Exactly one may be set"))
	case 0:
		fmt.Fprintln(os.Stderr, color.RedString("One of the -local or -global flags must be set"))
	case 1:
		switch ops.Size() {
		case 1:
			// Parameter names are matched case-insensitively.
			lwrArgs := funcitr.MapStrings(fs.Args(), strings.ToLower)
			return processConfigCommand(cliEnv, cfgTypes.AsSlice()[0] == globalParamName, ops.AsSlice()[0], lwrArgs)
		default:
			fmt.Fprintln(os.Stderr, color.RedString("Exactly one of the -set, -get, -unset, -list flags must be set."))
		}
	}

	return 1
}
// processConfigCommand dispatches a validated config operation (get/set/unset/list) to
// its implementation. opName must be one of the known operation strings; anything else
// indicates a programming error and panics.
func processConfigCommand(cliEnv *env.DoltCLIEnv, isGlobal bool, opName string, args []string) int {
	switch opName {
	case getOperationStr:
		printParam := func(k string, v *string) {
			if v == nil {
				fmt.Println(k, color.YellowString(" <NOT SET>"))
			} else {
				fmt.Println(k, "=", *v)
			}
		}
		return getOperation(cliEnv, isGlobal, args, printParam)
	case setOperationStr:
		return setOperation(cliEnv, isGlobal, args)
	case unsetOperationStr:
		return unsetOperation(cliEnv, isGlobal, args)
	case listOperationStr:
		return listOperation(cliEnv, isGlobal, func(k string, v string) {
			fmt.Println(k, "=", v)
		})
	}

	panic("New operation added but not implemented.")
}
// getOperation prints each named parameter's value from the chosen config (global or
// local) via printFn; unset parameters are reported with a nil value. Returns 1 if the
// config cannot be read or an unexpected lookup error occurs.
func getOperation(cliEnv *env.DoltCLIEnv, isGlobal bool, args []string, printFn func(string, *string)) int {
	cfg, ok := cliEnv.Config.GetConfig(newCfgElement(isGlobal))
	if !ok {
		fmt.Fprintln(os.Stderr, color.RedString("Unable to read config."))
		return 1
	}

	for _, param := range args {
		val, err := cfg.GetString(param)
		switch {
		case err == nil:
			printFn(param, &val)
		case err == config.ErrConfigParamNotFound:
			printFn(param, nil)
		default:
			fmt.Fprintln(os.Stderr, color.RedString("Unexpected error: %s", err.Error()))
			return 1
		}
	}

	return 0
}
// setOperation applies key:value updates to the chosen config, creating the repository
// local config on first use if needed. Returns 1 on malformed arguments or write failure.
func setOperation(cliEnv *env.DoltCLIEnv, isGlobal bool, args []string) int {
	updates, err := splitKeyValPairs(args)

	if err != nil {
		// Fixed usage text: previously read "[-local|config]" instead of "[-local|-global]".
		fmt.Fprintln(os.Stderr, color.RedString("Invalid argument format. Usage: dolt config [-local|-global] -set key1:value1 ... keyN:valueN"))
		return 1
	}

	if cfg, ok := cliEnv.Config.GetConfig(newCfgElement(isGlobal)); !ok {
		if !isGlobal {
			// No local config exists yet: create one seeded with the requested values.
			err = cliEnv.Config.CreateLocalConfig(updates)
			if err != nil {
				fmt.Fprintln(os.Stderr, color.RedString("Unable to create repo local config file"))
				return 1
			}
		} else {
			// Presumably a global config always exists once the env loads; the panic
			// documents that assumption.
			panic("Should not have been able to get this far without a global config.")
		}
	} else {
		err = cfg.SetStrings(updates)
		if err != nil {
			fmt.Fprintln(os.Stderr, color.RedString("Failed to update config."))
			return 1
		}
	}

	fmt.Println(color.CyanString("Config successfully updated."))
	return 0
}
// splitKeyValPairs parses each "key:value" argument into a map entry. Only the first
// colon separates key from value, so values may themselves contain colons. Returns an
// error naming the first argument that lacks a colon. A nil or empty args slice yields
// an empty (non-nil) map.
func splitKeyValPairs(args []string) (map[string]string, error) {
	// Fixed: removed the pointless `if kvps != nil` guard (make never returns nil).
	kvps := make(map[string]string, len(args))

	for _, arg := range args {
		colon := strings.IndexByte(arg, ':')

		if colon == -1 {
			// Fixed: the original message lacked a space after the argument,
			// producing e.g. "no_colon_argis not in the format key:value".
			return nil, errors.New(arg + " is not in the format key:value")
		}

		kvps[arg[:colon]] = arg[colon+1:]
	}

	return kvps, nil
}
// unsetOperation removes the named parameters from the chosen config. Unsetting with
// no arguments is a no-op that still reports success. Returns 1 if the config cannot
// be read or the unset fails.
func unsetOperation(cliEnv *env.DoltCLIEnv, isGlobal bool, args []string) int {
	cfg, ok := cliEnv.Config.GetConfig(newCfgElement(isGlobal))
	if !ok {
		fmt.Fprintln(os.Stderr, color.RedString("Unable to read config."))
		return 1
	}

	if len(args) > 0 {
		if err := cfg.Unset(args); err != nil {
			fmt.Fprintln(os.Stderr, color.RedString("Error unsetting the keys %v. Error: %s", args, err.Error()))
			return 1
		}
	}

	fmt.Println(color.CyanString("Config successfully updated."))
	return 0
}
// listOperation passes every name/value pair in the chosen config to printFn.
// Returns 1 if the config cannot be read.
func listOperation(cliEnv *env.DoltCLIEnv, isGlobal bool, printFn func(string, string)) int {
	cfg, ok := cliEnv.Config.GetConfig(newCfgElement(isGlobal))
	if !ok {
		fmt.Fprintln(os.Stderr, color.RedString("Unable to read config."))
		return 1
	}

	cfg.Iter(func(name string, val string) (stop bool) {
		printFn(name, val)
		return false
	})

	return 0
}
// newCfgElement maps the isGlobal flag to the corresponding config element selector.
func newCfgElement(isGlobal bool) env.DoltConfigElement {
	if !isGlobal {
		return env.LocalConfig
	}
	return env.GlobalConfig
}
+148
View File
@@ -0,0 +1,148 @@
package commands
import (
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/config"
"reflect"
"testing"
)
// TestSplitKeyValuePairs covers empty, nil, well-formed, and malformed argument lists.
func TestSplitKeyValuePairs(t *testing.T) {
	if kvps, err := splitKeyValPairs([]string{}); err != nil || kvps == nil || len(kvps) != 0 {
		t.Error("Failed to split empty args")
	}

	if kvps, err := splitKeyValPairs(nil); err != nil || kvps == nil || len(kvps) != 0 {
		t.Error("Failed to split empty args")
	}

	// Only the first colon separates key from value.
	expected := map[string]string{"key": "value", "key2": "value:2"}
	if kvps, err := splitKeyValPairs([]string{"key:value", "key2:value:2"}); err != nil {
		t.Error("Failed to split args")
	} else if !reflect.DeepEqual(kvps, expected) {
		t.Error("expected:", expected, "actual:", kvps)
	}

	if _, err := splitKeyValPairs([]string{"no_colon_arg"}); err == nil {
		t.Error("Unexpected success.")
	}
}
// TestConfig drives the Config command end to end: global set, local set/override,
// global unset, then list and get against both scopes.
func TestConfig(t *testing.T) {
	cliEnv := createTestEnv()

	// Set two global parameters and verify they round-trip.
	ret := Config("dolt config", []string{"-global", "-set", "name:bheni", "title:dufus"}, cliEnv)
	expectedGlobal := map[string]string{
		"name":  "bheni",
		"title": "dufus",
	}
	if ret != 0 {
		t.Error("Failed to set global config")
	} else if cfg, ok := cliEnv.Config.GetConfig(env.GlobalConfig); !ok || !config.Equals(cfg, expectedGlobal) {
		t.Error("config -set did not yield expected global results")
	}

	// A local value shadows the global one in the hierarchy.
	ret = Config("dolt config", []string{"-local", "-set", "title:senior dufus"}, cliEnv)
	expectedLocal := map[string]string{
		"title": "senior dufus",
	}
	if ret != 0 {
		t.Error("Failed to set local config")
	} else if cfg, ok := cliEnv.Config.GetConfig(env.LocalConfig); !ok || !config.Equals(cfg, expectedLocal) {
		t.Error("config -set did not yield expected local results")
	} else if val, err := cfg.GetString("title"); err != nil || val != "senior dufus" {
		t.Error("Unexpected value of \"title\" retrieved from the config hierarchy")
	}

	// Unsetting "name" globally leaves only "title".
	ret = Config("dolt config", []string{"-global", "-unset", "name"}, cliEnv)
	expectedGlobal = map[string]string{
		"title": "dufus",
	}
	if ret != 0 {
		t.Error("Failed to set global config")
	} else if cfg, ok := cliEnv.Config.GetConfig(env.GlobalConfig); !ok || !config.Equals(cfg, expectedGlobal) {
		t.Error("config -set did not yield expected global results")
	}

	// listOperation should enumerate exactly the remaining global pairs...
	expectedGlobal = map[string]string{"title": "dufus"}
	globalProperties := map[string]string{}
	ret = listOperation(cliEnv, true, func(k string, v string) {
		globalProperties[k] = v
	})
	if ret != 0 {
		t.Error("Failed to list global config")
	} else if !reflect.DeepEqual(globalProperties, expectedGlobal) {
		t.Error("listOperation did not yield expected global results")
	}

	// ...and the local pairs.
	expectedLocal = map[string]string{"title": "senior dufus"}
	localProperties := map[string]string{}
	ret = listOperation(cliEnv, false, func(k string, v string) {
		localProperties[k] = v
	})
	if ret != 0 {
		t.Error("Failed to list local config")
	} else if !reflect.DeepEqual(localProperties, expectedLocal) {
		t.Error("listOperation did not yield expected local results")
	}

	// getOperation reports set values...
	ret = getOperation(cliEnv, true, []string{"title"}, func(k string, v *string) {
		if v == nil || *v != "dufus" {
			t.Error("Failed to get expected value for title.")
		}
	})
	if ret != 0 {
		t.Error("get operation failed")
	}

	// ...and a nil value for unset parameters.
	ret = getOperation(cliEnv, true, []string{"name"}, func(k string, v *string) {
		if v != nil {
			t.Error("Failed to get expected value for \"name\" which should not be set in the cofig.")
		}
	})
	if ret != 0 {
		t.Error("get operation failed")
	}
}
// TestInvalidConfigArgs checks that Config rejects invalid flag combinations with a
// non-zero return code.
func TestInvalidConfigArgs(t *testing.T) {
	cliEnv := createTestEnv()

	// local and global flags passed together is invalid
	ret := Config("dolt config", []string{"-global", "-local", "-set", "name:bheni", "title:dufus"}, cliEnv)
	if ret == 0 {
		t.Error("Invalid commands should fail. Command has both local and global")
	}

	// missing local and global flags is invalid
	ret = Config("dolt config", []string{"-set", "name:bheni", "title:dufus"}, cliEnv)
	if ret == 0 {
		t.Error("Invalid commands should fail. Command is missing local/global")
	}

	// both -set and -get are used
	ret = Config("dolt config", []string{"-global", "-set", "-get", "title"}, cliEnv)
	if ret == 0 {
		// Fixed copy-pasted failure message: this case has two operations, not a
		// missing scope flag.
		t.Error("Invalid commands should fail. Command has both -set and -get")
	}
}
+199
View File
@@ -0,0 +1,199 @@
package commands
import (
"flag"
"fmt"
"github.com/attic-labs/noms/go/types"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"github.com/liquidata-inc/ld/dolt/go/libraries/schema"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/untyped"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/untyped/fwt"
"os"
)
// diffUsage returns a usage-printing function for the diff command's flag set.
func diffUsage(fs *flag.FlagSet) func() {
	return func() { fs.PrintDefaults() }
}
// Diff implements `dolt diff`: shows row-level differences for the named table between
// two roots (working vs staged by default, or the commit specs given as positional
// args). Returns 0 on success, 1 on failure.
func Diff(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = diffUsage(fs)
	tblName := fs.String("table", "", "A table to show")
	fs.Parse(args)

	if *tblName == "" {
		fmt.Fprintln(os.Stderr, "Missing required parameter \"-table\"")
		return 1
	}

	l1, l2, r1, r2, verr := getRoots(fs.Args(), cliEnv)

	if verr == nil {
		// Fix: announce the comparison only once both roots resolved. The original
		// printed this header unconditionally, emitting "diffing  vs " with empty
		// labels before reporting a getRoots failure.
		fmt.Printf("diffing %s vs %s\n\n", color.CyanString(l1), color.BlueString(l2))
		verr = diffRoots(r1, r2, []string{*tblName}, cliEnv)
	}

	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	return 0
}
// getRoots resolves the two roots to diff from the positional args:
//   - no args:  working vs staged
//   - one arg:  working vs the given commit/branch
//   - two args: the two given commits/branches
// l1 and l2 are human-readable labels for the two roots.
// NOTE(review): more than two args yields BuildDError("") with an empty message, so
// the user sees no explanation — consider adding one.
func getRoots(args []string, cliEnv *env.DoltCLIEnv) (l1, l2 string, r1, r2 *doltdb.RootValue, verr errhand.VerboseError) {
	if len(args) > 2 {
		bdr := errhand.BuildDError("")
		return "", "", nil, nil, bdr.Build()
	}

	// Default labels for the zero-arg (working vs staged) case.
	l1 = "working"
	l2 = "staged"

	if len(args) == 0 {
		var err error
		r1, err = cliEnv.WorkingRoot()

		if err != nil {
			verr = errhand.BuildDError("Unable to get working.").AddCause(err).Build()
		} else {
			r2, err = cliEnv.StagedRoot()

			if err != nil {
				verr = errhand.BuildDError("Unable to get staged.").AddCause(err).Build()
			}
		}
	} else if len(args) == 1 {
		var err error
		r1, err = cliEnv.WorkingRoot()

		if err != nil {
			verr = errhand.BuildDError("Unable to get working").AddCause(err).Build()
		} else {
			// l2 becomes the resolved commit's hash string.
			l2, r2, verr = getRootForCommitSpecStr(args[0], cliEnv)
		}
	} else {
		// Both sides are explicit commit specs.
		l1, r1, verr = getRootForCommitSpecStr(args[0], cliEnv)

		if verr == nil {
			l2, r2, verr = getRootForCommitSpecStr(args[1], cliEnv)
		}
	}

	if verr != nil {
		return "", "", nil, nil, verr
	}

	return l1, l2, r1, r2, nil
}
// getRootForCommitSpecStr resolves a commit spec string (branch name or commit
// reference) against the current branch, returning the resolved commit's hash string
// and root value.
func getRootForCommitSpecStr(csStr string, cliEnv *env.DoltCLIEnv) (string, *doltdb.RootValue, errhand.VerboseError) {
	cs, err := doltdb.NewCommitSpec(csStr, cliEnv.RepoState.Branch)
	if err != nil {
		verr := errhand.BuildDError(`"%s" is not a validly formatted branch, or commit reference.`, csStr).AddCause(err).Build()
		return "", nil, verr
	}

	cm, err := cliEnv.DoltDB.Resolve(cs)
	if err != nil {
		return "", nil, errhand.BuildDError(`Unable to resolve "%s"`, csStr).AddCause(err).Build()
	}

	return cm.HashOf().String(), cm.GetRootValue(), nil
}
// diffRoots diffs each named table between the two roots. A table missing from one
// root is treated as empty on that side; a table missing from both is an error.
// NOTE(review): the missing-table error is BuildDError("") with an empty message.
func diffRoots(r1, r2 *doltdb.RootValue, tblNames []string, cliEnv *env.DoltCLIEnv) errhand.VerboseError {
	for _, tblName := range tblNames {
		tbl1, ok1 := r1.GetTable(tblName)
		tbl2, ok2 := r2.GetTable(tblName)

		if !ok1 && !ok2 {
			return errhand.BuildDError("").Build()
		}

		// Default to empty row data and a nil schema for a side lacking the table.
		var sch1 *schema.Schema
		var sch2 *schema.Schema
		rowData1 := types.NewMap(cliEnv.DoltDB.ValueReadWriter())
		rowData2 := types.NewMap(cliEnv.DoltDB.ValueReadWriter())

		if ok1 {
			sch1 = tbl1.GetSchema(cliEnv.DoltDB.ValueReadWriter())
			rowData1 = tbl1.GetRowData()
		}

		if ok2 {
			sch2 = tbl2.GetSchema(cliEnv.DoltDB.ValueReadWriter())
			rowData2 = tbl2.GetRowData()
		}

		verr := diffRows(rowData1, rowData2, sch1, sch2)

		if verr != nil {
			return verr
		}
	}

	return nil
}
// diffRows streams a row-level diff of newRows vs oldRows to stdout as a colorized,
// fixed-width table. Both sides are first mapped into the untyped union of the two
// schemas so rows remain comparable even when the schemas differ.
func diffRows(newRows, oldRows types.Map, newSch, oldSch *schema.Schema) errhand.VerboseError {
	unionedSch := untyped.UntypedSchemaUnion(newSch, oldSch)

	// Build converters from each side's schema into the union. A nil schema (table
	// absent on that side) keeps the identity converter.
	newToUnionConv := table.IdentityConverter
	if newSch != nil {
		newToUnionMapping, err := schema.NewInferredMapping(newSch, unionedSch)

		if err != nil {
			return errhand.BuildDError("Error creating unioned mapping").AddCause(err).Build()
		}

		newToUnionConv, _ = table.NewRowConverter(newToUnionMapping)
	}

	oldToUnionConv := table.IdentityConverter
	if oldSch != nil {
		oldToUnionMapping, err := schema.NewInferredMapping(oldSch, unionedSch)

		if err != nil {
			return errhand.BuildDError("Error creating unioned mapping").AddCause(err).Build()
		}

		oldToUnionConv, _ = table.NewRowConverter(oldToUnionMapping)
	}

	// Compute map-level diffs asynchronously with a bounded buffer, and read them
	// back as rows in the unioned schema.
	ad := doltdb.NewAsyncDiffer(1024)
	ad.Start(newRows, oldRows)
	defer ad.Close()

	rd := doltdb.NewRowDiffReader(ad, oldToUnionConv, newToUnionConv, unionedSch)
	defer rd.Close()

	// Pipeline stages: fixed-width formatting -> coloring -> colorized diff writer.
	fwtTr := fwt.NewAutoSizingFWTTransformer(unionedSch, fwt.HashFillWhenTooLong, 1000)
	colorTr := table.NewRowTransformer("coloring transform", doltdb.ColoringTransform)
	transforms := []table.TransformFunc{fwtTr.TransformToFWT, colorTr}

	wr := doltdb.NewColorDiffWriter(os.Stdout, unionedSch, " | ")
	defer wr.Close()

	var verr errhand.VerboseError
	// Abort the pipeline on the first row that fails to transform, capturing the error.
	badRowCB := func(transfName string, row *table.Row, errDetails string) (quit bool) {
		verr = errhand.BuildDError("Failed transforming row").AddDetails(transfName).AddDetails(errDetails).Build()
		return true
	}

	pipeline := table.StartAsyncPipeline(rd, transforms, wr, badRowCB)
	pipeline.Wait()

	return verr
}
+2
View File
@@ -0,0 +1,2 @@
// Package commands contains the command functions executed based on the dolt subcommand specified on the command line.
package commands
+163
View File
@@ -0,0 +1,163 @@
package edit
import (
"flag"
"fmt"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/commands/edit/mvdata"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/typed/noms"
"os"
)
// Flag/argument names shared by the edit subcommands.
const (
	tableParam       = "table"     // destination table name
	outSchemaParam   = "schema"    // schema file for the output data
	mappingFileParam = "map"       // field-mapping file
	forceParam       = "force"     // allow overwriting existing destination data
	contOnErrParam   = "continue"  // keep importing past bad rows
	primaryKeyParam  = "pk"        // explicit primary-key field name
	fileTypeParam    = "file-type" // explicit input file type when not inferable
)
// createUsage returns a usage-printing function for the create command's flag set.
func createUsage(fs *flag.FlagSet) func() {
	return func() { fs.PrintDefaults() }
}
// invalidOptions reports a formatted error to stderr, prints usage, and returns nil
// locations so callers can propagate the validation failure.
func invalidOptions(fs *flag.FlagSet, errFmt string, args ...interface{}) (*mvdata.DataLocation, *mvdata.DataLocation) {
	var msg string
	if len(args) == 0 {
		msg = color.RedString(errFmt)
	} else {
		msg = color.RedString(errFmt, args...)
	}

	fmt.Fprintln(os.Stderr, msg)
	fs.Usage()
	return nil, nil
}
// initCreateFlagSet builds the flag set shared by the create/export flow: string
// arguments for schema/mapping/table/pk/file-type and bool flags for force/continue.
func initCreateFlagSet(commandStr string) (*flag.FlagSet, *cli.StrArgMap, *cli.BoolFlagMap) {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = createUsage(fs)

	argMap := cli.NewStrArgMap(fs, map[string]string{
		outSchemaParam:   "The schema for the output data.",
		mappingFileParam: "A file that lays out how fields should be mapped from input data to output data",
		tableParam:       "Destination of where the new data should be imported to.",
		primaryKeyParam:  "Explicitly define the name of the field in the schema which should be used as the primary key.",
		fileTypeParam:    "Explicitly define the type of the file if it can't be inferred from the file extension",
	})

	flagMap := cli.NewBoolFlagMap(fs, map[string]string{
		forceParam:     "If a create operation is being executed, data already exists in the destination, the Force flag will allow the target to be overwritten",
		contOnErrParam: "Continue importing when row import errors are encountered."})

	return fs, argMap, flagMap
}
// validateCreateOrExportArgs parses args, validates the required -table parameter and
// the single positional file argument, and returns the source file location and
// destination table location. Returns (nil, nil) after reporting any validation
// failure to the user. flagMap is accepted for signature symmetry but not consulted.
func validateCreateOrExportArgs(fs *flag.FlagSet, args []string, argMap *cli.StrArgMap, flagMap *cli.BoolFlagMap) (*mvdata.DataLocation, *mvdata.DataLocation) {
	fs.Parse(args)
	argMap.Update()
	emptyArgs := argMap.GetEmpty()

	for _, required := range []string{tableParam} {
		if emptyArgs.Contains(required) {
			// Fixed typo in the user-visible message ("paramater" -> "parameter").
			return invalidOptions(fs, "Missing required parameter -%s", required)
		}
	}

	tableName := argMap.Get(tableParam)
	if !doltdb.IsValidTableName(tableName) {
		// NOTE(review): this prints tableParam ("table") rather than the validation
		// regex; the intended value was presumably the table-name pattern — confirm.
		fmt.Fprintln(
			os.Stderr,
			color.RedString("\"%s\" is not a valid table name\n", tableName),
			"table names must match the regular expression", tableParam)
		return nil, nil
	}

	if fs.NArg() != 1 {
		return invalidOptions(fs, "Exactly one file must be provided.")
	}

	path := fs.Arg(0)
	fileLoc := mvdata.NewDataLocation(path, argMap.Get(fileTypeParam))

	if fileLoc.Format == mvdata.InvalidDataFormat {
		return invalidOptions(fs, "Could not infer type from parameter %s. Should be a valid table name or a supported file type.", path)
	}

	tableLoc := &mvdata.DataLocation{tableName, mvdata.DoltDB}
	return fileLoc, tableLoc
}
// Create is the entry point for the "dolt edit create" command.  It parses
// the command line and, when valid, moves the file's data into a new (or
// forcibly overwritten) dolt table.  Returns a process exit code.
func Create(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	force, mvOpts := parseCreateArgs(commandStr, args)

	// Argument parsing reports its own errors; nil options mean "bail out".
	if mvOpts == nil {
		return 1
	}

	return executeMove(cliEnv, force, mvOpts)
}
// parseCreateArgs parses the create command's arguments.  It returns the
// value of the -force flag and the move options describing the file->table
// import, or (false, nil) when the arguments are invalid (the validation
// helpers print the error).
func parseCreateArgs(commandStr string, args []string) (bool, *mvdata.MoveOptions) {
	fs, argMap, flagMap := initCreateFlagSet(commandStr)
	fileLoc, tableLoc := validateCreateOrExportArgs(fs, args, argMap, flagMap)

	if fileLoc == nil || tableLoc == nil {
		return false, nil
	}

	// Keyed fields keep this literal correct if MoveOptions ever grows.
	return flagMap.Get(forceParam), &mvdata.MoveOptions{
		Operation:   mvdata.OverwriteOp,
		ContOnErr:   flagMap.Get(contOnErrParam),
		SchFile:     argMap.Get(outSchemaParam),
		MappingFile: argMap.Get(mappingFileParam),
		PrimaryKey:  argMap.Get(primaryKeyParam),
		Src:         fileLoc,
		Dest:        tableLoc,
	}
}
// executeMove runs a prepared data move against the repository's working
// root.  It refuses to overwrite an existing destination unless force is set,
// runs the mover, and — when the destination writer is a noms map writer
// (i.e. a dolt table) — stores the resulting map in the working root.
// Returns 0 on success and 1 on any failure (errors go to stderr).
func executeMove(cliEnv *env.DoltCLIEnv, force bool, mvOpts *mvdata.MoveOptions) int {
	root, err := cliEnv.WorkingRoot()
	if err != nil {
		fmt.Fprintln(os.Stderr, color.RedString("Unable to get the working root value for this data repository."))
		return 1
	}
	// Without -f an existing destination (table or file) is an error.
	if !force && mvOpts.Dest.Exists(root, cliEnv.FS) {
		fmt.Fprintln(os.Stderr, color.RedString("The data already exists in %s. Use -f to overwrite.", mvOpts.Dest.Path))
		return 1
	}
	mover, verr := mvdata.NewDataMover(root, cliEnv.FS, mvOpts)
	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}
	err = mover.Move()
	if err != nil {
		fmt.Fprintln(os.Stderr, "An error occurred moving data.", err.Error())
		return 1
	}
	// File destinations are written directly by the mover; table destinations
	// produce an in-memory noms map that must be put into the working root.
	if nomsWr, ok := mover.Wr.(noms.NomsMapWriteCloser); ok {
		err = cliEnv.PutTableToWorking(*nomsWr.GetMap(), nomsWr.GetSchema(), mvOpts.Dest.Path)
		if err != nil {
			fmt.Fprintln(os.Stderr, color.RedString("Failed to update the working value."))
			return 1
		}
	}
	return 0
}
+311
View File
@@ -0,0 +1,311 @@
package edit
import (
"fmt"
"github.com/attic-labs/noms/go/types"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/commands/edit/mvdata"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/dtestutils"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/schema"
"github.com/liquidata-inc/ld/dolt/go/libraries/schema/jsonenc"
"github.com/liquidata-inc/ld/dolt/go/libraries/set"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/untyped"
"reflect"
"strconv"
"strings"
"testing"
)
// Fixture names and paths shared by the edit-command tests; all file paths
// live on the in-memory test filesystem.
const (
	tableName = "result_table"
	csvPath = "/user/bheni/file.csv"
	psvPath = "/user/bheni/file.psv"
	nbfPath = "/user/bheni/file.nbf"
	schemaPath = "/user/bheni/schema.json"
	mappingPath = "/user/bheni/mapping.json"
)
// fieldNames are the column headers used by the delimiter-separated fixtures.
var fieldNames = []string{"state", "population", "is_state"}

// stateData is one fixture row: a region name, a population count, and
// whether the region is a state.
type stateData struct {
	name       string
	population uint64
	isState    bool
}

// delimSepVal renders the row as name<delim>population<delim>isState.
func (sd stateData) delimSepVal(delim rune) string {
	d := string(delim)
	return sd.name + d + strconv.FormatUint(sd.population, 10) + d + strconv.FormatBool(sd.isState)
}

// stateCollection is a list of fixture rows renderable as a delimited file.
type stateCollection []stateData

// delimSepVals renders the header row followed by one line per row, joined
// with newlines (no trailing newline).
func (sc stateCollection) delimSepVals(delim rune) string {
	lines := make([]string, 0, len(sc)+1)
	lines = append(lines, strings.Join(fieldNames, string(delim)))

	for _, row := range sc {
		lines = append(lines, row.delimSepVal(delim))
	}

	return strings.Join(lines, "\n")
}
// sd is the shared state fixture; it deliberately mixes rows whose is_state
// value is true and false.
var sd = stateCollection{
	{"West Virginia", 176924, false},
	{"South Carolina", 581185, true},
	{"New Hampshire", 269328, true},
	{"Wisconsin", 3635, false},
	{"Vermont", 280652, true},
	{"Georgia", 516823, true},
	{"Pennsylvania", 1348233, true},
	{"Florida", 34730, false},
	{"Kentucky", 687917, true},
	{"Missouri", 140455, true},
}

// Delimited renderings of the fixture used as input-file contents in tests.
var csvData = sd.delimSepVals(',')
var psvData = sd.delimSepVals('|')

// Untyped view of the fixture: every field is a string column.
var untypedSchema = untyped.NewUntypedSchema(fieldNames)
var untypedRows = make([]*table.Row, len(sd))

// init populates untypedRows by formatting each fixture value as a string.
func init() {
	for i, curr := range sd {
		popStr := strconv.FormatUint(curr.population, 10)
		isStateStr := map[bool]string{true: "true", false: "false"}[curr.isState]
		untypedRows[i] = table.NewRow(table.RowDataFromValues(untypedSchema, []types.Value{
			types.String(curr.name), types.String(popStr), types.String(isStateStr),
		}))
	}
}
// Typed view of the fixture: state (string, primary key) and population
// (uint); the is_state column is intentionally absent from the typed schema.
var typedSchemaJson []byte
var typedRows = make([]*table.Row, len(sd))
var typedSchema = schema.NewSchema([]*schema.Field{
	schema.NewField("state", types.StringKind, true),
	schema.NewField("population", types.UintKind, true),
})

// init finalizes the typed schema (primary key on state), serializes it to
// JSON for tests that pass a schema file, and builds the typed rows.
func init() {
	typedSchema.AddConstraint(schema.NewConstraint(schema.PrimaryKey, []int{0}))
	typedSchemaJson, _ = jsonenc.SchemaToJSON(typedSchema)
	for i, curr := range sd {
		typedRows[i] = table.NewRow(table.RowDataFromValues(typedSchema, []types.Value{
			types.String(curr.name), types.Uint(curr.population),
		}))
	}
}
// Mapped view of the fixture: the mapping file renames fields so that
// is_state -> state (bool), population -> pop, and state -> stname, with the
// primary key on stname (field index 2).
var mappedSchemaJson []byte
var mappedRows = make([]*table.Row, len(sd))
var mappedSchema = schema.NewSchema([]*schema.Field{
	schema.NewField("state", types.BoolKind, true),
	schema.NewField("pop", types.UintKind, true),
	schema.NewField("stname", types.StringKind, true),
})

// mappingJson maps input field names to output field names.
var mappingJson = `{
"state":"stname",
"population":"pop",
"is_state": "state"
}`

// init finalizes the mapped schema (primary key on stname), serializes it,
// and builds the rows in mapped field order.
func init() {
	mappedSchema.AddConstraint(schema.NewConstraint(schema.PrimaryKey, []int{2}))
	mappedSchemaJson, _ = jsonenc.SchemaToJSON(mappedSchema)
	for i, curr := range sd {
		mappedRows[i] = table.NewRow(table.RowDataFromValues(mappedSchema, []types.Value{
			types.Bool(curr.isState), types.Uint(curr.population), types.String(curr.name),
		}))
	}
}
// createTest describes one run of the create command: the arguments to pass,
// the expected exit code, the table contents expected afterwards, and the
// fixture files (input data, optional schema, optional mapping) to write to
// the test filesystem beforehand.
type createTest struct {
	args []string
	expectedExitCode int
	expectedTable *table.InMemTable
	pkInExpectedTable string
	inFilePath string
	inFileContents string
	schemaJson []byte
	mappingJson string
}
// getTests returns the create-command cases: a plain csv import with an
// explicit -pk, a psv import with a schema file, and a csv import with both a
// schema file and a field-mapping file.
func getTests() []createTest {
	return []createTest{
		{
			[]string{"-table", tableName, "-pk", "state", csvPath},
			0,
			table.NewInMemTableWithData(untypedSchema, untypedRows),
			"state",
			csvPath,
			csvData,
			nil,
			"",
		},
		{
			[]string{"-table", tableName, "-schema", schemaPath, psvPath},
			0,
			table.NewInMemTableWithData(typedSchema, typedRows),
			"state",
			psvPath,
			psvData,
			typedSchemaJson,
			"",
		},
		{
			[]string{"-table", tableName, "-schema", schemaPath, "-map", mappingPath, csvPath},
			0,
			table.NewInMemTableWithData(mappedSchema, mappedRows),
			"stname",
			csvPath,
			csvData,
			mappedSchemaJson,
			mappingJson,
		},
	}
}
// TestCreateCommand runs each createTest end to end: seed the fixture files,
// run the create command, then verify the resulting table contents.
func TestCreateCommand(t *testing.T) {
	tests := getTests()
	for _, test := range tests {
		cliEnv := initTestEnv(t, &test)
		exitCode := Create("dolt edit create", test.args, cliEnv)
		if exitCode != test.expectedExitCode {
			commandLine := "dolt edit create " + strings.Join(test.args, " ")
			t.Error(commandLine, "returned with exit code", exitCode, "expected", test.expectedExitCode)
		}
		dtestutils.CheckResultTable(t, tableName, cliEnv, test.expectedTable, test.pkInExpectedTable)
	}
}
// initTestEnv creates an in-memory CLI environment and writes the test's
// input file plus its optional schema and mapping fixtures to the
// environment's filesystem, failing the test if any write fails.
func initTestEnv(t *testing.T, test *createTest) *env.DoltCLIEnv {
	cliEnv := dtestutils.CreateTestEnv()

	write := func(path string, data []byte, failMsg string) {
		if err := cliEnv.FS.WriteFile(path, data); err != nil {
			t.Fatal(failMsg)
		}
	}

	write(test.inFilePath, []byte(test.inFileContents), "Failed to create test csv file.")

	if len(test.schemaJson) > 0 {
		write(schemaPath, test.schemaJson, "Failed to create schema file.")
	}

	if test.mappingJson != "" {
		write(mappingPath, []byte(test.mappingJson), "Failed to create mapping file.")
	}

	return cliEnv
}
// TestForceFlag verifies create semantics around -force: the first create of
// a table succeeds, a second create of the same table fails without -force,
// and succeeds again once -force is supplied.
func TestForceFlag(t *testing.T) {
	test := getTests()[0]

	if test.expectedExitCode != 0 {
		t.Fatal("This only works if the test we are running is expected to succeed.")
	}

	paramSet := set.NewStrSet(test.args)
	if paramSet.Contains("-force") {
		t.Fatal("This only works if the test isn't already using the Force flag.")
	}

	cliEnv := initTestEnv(t, &test)

	if exitCode := Create("dolt edit create", test.args, cliEnv); exitCode != 0 {
		t.Fatal("Initial execution should succeed")
	}

	if exitCode := Create("dolt edit create", test.args, cliEnv); exitCode == 0 {
		t.Fatal("Second execution should fail without the Force flag")
	}

	// Insert -force immediately before the trailing file argument.
	lastIdx := len(test.args) - 1
	forcedArgs := append([]string{}, test.args[:lastIdx]...)
	forcedArgs = append(forcedArgs, "-force", test.args[lastIdx])

	if exitCode := Create("dolt edit create", forcedArgs, cliEnv); exitCode != 0 {
		t.Fatal("Third execution should succeed with the Force flag")
	}
}
// TestParseCreateArgs exercises create-argument parsing: missing positional
// file, missing -table, happy paths for csv/nbf with and without schema,
// mapping, pk and continue flags, and rejection of unsupported extensions and
// invalid table names.  nil expectedOpts means parsing should fail.
func TestParseCreateArgs(t *testing.T) {
	tests := []struct {
		args []string
		expectedOpts *mvdata.MoveOptions
	}{
		{[]string{}, nil},
		{[]string{"-table", "table_name"}, nil},
		{
			[]string{"-table", "table_name", "file.csv"},
			&mvdata.MoveOptions{
				mvdata.OverwriteOp,
				false,
				"",
				"",
				"",
				&mvdata.DataLocation{Path: "file.csv", Format: mvdata.CsvFile},
				&mvdata.DataLocation{Path: "table_name", Format: mvdata.DoltDB},
			},
		},
		{
			[]string{"-table", "table_name", "file.unsupported"},
			nil,
		},
		{
			[]string{"-table", "invalid_table_name.csv", "file.csv"},
			nil,
		},
		{
			[]string{"-table", "table_name", "-schema", "schema.json", "-pk", "id", "-map", "mapping.json", "-continue", "file.nbf"},
			&mvdata.MoveOptions{
				mvdata.OverwriteOp,
				true,
				"schema.json",
				"mapping.json",
				"id",
				&mvdata.DataLocation{Path: "file.nbf", Format: mvdata.NbfFile},
				&mvdata.DataLocation{Path: "table_name", Format: mvdata.DoltDB},
			},
		},
	}
	for _, test := range tests {
		_, actualOpts := parseCreateArgs("dolt edit create", test.args)
		if !optsEqual(test.expectedOpts, actualOpts) {
			argStr := strings.Join(test.args, " ")
			t.Error("Unexpected result for args:", argStr)
		}
	}
}
// optsEqual reports whether two MoveOptions pointers are both nil or point to
// deeply equal values.
func optsEqual(opts1, opts2 *mvdata.MoveOptions) bool {
	if opts1 == nil || opts2 == nil {
		// Equal only when both are nil.
		return opts1 == opts2
	}
	return reflect.DeepEqual(opts1, opts2)
}
+11
View File
@@ -0,0 +1,11 @@
package edit
import "github.com/liquidata-inc/ld/dolt/go/cmd/dolt/cli"
// Commands routes "dolt edit" subcommands to their handlers.  Every
// subcommand requires an initialized repository.
var Commands = cli.GenSubCommandHandler([]*cli.Command{
	{Name: "create", Desc: "Creates or overwrites a table from the data in a file.", Func: Create, ReqRepo: true},
	{Name: "update", Desc: "Updates a table from the data in a file.", Func: Update, ReqRepo: true},
	{Name: "put-row", Desc: "Add a row to a table.", Func: PutRow, ReqRepo: true},
	{Name: "rm-row", Desc: "Remove a row from a table.", Func: RmRow, ReqRepo: true},
	{Name: "export", Desc: "Export a table to a file.", Func: Export, ReqRepo: true},
})
+62
View File
@@ -0,0 +1,62 @@
package edit
import (
"flag"
"fmt"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/commands/edit/mvdata"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
)
func exportUsage(fs *flag.FlagSet) func() {
return func() {
fs.PrintDefaults()
}
}
// initExportFlagSet builds the flag set for the export command along with the
// string-argument and boolean-flag maps that hold its parsed options.
func initExportFlagSet(commandStr string) (*flag.FlagSet, *cli.StrArgMap, *cli.BoolFlagMap) {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = exportUsage(fs)
	argMap := cli.NewStrArgMap(fs, map[string]string{
		outSchemaParam:   "The schema for the output data.",
		mappingFileParam: "A file that lays out how fields should be mapped from input data to output data",
		tableParam:       "Source table being exported to a file",
		primaryKeyParam:  "Explicitly define the name of the field in the schema which should be used as the primary key.",
		fileTypeParam:    "Explicitly define the type of the file if it can't be inferred from the file extension"})
	flagMap := cli.NewBoolFlagMap(fs, map[string]string{
		// was: help text copy-pasted from the create command ("If a create
		// operation is being executed, ...") which made no sense for export.
		forceParam:     "If the destination file already exists, the force flag will allow it to be overwritten",
		contOnErrParam: "Continue exporting when row export errors are encountered."})
	return fs, argMap, flagMap
}
// Export is the entry point for the "dolt edit export" command.  It validates
// the shared create/export arguments and then moves data from the source dolt
// table to the destination file, printing a success message when the move
// completes.  Returns a process exit code.
func Export(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	fs, argMap, flagMap := initExportFlagSet(commandStr)
	fileLoc, tableLoc := validateCreateOrExportArgs(fs, args, argMap, flagMap)
	if fileLoc == nil || tableLoc == nil {
		return 1
	}
	force := flagMap.Get(forceParam)
	// For export the table is the source and the file is the destination —
	// the mirror image of create.  Keyed fields guard against reordering.
	mvOpts := &mvdata.MoveOptions{
		Operation:   mvdata.OverwriteOp,
		ContOnErr:   flagMap.Get(contOnErrParam),
		SchFile:     argMap.Get(outSchemaParam),
		MappingFile: argMap.Get(mappingFileParam),
		PrimaryKey:  argMap.Get(primaryKeyParam),
		Src:         tableLoc,
		Dest:        fileLoc,
	}
	result := executeMove(cliEnv, force, mvOpts)
	if result == 0 {
		fmt.Println(color.CyanString("Successfully exported data."))
	}
	return result
}
+60
View File
@@ -0,0 +1,60 @@
package edit
import (
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/commands/edit/mvdata"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/dtestutils"
"testing"
)
// TestExport seeds a repository with test data and exports it to csv, psv,
// and nbf files, then reads each output file back and compares it against the
// expected (typed or untyped) reference table.
func TestExport(t *testing.T) {
	tests := []struct {
		args []string
		outFilePath string
		schemaJson []byte
		mappingJson string
		outputIsTyped bool
	}{
		{
			[]string{"-table", tableName, "-pk", "id", csvPath},
			csvPath,
			nil,
			"",
			false,
		},
		{
			[]string{"-table", tableName, psvPath},
			psvPath,
			nil,
			"",
			false,
		},
		{
			[]string{"-table", tableName, nbfPath},
			nbfPath,
			nil,
			"",
			true,
		},
	}
	for _, test := range tests {
		cliEnv := createEnvWithSeedData(t)
		result := Export("dolt edit export", test.args, cliEnv)
		if result != 0 {
			t.Fatal("Unexpected failure.")
		}
		// Read the exported file back and verify its rows keyed by "id".
		outLoc := mvdata.NewDataLocation(test.outFilePath, "")
		rd, _, verr := outLoc.CreateReader(nil, cliEnv.FS)
		if verr != nil {
			t.Fatal(verr.Verbose())
		}
		idIdx := rd.GetSchema().GetFieldIndex("id")
		imt, _ := dtestutils.CreateTestDataTable(test.outputIsTyped)
		dtestutils.CheckResultsAgainstReader(t, rd, idIdx, imt, "id")
	}
}
@@ -0,0 +1,230 @@
package mvdata
import (
"github.com/attic-labs/noms/go/types"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"github.com/liquidata-inc/ld/dolt/go/libraries/filesys"
"github.com/liquidata-inc/ld/dolt/go/libraries/schema"
"github.com/liquidata-inc/ld/dolt/go/libraries/schema/jsonenc"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/typed/nbf"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/typed/noms"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/untyped/csv"
"github.com/pkg/errors"
"path/filepath"
"strings"
)
// DataFormat identifies how data at a DataLocation is serialized: a dolt
// table, or one of the supported file types (whose values are their file
// extensions, dot included).
type DataFormat string

const (
	InvalidDataFormat DataFormat = "invalid"
	DoltDB            DataFormat = "doltdb"
	CsvFile           DataFormat = ".csv"
	PsvFile           DataFormat = ".psv"
	NbfFile           DataFormat = ".nbf"
)

// ReadableStr returns a short human-readable name for the format, or
// "invalid" for anything unrecognized.
func (df DataFormat) ReadableStr() string {
	names := map[DataFormat]string{
		DoltDB:  "dolt table",
		CsvFile: "csv file",
		PsvFile: "psv file",
		NbfFile: "nbf file",
	}

	if name, ok := names[df]; ok {
		return name
	}

	return "invalid"
}

// DFFromString parses a user-supplied format string ("csv" or ".csv" etc.,
// case-insensitively); unknown strings yield InvalidDataFormat.
func DFFromString(dfStr string) DataFormat {
	switch strings.ToLower(dfStr) {
	case "csv", ".csv":
		return CsvFile
	case "psv", ".psv":
		return PsvFile
	case "nbf", ".nbf":
		return NbfFile
	default:
		return InvalidDataFormat
	}
}
// DataLocation names a source or destination for a data move: a dolt table
// (Format == DoltDB, Path is the table name) or a file on disk (Path is the
// file path and Format identifies the file type).
type DataLocation struct {
	Path string
	Format DataFormat
}

// String renders the location as "<format>:<path>" for error messages.
func (dl *DataLocation) String() string {
	return dl.Format.ReadableStr() + ":" + dl.Path
}
// NewDataLocation builds a DataLocation for path.  When fileFmtStr is
// non-empty it is parsed as the explicit format; otherwise the format is
// inferred from the path: a valid table name maps to DoltDB, but a recognized
// file extension takes precedence over the table-name match (note there is
// deliberately no early return after the IsValidTableName check).
func NewDataLocation(path, fileFmtStr string) *DataLocation {
	dataFmt := DFFromString(fileFmtStr)
	if fileFmtStr == "" {
		if doltdb.IsValidTableName(path) {
			dataFmt = DoltDB
		}
		// A known extension overrides the table-name inference above.
		ext := filepath.Ext(path)
		switch strings.ToLower(ext) {
		case string(CsvFile):
			dataFmt = CsvFile
		case string(PsvFile):
			dataFmt = PsvFile
		case string(NbfFile):
			dataFmt = NbfFile
		}
	}
	return &DataLocation{path, dataFmt}
}
// IsFileType reports whether the location refers to a file on disk rather
// than a dolt table.  Panics if the format is invalid.
func (dl *DataLocation) IsFileType() bool {
	if dl.Format == InvalidDataFormat {
		panic("Invalid format")
	}
	return dl.Format != DoltDB
}
// CreateReader opens a row reader for this location.  Dolt tables are read
// out of root; files are opened on fs.  The returned sorted flag reports
// whether rows arrive in sorted key order (dolt tables and nbf files).
func (dl *DataLocation) CreateReader(root *doltdb.RootValue, fs filesys.ReadableFS) (rdCl table.TableReadCloser, sorted bool, err errhand.VerboseError) {
	if dl.Format == DoltDB {
		tbl, ok := root.GetTable(dl.Path)
		if !ok {
			derr := errhand.BuildDError("Table %s does not exist.", dl.Path).Build()
			return nil, false, derr
		}
		sch := tbl.GetSchema(root.VRW())
		rd := noms.NewNomsMapReader(tbl.GetRowData(), sch)
		return rd, true, nil
	} else {
		// File locations must exist and be regular files before we open them.
		exists, isDir := fs.Exists(dl.Path)
		if !exists {
			derr := errhand.BuildDError("Attempted to create a reader for a file that doesn't exist.").
				AddDetails(`The file "%s" does not exist.`, dl.Path).Build()
			return nil, false, derr
		} else if isDir {
			derr := errhand.BuildDError("Attempted to create a reader for a file that doesn't exist.").
				AddDetails(`"%s" is a directory and not a file.`, dl.Path).Build()
			return nil, false, derr
		}
		switch dl.Format {
		case CsvFile:
			rd, err := csv.OpenCSVReader(dl.Path, fs, csv.NewCSVInfo())
			derr := errhand.BuildIf(err, "Failed to open csv reader for %s", dl.Path).AddCause(err).Build()
			return rd, false, derr
		case PsvFile:
			rd, err := csv.OpenCSVReader(dl.Path, fs, csv.NewCSVInfo().SetDelim('|'))
			derr := errhand.BuildIf(err, "Failed to open psv reader for %s", dl.Path).AddCause(err).Build()
			return rd, false, derr
		case NbfFile:
			rd, err := nbf.OpenNBFReader(dl.Path, fs)
			// was: message said "psv reader" and the underlying cause was
			// dropped from the built error.
			derr := errhand.BuildIf(err, "Failed to open nbf reader for %s", dl.Path).AddCause(err).Build()
			return rd, true, derr
		}
	}
	panic("Unsupported table format should have failed before reaching here. ")
}
// Exists reports whether the location already holds data: file presence on fs
// for file locations, table presence in root for dolt tables.  Panics on an
// invalid format.
func (dl *DataLocation) Exists(root *doltdb.RootValue, fs filesys.ReadableFS) bool {
	switch {
	case dl.IsFileType():
		found, _ := fs.Exists(dl.Path)
		return found
	case dl.Format == DoltDB:
		return root.HasTable(dl.Path)
	}
	panic("Invalid Data Format.")
}
// CreateOverwritingDataWriter opens a writer that replaces any existing data
// at this location.  Formats that require a primary key (dolt tables, nbf)
// are rejected up front when outSch has none.  For dolt tables, sortedInput
// selects the cheaper map creator over the map updater.
func (dl *DataLocation) CreateOverwritingDataWriter(root *doltdb.RootValue, fs filesys.WritableFS, sortedInput bool, outSch *schema.Schema) (table.TableWriteCloser, errhand.VerboseError) {
	if dl.RequiresPK() && outSch.GetPKIndex() == -1 {
		builder := errhand.BuildDError("Attempting to write to a %s with a schema that does not contain a primary key.", dl.Format.ReadableStr())
		schemaJSon, err := jsonenc.SchemaToJSON(outSch)
		if err == nil {
			builder.AddDetails("Schema:\n%s", schemaJSon)
		} else {
			builder.AddDetails("Unable to serialize schema as json.")
		}
		return nil, builder.Build()
	}
	switch dl.Format {
	case DoltDB:
		if sortedInput {
			return noms.NewNomsMapCreator(root.VRW(), outSch), nil
		} else {
			m := types.NewMap(root.VRW())
			return noms.NewNomsMapUpdater(root.VRW(), m, outSch), nil
		}
	case CsvFile:
		tWr, err := csv.OpenCSVWriter(dl.Path, fs, outSch, csv.NewCSVInfo())
		// was: the built error was discarded and nil returned unconditionally,
		// silently swallowing writer-creation failures.
		derr := errhand.BuildIf(err, "Failed to create a csv writer to create/overwrite %s.", dl.Path).Build()
		return tWr, derr
	case PsvFile:
		tWr, err := csv.OpenCSVWriter(dl.Path, fs, outSch, csv.NewCSVInfo().SetDelim('|'))
		// was: message said "csv writer"
		derr := errhand.BuildIf(err, "Failed to create a psv writer to create/overwrite %s.", dl.Path).Build()
		return tWr, derr
	case NbfFile:
		tWr, err := nbf.OpenNBFWriter(dl.Path, fs, outSch)
		// was: message said "csv writer"
		derr := errhand.BuildIf(err, "Failed to create an nbf writer to create/overwrite %s.", dl.Path).Build()
		return tWr, derr
	}
	panic("Invalid Data Format.")
}
// CreateUpdatingDataWriter opens a writer that merges rows into the
// location's existing data.  Only dolt tables support updating; file formats
// panic.  srcIsSorted is accepted for signature symmetry with the overwriting
// writer but is not consulted here.
func (dl *DataLocation) CreateUpdatingDataWriter(root *doltdb.RootValue, fs filesys.WritableFS, srcIsSorted bool, outSch *schema.Schema) (table.TableWriteCloser, error) {
	switch dl.Format {
	case DoltDB:
		tableName := dl.Path
		tbl, ok := root.GetTable(tableName)
		if !ok {
			return nil, errors.New("Could not find table " + tableName)
		}
		m := tbl.GetRowData()
		return noms.NewNomsMapUpdater(root.VRW(), m, outSch), nil
	case CsvFile, PsvFile, NbfFile:
		panic("Update not supported for this file type.")
	}
	panic("Invalid Data Format.")
}
// MustWriteSorted reports whether rows must be written to this location in
// sorted key order (only nbf files require it).
func (dl *DataLocation) MustWriteSorted() bool {
	return dl.Format == NbfFile
}

// RequiresPK reports whether the output schema must declare a primary key
// when writing to this location (nbf files and dolt tables).
func (dl *DataLocation) RequiresPK() bool {
	return dl.Format == NbfFile || dl.Format == DoltDB
}
@@ -0,0 +1,184 @@
package mvdata
import (
"github.com/attic-labs/noms/go/types"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/filesys"
"github.com/liquidata-inc/ld/dolt/go/libraries/schema"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/typed/nbf"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/typed/noms"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/untyped/csv"
"github.com/liquidata-inc/ld/dolt/go/libraries/test"
"reflect"
"testing"
)
// createRootAndFS builds a fresh in-memory dolt database (with an empty repo
// committed to master) plus an in-memory filesystem rooted at a fake user
// directory, returning the database, its HEAD root value, and the filesystem.
func createRootAndFS() (*doltdb.DoltDB, *doltdb.RootValue, filesys.Filesys) {
	testHomeDir := "/user/bheni"
	workingDir := "/user/bheni/datasets/states"
	initialDirs := []string{testHomeDir, workingDir}
	fs := filesys.NewInMemFS(initialDirs, nil, workingDir)
	ddb := doltdb.LoadDoltDB(doltdb.InMemDoltDB)
	ddb.WriteEmptyRepo("billy bob", "bigbillieb@fake.horse")
	cs, _ := doltdb.NewCommitSpec("HEAD", "master")
	commit, _ := ddb.Resolve(cs)
	root := commit.GetRootValue()
	return ddb, root, fs
}
// TestBasics checks format inference and the per-format property accessors
// (IsFileType, RequiresPK, MustWriteSorted) for each supported location type.
func TestBasics(t *testing.T) {
	tests := []struct {
		dl *DataLocation
		expectedFmt DataFormat
		expectedPath string
		expectedIsFileType bool
		expectedReqPK bool
		expectedMustWrSorted bool
	}{
		{NewDataLocation("table-name", ""), DoltDB, "table-name", false, true, false},
		{NewDataLocation("file.csv", ""), CsvFile, "file.csv", true, false, false},
		{NewDataLocation("file.psv", ""), PsvFile, "file.psv", true, false, false},
		{NewDataLocation("file.nbf", ""), NbfFile, "file.nbf", true, true, true},
	}
	for _, test := range tests {
		if test.expectedFmt != test.dl.Format {
			t.Error(test.dl, "Unexpected format")
		}
		if test.expectedPath != test.dl.Path {
			t.Error("Unexpected path")
		}
		if test.expectedIsFileType != test.dl.IsFileType() {
			t.Error("Unexpected IsFileType result")
		}
		// was: both messages below were copy-pasted as "Unexpected IsFileType
		// result", making failures indistinguishable.
		if test.expectedReqPK != test.dl.RequiresPK() {
			t.Error("Unexpected RequiresPK result")
		}
		if test.expectedMustWrSorted != test.dl.MustWriteSorted() {
			t.Error("Unexpected MustWriteSorted result")
		}
	}
}
// fakeFields defines a minimal two-column string schema used by the mvdata
// tests.
var fakeFields = []*schema.Field{
	schema.NewField("a", types.StringKind, true),
	schema.NewField("b", types.StringKind, true),
}
var fakeSchema *schema.Schema
var imt *table.InMemTable
var imtRows []*table.Row

// init builds fakeSchema (primary key on "a") and a three-row in-memory
// table used to seed readers and writers in the tests below.
func init() {
	fakeSchema = schema.NewSchema(fakeFields)
	err := fakeSchema.AddConstraint(schema.NewConstraint(schema.PrimaryKey, []int{0}))
	if err != nil {
		panic(test.ShouldNeverHappen)
	}
	imtRows = []*table.Row{
		table.NewRow(table.RowDataFromValues(fakeSchema, []types.Value{types.String("a"), types.String("1")})),
		table.NewRow(table.RowDataFromValues(fakeSchema, []types.Value{types.String("b"), types.String("2")})),
		table.NewRow(table.RowDataFromValues(fakeSchema, []types.Value{types.String("c"), types.String("3")})),
	}
	imt = table.NewInMemTableWithData(fakeSchema, imtRows)
}
// TestExists checks DataLocation.Exists for every location type, both before
// and after the underlying table/file is created.
func TestExists(t *testing.T) {
	testLocations := []*DataLocation{
		NewDataLocation("table-name", ""),
		NewDataLocation("file.csv", ""),
		NewDataLocation("file.psv", ""),
		NewDataLocation("file.nbf", ""),
	}
	ddb, root, fs := createRootAndFS()
	for _, loc := range testLocations {
		if loc.Exists(root, fs) {
			t.Error("Shouldn't exist before creation")
		}
		// Create the backing data: an empty table for DoltDB locations,
		// otherwise a small file on the in-memory filesystem.
		if loc.Format == DoltDB {
			schVal, _ := noms.MarshalAsNomsValue(ddb.ValueReadWriter(), fakeSchema)
			tbl := doltdb.NewTable(ddb.ValueReadWriter(), schVal, types.NewMap(ddb.ValueReadWriter()))
			root = root.PutTable(ddb, loc.Path, tbl)
		} else {
			fs.WriteFile(loc.Path, []byte("test"))
		}
		if !loc.Exists(root, fs) {
			t.Error("Should already exist after creation")
		}
	}
}
// TestCreateRdWr round-trips the fixture table through each location type:
// create a writer, pipe the in-memory rows into it, register the result as a
// table when the writer is a noms map writer, then open a reader on the same
// location and verify the concrete reader/writer types.
func TestCreateRdWr(t *testing.T) {
	tests := []struct {
		dl *DataLocation
		expectedRdT reflect.Type
		expectedWrT reflect.Type
	}{
		{NewDataLocation("table-name", ""), reflect.TypeOf((*noms.NomsMapReader)(nil)).Elem(), reflect.TypeOf((*noms.NomsMapCreator)(nil)).Elem()},
		{NewDataLocation("file.csv", ""), reflect.TypeOf((*csv.CSVReader)(nil)).Elem(), reflect.TypeOf((*csv.CSVWriter)(nil)).Elem()},
		{NewDataLocation("file.psv", ""), reflect.TypeOf((*csv.CSVReader)(nil)).Elem(), reflect.TypeOf((*csv.CSVWriter)(nil)).Elem()},
		{NewDataLocation("file.nbf", ""), reflect.TypeOf((*nbf.NBFReader)(nil)).Elem(), reflect.TypeOf((*nbf.NBFWriter)(nil)).Elem()},
	}
	ddb, root, fs := createRootAndFS()
	for _, test := range tests {
		loc := test.dl
		wr, err := loc.CreateOverwritingDataWriter(root, fs, true, fakeSchema)
		if err != nil {
			t.Fatal("Unexpected error creating writer.", err)
		}
		actualWrT := reflect.TypeOf(wr).Elem()
		if actualWrT != test.expectedWrT {
			t.Fatal("Unexpected writer type. Expected:", test.expectedWrT.Name(), "actual:", actualWrT.Name())
		}
		inMemRd := table.NewInMemTableReader(imt)
		_, numBad, pipeErr := table.PipeRows(inMemRd, wr, false)
		wr.Close()
		if numBad != 0 || pipeErr != nil {
			// was: logged the (nil at this point) writer-creation err instead
			// of the pipe error that actually failed.
			t.Fatal("Failed to write data. bad:", numBad, pipeErr)
		}
		if nomsWr, ok := wr.(noms.NomsMapWriteCloser); ok {
			vrw := ddb.ValueReadWriter()
			schVal, err := noms.MarshalAsNomsValue(vrw, nomsWr.GetSchema())
			if err != nil {
				// was: "Unable ta update table"
				t.Fatal("Unable to update table")
			}
			tbl := doltdb.NewTable(vrw, schVal, *nomsWr.GetMap())
			root = root.PutTable(ddb, test.dl.Path, tbl)
		}
		rd, _, err := loc.CreateReader(root, fs)
		if err != nil {
			// was: "creating writer" on the reader path
			t.Fatal("Unexpected error creating reader", err)
		}
		actualRdT := reflect.TypeOf(rd).Elem()
		if actualRdT != test.expectedRdT {
			t.Error("Unexpected reader type. Expected:", test.expectedRdT.Name(), "actual:", actualRdT.Name())
		}
		rd.Close()
	}
}
@@ -0,0 +1,229 @@
package mvdata
import (
"encoding/json"
"errors"
"fmt"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"github.com/liquidata-inc/ld/dolt/go/libraries/filesys"
"github.com/liquidata-inc/ld/dolt/go/libraries/schema"
"github.com/liquidata-inc/ld/dolt/go/libraries/schema/jsonenc"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
)
// MoveOperation identifies how a move treats its destination.
type MoveOperation string

const (
	// OverwriteOp replaces the destination's contents entirely.
	OverwriteOp MoveOperation = "overwrite"
	// UpdateOp merges rows into the destination's existing data.
	UpdateOp MoveOperation = "update"
)

// MoveOptions describes one data move from Src to Dest.
type MoveOptions struct {
	Operation MoveOperation
	ContOnErr bool // keep moving when individual rows fail to transform
	SchFile string // optional JSON schema file for the output
	MappingFile string // optional input->output field-mapping JSON file
	PrimaryKey string // optional explicit primary-key field name
	Src *DataLocation
	Dest *DataLocation
}
// DataMover bundles the pieces of a prepared move: the source reader, the
// row-transform pipeline, the destination writer, and whether bad rows abort
// the move.
type DataMover struct {
	Rd table.TableReadCloser
	Transforms []table.TransformFunc
	Wr table.TableWriteCloser
	ContOnErr bool
}
// NewDataMover builds a DataMover that streams rows from mvOpts.Src to
// mvOpts.Dest: it opens the source reader, resolves the output schema and the
// input->output field mapping, opens the destination writer (overwriting or
// updating per mvOpts.Operation), and inserts a sorting stage when the
// destination requires sorted input the source cannot provide.  On any error
// the partially opened reader is closed by the deferred cleanup.
func NewDataMover(root *doltdb.RootValue, fs filesys.Filesys, mvOpts *MoveOptions) (*DataMover, errhand.VerboseError) {
	var rd table.TableReadCloser
	var err error
	var transforms []table.TransformFunc
	// rd is set to nil on the success path below, so this only closes the
	// reader when we bail out early with an error.
	defer func() {
		if rd != nil {
			rd.Close()
		}
	}()
	rd, srcIsSorted, err := mvOpts.Src.CreateReader(root, fs)
	if err != nil {
		bdr := errhand.BuildDError("Error creating reader for %s.", mvOpts.Src.Path)
		bdr.AddDetails("When attempting to move data from %s to %s, could not open a reader.", mvOpts.Src.String(), mvOpts.Dest.String())
		return nil, bdr.AddCause(err).Build()
	}
	outSch, err := getOutSchema(rd.GetSchema(), root, fs, mvOpts)
	if err != nil {
		bdr := errhand.BuildDError("Error determining the output schema.")
		bdr.AddDetails("When attempting to move data from %s to %s, could not determine the output schema.", mvOpts.Src.String(), mvOpts.Dest.String())
		bdr.AddDetails(`Schema File: "%s"`, mvOpts.SchFile)
		bdr.AddDetails(`explicit pk: "%s"`, mvOpts.PrimaryKey)
		return nil, bdr.AddCause(err).Build()
	}
	// An explicit mapping file wins; otherwise the mapping is inferred from
	// the field names shared by the input and output schemas.
	var mapping *schema.FieldMapping
	if mvOpts.MappingFile != "" {
		mapping, err = schema.MappingFromFile(mvOpts.MappingFile, fs, rd.GetSchema(), outSch)
	} else {
		mapping, err = schema.NewInferredMapping(rd.GetSchema(), outSch)
	}
	if err != nil {
		bdr := errhand.BuildDError("Error determining the mapping from input fields to output fields.")
		// was: garbled detail text "determine the mapping from input fields t, output fields."
		bdr.AddDetails("When attempting to move data from %s to %s, could not determine the mapping from input fields to output fields.", mvOpts.Src.String(), mvOpts.Dest.String())
		bdr.AddDetails(`Mapping File: "%s"`, mvOpts.MappingFile)
		return nil, bdr.AddCause(err).Build()
	}
	transforms, err = maybeMapFields(transforms, mapping)
	if err != nil {
		mappingJson, jmErr := json.Marshal(mapping.DestSch)
		bdr := errhand.BuildDError("Error creating input to output mapper.")
		details := fmt.Sprintf("When attempting to move data from %s to %s, could not create a mapper.", mvOpts.Src.String(), mvOpts.Dest.String())
		if jmErr == nil {
			// was: no separator before "mapping:"
			details += " mapping: " + string(mappingJson)
		}
		bdr.AddDetails(details)
		return nil, bdr.AddCause(err).Build()
	}
	var wr table.TableWriteCloser
	if mvOpts.Operation == OverwriteOp {
		wr, err = mvOpts.Dest.CreateOverwritingDataWriter(root, fs, srcIsSorted, outSch)
	} else {
		wr, err = mvOpts.Dest.CreateUpdatingDataWriter(root, fs, srcIsSorted, outSch)
	}
	if err != nil {
		bdr := errhand.BuildDError("Error creating writer for %s.\n", mvOpts.Dest.Path)
		bdr.AddDetails("When attempting to move data from %s to %s, could not open a writer.", mvOpts.Src.String(), mvOpts.Dest.String())
		return nil, bdr.AddCause(err).Build()
	}
	wr, err = maybeSort(wr, outSch, srcIsSorted, mvOpts)
	if err != nil {
		bdr := errhand.BuildDError("Error creating sorting reader.")
		// was: "could not open create sorting reader."
		bdr.AddDetails("When attempting to move data from %s to %s, could not create the sorting reader.", mvOpts.Src.String(), mvOpts.Dest.String())
		return nil, bdr.AddCause(err).Build()
	}
	imp := &DataMover{rd, transforms, wr, mvOpts.ContOnErr}
	// Ownership of the reader has passed to the DataMover; disarm the defer.
	rd = nil
	return imp, nil
}
// Move streams all rows from the reader through the transforms into the
// writer, closing both when done.  The pipeline error (if any) takes
// precedence; otherwise the first bad-row error recorded by the callback is
// returned.
func (imp *DataMover) Move() error {
	defer imp.Rd.Close()
	defer imp.Wr.Close()
	var rowErr error
	// NOTE(review): the named return is "quit" but this returns true when
	// continuing on error — confirm the callback's bool actually means
	// "continue" in StartAsyncPipeline.
	badRowCB := func(transfName string, row *table.Row, errDetails string) (quit bool) {
		if !imp.ContOnErr {
			// was: transfName + "failed. " — missing separator space produced
			// messages like "Mapping transformfailed."
			rowErr = errors.New(transfName + " failed. " + errDetails)
			return false
		}
		return true
	}
	pipeline := table.StartAsyncPipeline(imp.Rd, imp.Transforms, imp.Wr, badRowCB)
	err := pipeline.Wait()
	if err != nil {
		return err
	}
	return rowErr
}
// maybeMapFields appends a field-mapping transform to transforms when the
// mapping is not an identity; identity mappings add nothing.
func maybeMapFields(transforms []table.TransformFunc, mapping *schema.FieldMapping) ([]table.TransformFunc, error) {
	rconv, err := table.NewRowConverter(mapping)
	if err != nil {
		return nil, err
	}
	if rconv.IdentityConverter {
		return transforms, nil
	}
	transformer := table.NewRowTransformer("Mapping transform", rconv.TransformRow)
	return append(transforms, transformer), nil
}
// maybeSort wraps wr in a sorting writer when the destination requires sorted
// rows but the source does not deliver them sorted; otherwise wr is returned
// unchanged.
func maybeSort(wr table.TableWriteCloser, outSch *schema.Schema, srcIsSorted bool, mvOpts *MoveOptions) (table.TableWriteCloser, error) {
	if !srcIsSorted && mvOpts.Dest.MustWriteSorted() {
		wr = table.NewSortingTableWriter(wr, outSch.GetPKIndex(), mvOpts.ContOnErr)
	}
	return wr, nil
}
// getOutSchema resolves the schema rows are written with.  Updates keep the
// destination's existing schema; overwrites use the schema file when given
// (falling back to the input schema) plus an optional explicit primary key.
func getOutSchema(inSch *schema.Schema, root *doltdb.RootValue, fs filesys.ReadableFS, mvOpts *MoveOptions) (*schema.Schema, error) {
	if mvOpts.Operation == UpdateOp {
		// Get schema from target
		rd, _, err := mvOpts.Dest.CreateReader(root, fs)
		if err != nil {
			return nil, err
		}
		defer rd.Close()
		return rd.GetSchema(), nil
	}

	sch, err := schFromFileOrDefault(mvOpts.SchFile, fs, inSch)
	if err != nil {
		return nil, err
	}

	return addPrimaryKey(sch, mvOpts.PrimaryKey)
}
// schFromFileOrDefault loads a JSON schema from path on fs, or returns defSch
// when path is empty.
func schFromFileOrDefault(path string, fs filesys.ReadableFS, defSch *schema.Schema) (*schema.Schema, error) {
	if path == "" {
		return defSch, nil
	}

	data, err := fs.ReadFile(path)
	if err != nil {
		return nil, err
	}

	return jsonenc.SchemaFromJSON(data)
}
// addPrimaryKey returns sch with a primary-key constraint on explicitKey.
// When explicitKey is empty, sch is returned unchanged.  Errors if the named
// field is absent from the schema or the constraint cannot be added.
func addPrimaryKey(sch *schema.Schema, explicitKey string) (*schema.Schema, error) {
	if explicitKey == "" {
		return sch, nil
	}
	explicitKeyIdx := sch.GetFieldIndex(explicitKey)
	if explicitKeyIdx == -1 {
		return nil, fmt.Errorf("could not find a field named \"%s\" in the schema", explicitKey)
	}
	sch = sch.CopyWithoutConstraints()
	// was: the AddConstraint error was silently discarded (it returns an
	// error — see its other call sites, which check or panic on it).
	if err := sch.AddConstraint(schema.NewConstraint(schema.PrimaryKey, []int{explicitKeyIdx})); err != nil {
		return nil, err
	}
	return sch, nil
}
@@ -0,0 +1,135 @@
package mvdata
import (
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
"testing"
)
// Fixture file names written to the in-memory filesystem by TestDataMover.
const (
	schemaFile = "schema.json"
	mappingFile = "mapping.json"
)
// TestDataMover runs NewDataMover + Move end to end for several moves: file
// to file (csv->psv, csv->nbf with explicit pk), file to table (with and
// without schema/mapping files), and nbf to table.  Each case seeds the
// source location with the fake fixture table first.
func TestDataMover(t *testing.T) {
	tests := []struct {
		schemaJSON string
		mappingJSON string
		mvOpts *MoveOptions
	}{
		{
			"",
			"",
			&MoveOptions{
				Operation: OverwriteOp,
				ContOnErr: false,
				SchFile: "",
				MappingFile: "",
				PrimaryKey: "",
				Src: NewDataLocation("data.csv", ""),
				Dest: NewDataLocation("data.psv", "psv")},
		},
		{
			"",
			"",
			&MoveOptions{
				Operation: OverwriteOp,
				ContOnErr: false,
				SchFile: "",
				MappingFile: "",
				PrimaryKey: "a",
				Src: NewDataLocation("data.csv", ""),
				Dest: NewDataLocation("data.nbf", "")},
		},
		{
			"",
			"",
			&MoveOptions{
				Operation: OverwriteOp,
				ContOnErr: false,
				SchFile: "",
				MappingFile: "",
				PrimaryKey: "",
				Src: NewDataLocation("data.nbf", "nbf"),
				Dest: NewDataLocation("table-name", "")},
		},
		{
			"",
			"",
			&MoveOptions{
				Operation: OverwriteOp,
				ContOnErr: false,
				SchFile: "",
				MappingFile: "",
				PrimaryKey: "a",
				Src: NewDataLocation("data.csv", ""),
				Dest: NewDataLocation("table-name", "")},
		},
		{
			`{
	"fields": [
		{"name": "key", "kind": "string", "required": true},
		{"name": "value", "kind": "int", "required": true}
	],
	"constraints": [
		{"constraint_type":"primary_key", "field_indices":[0]}
	]
}`,
			`{"a":"key","b":"value"}`,
			&MoveOptions{
				Operation: OverwriteOp,
				ContOnErr: false,
				SchFile: "",
				MappingFile: "",
				PrimaryKey: "",
				Src: NewDataLocation("data.csv", ""),
				Dest: NewDataLocation("table-name", "")},
		},
	}
	for _, test := range tests {
		var err error
		var verr errhand.VerboseError
		_, root, fs := createRootAndFS()
		// Write optional schema/mapping fixtures and point the options at them.
		// NOTE(review): the WriteFile errors assigned to err here are never
		// checked before err is reused below — confirm that is intentional.
		if test.schemaJSON != "" {
			test.mvOpts.SchFile = schemaFile
			err = fs.WriteFile(schemaFile, []byte(test.schemaJSON))
		}
		if test.mappingJSON != "" {
			test.mvOpts.MappingFile = mappingFile
			err = fs.WriteFile(mappingFile, []byte(test.mappingJSON))
		}
		// Seed the source location with the fixture rows.
		src := test.mvOpts.Src
		seedWr, verr := src.CreateOverwritingDataWriter(root, fs, true, fakeSchema)
		if verr != nil {
			t.Fatal(verr.Verbose())
		}
		imtRd := table.NewInMemTableReader(imt)
		_, _, err = table.PipeRows(imtRd, seedWr, false)
		seedWr.Close()
		imtRd.Close()
		if err != nil {
			t.Fatal(err)
		}
		dm, verr := NewDataMover(root, fs, test.mvOpts)
		if verr != nil {
			t.Fatal(verr.Verbose())
		}
		err = dm.Move()
		if err != nil {
			t.Fatal(err)
		}
	}
}
+150
View File
@@ -0,0 +1,150 @@
package edit
import (
"flag"
"fmt"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"github.com/liquidata-inc/ld/dolt/go/libraries/schema"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
"os"
"strings"
)
// putRowArgs holds the parsed command line arguments for the put-row command.
type putRowArgs struct {
	// FieldNames lists the field names in the order they appeared on the command line.
	FieldNames []string
	// KVPs maps each lowercased, trimmed field name to the raw value supplied for it.
	KVPs map[string]string
	// TableName is the table the row should be written to.
	TableName string
}
// putRowUsage builds the usage callback for the put-row flag set; invoking it
// prints the registered flag defaults.
func putRowUsage(fs *flag.FlagSet) func() {
	// The method value is already a func() bound to fs.
	return fs.PrintDefaults
}
// parsePutRowArgs parses the put-row command line: a required -table flag and
// any number of key:value positional arguments describing the row's fields.
func parsePutRowArgs(commandStr string, args []string) (*putRowArgs, errhand.VerboseError) {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = putRowUsage(fs)

	tableName := fs.String(tableParam, "", "The table where the row should be added.")
	fs.Parse(args)

	if *tableName == "" {
		// Fixed typo in user-facing message: "paramater" -> "parameter".
		return nil, errhand.BuildDError("Missing required parameter -%s", tableParam).Build()
	}

	fieldNames, kvps, verr := parseKVPs(fs.Args())

	if verr != nil {
		return nil, verr
	}

	return &putRowArgs{fieldNames, kvps, *tableName}, nil
}
// parseKVPs splits each argument of the form key:value into a map from the
// lowercased, trimmed key to its raw value, also returning the keys in the
// order they were supplied. An argument with no ':' separator, or with an
// empty key, yields a verbose error.
func parseKVPs(args []string) ([]string, map[string]string, errhand.VerboseError) {
	fieldNames := make([]string, len(args))
	kvps := make(map[string]string, len(args))

	for i, arg := range args {
		sepIdx := strings.IndexByte(arg, ':')

		// Guard clause: no separator at all.
		if sepIdx == -1 {
			bdr := errhand.BuildDError(`"%s" is not a valid key value pair.`, strings.TrimSpace(arg))
			bdr.AddDetails("Key value pairs must be in the format key:value. \"%s\" has no key value separator ':'. ", strings.TrimSpace(arg))
			bdr.AddDetails("To set a value to empty you may use \"key:\" but not just \"key\", however leaving this key off of the command line has the same effect.")
			return nil, nil, bdr.Build()
		}

		key := strings.ToLower(strings.TrimSpace(arg[:sepIdx]))

		// Guard clause: separator present but the key is empty.
		if key == "" {
			bdr := errhand.BuildDError(`"%s" is not a valid key value pair.`, strings.TrimSpace(arg))
			bdr.AddDetails("Key value pairs must be in the format key:value, where the length of key must be at least 1 character. \"%s\" has a length of 0 characters", strings.TrimSpace(arg))
			return nil, nil, bdr.Build()
		}

		kvps[key] = arg[sepIdx+1:]
		fieldNames[i] = key
	}

	return fieldNames, kvps, nil
}
// PutRow implements the put-row command: it parses the command line, looks up
// the target table in the working root, builds a row from the key:value
// arguments, and writes the row into the working set. Returns 0 on success
// and 1 on any failure (with a message on stderr).
func PutRow(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	parsed, verr := parsePutRowArgs(commandStr, args)

	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	root, err := cliEnv.WorkingRoot()

	if err != nil {
		fmt.Fprintln(os.Stderr, color.RedString("Unable to get working value."))
		return 1
	}

	tbl, found := root.GetTable(parsed.TableName)

	if !found {
		fmt.Fprintln(os.Stderr, color.RedString("Unknown table %s", parsed.TableName))
		return 1
	}

	vrw := root.VRW()
	sch := tbl.GetSchema(vrw)

	row, verr := createRow(sch, parsed)

	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	// Insert (or replace) the row keyed by its primary key, then store the
	// updated table back into the working root.
	mapEditor := tbl.GetRowData().Edit()
	updatedRows := mapEditor.Set(table.GetPKFromRow(row), table.GetNonPKFieldListFromRow(row, vrw)).Map()
	tbl = tbl.UpdateRows(updatedRows)
	root = root.PutTable(cliEnv.DoltDB, parsed.TableName, tbl)

	verr = cliEnv.UpdateWorkingRoot(root)

	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	fmt.Println(color.CyanString("Successfully put row."))
	return 0
}
// createRow converts the parsed key:value pairs into a table.Row conforming
// to sch. It verifies that every supplied key exists in the schema, that each
// value converts to its field's type, and that no required field is missing.
func createRow(sch *schema.Schema, prArgs *putRowArgs) (*table.Row, errhand.VerboseError) {
	_, _, unknownFields := sch.IntersectFields(prArgs.FieldNames)

	if len(unknownFields) > 0 {
		bdr := errhand.BuildDError("Not all supplied keys are known in this table's schema.")
		bdr.AddDetails("The fields %v were supplied but are not known in this table.", unknownFields)
		return nil, bdr.Build()
	}

	untypedData, badField := table.RowDataFromUntypedMap(sch, prArgs.KVPs)
	row := table.NewRow(untypedData)

	if badField != nil {
		fld := sch.GetField(sch.GetFieldIndex(*badField))
		badVal := prArgs.KVPs[*badField]
		bdr := errhand.BuildDError("Not all parameter values could be converted to the appropriate types for the table.")
		bdr.AddDetails(`For parameter "%s", could not convert "%s" to a %s`, *badField, badVal, fld.KindString())
		return nil, bdr.Build()
	}

	if !table.RowIsValid(row) {
		missing := table.InvalidFieldsForRow(row)
		bdr := errhand.BuildDError("Missing required fields.")
		bdr.AddDetails("The following missing fields are required: %v", missing)
		return nil, bdr.Build()
	}

	return row, nil
}
+71
View File
@@ -0,0 +1,71 @@
package edit
import (
"github.com/attic-labs/noms/go/types"
"github.com/google/uuid"
"strings"
"testing"
)
// expectedId is the primary key of the row that TestPutRow inserts and then
// reads back.
var expectedId = types.UUID(uuid.Must(uuid.Parse("11111111-1111-1111-1111-111111111111")))

// expectedFieldVals are the field values TestPutRow expects for the row
// identified by expectedId after a successful put.
var expectedFieldVals = map[string]types.Value{
	"id":         expectedId,
	"name":       types.String("Eric Ericson"),
	"age":        types.Uint(45),
	"is_married": types.Bool(true),
}
// TestPutRow drives the put-row command end to end against a seeded
// environment, checking the exit code for each argument list and, on success,
// the resulting row's field values (including the optional "title" field).
func TestPutRow(t *testing.T) {
	tests := []struct {
		args          []string // command line handed to PutRow
		expectedRes   int      // expected exit code (0 success, 1 failure)
		expectedTitle string   // expected "title" value, checked only on success
	}{
		{[]string{""}, 1, ""},
		{[]string{"-table", tableName}, 1, ""},
		// Empty primary key value is rejected.
		{[]string{"-table", tableName, "id:", "name:Eric Ericson", "age:45", "is_married:true"}, 1, ""},
		{[]string{"-table", tableName, "id:11111111-1111-1111-1111-111111111111", "name:Eric Ericson", "age:45", "is_married:true", "title:Dolt"}, 0, "Dolt"},
		// "title:" sets title to the empty string, which is allowed.
		{[]string{"-table", tableName, "id:11111111-1111-1111-1111-111111111111", "name:Eric Ericson", "age:45", "is_married:true", "title:"}, 0, ""},
		// "title" with no ':' separator is not a valid key:value pair.
		{[]string{"-table", tableName, "id:11111111-1111-1111-1111-111111111111", "name:Eric Ericson", "age:45", "is_married:true", "title"}, 1, ""},
		// ":Dolt" has an empty key, which is rejected.
		{[]string{"-table", tableName, "id:11111111-1111-1111-1111-111111111111", "name:Eric Ericson", "age:45", "is_married:true", ":Dolt"}, 1, ""},
		// "1" is not a valid UUID for the id field.
		{[]string{"-table", tableName, "id:1", "name:Eric Ericson", "age:45", "is_married:true"}, 1, ""},
		{[]string{"-table", tableName, "id:1", "name:Eric Ericson", "age:45", "is_married:true"}, 1, ""},
	}
	for _, test := range tests {
		cliEnv := createEnvWithSeedData(t)
		commandStr := "dolt edit putrow"
		result := PutRow(commandStr, test.args, cliEnv)
		if result != test.expectedRes {
			commandLine := commandStr + " " + strings.Join(test.args, " ")
			t.Fatal("Unexpected failure. command", commandLine, "returned", result)
		}
		if result == 0 {
			// On success, read the row back and compare every expected field.
			root, _ := cliEnv.WorkingRoot()
			tbl, _ := root.GetTable(tableName)
			sch := tbl.GetSchema(cliEnv.DoltDB.ValueReadWriter())
			row, exists := tbl.GetRow(expectedId, sch)
			if !exists {
				t.Fatal("Could not find row")
			}
			rowData := row.CurrData()
			for k, v := range expectedFieldVals {
				val, fld := rowData.GetFieldByName(k)
				if !val.Equals(v) {
					t.Error("Unexpected value for", fld.NameStr(), "expected:", v, "actual:", val)
				}
			}
			titleVal, _ := rowData.GetFieldByName("title")
			if !titleVal.Equals(types.String(test.expectedTitle)) {
				t.Error("Value of title was not the expected value. expected", test.expectedTitle, "actual", titleVal)
			}
		}
	}
}
+165
View File
@@ -0,0 +1,165 @@
package edit
import (
"flag"
"fmt"
"github.com/attic-labs/noms/go/types"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
"os"
"strings"
)
// rmRowUsage builds the usage callback for the rm-row flag set; invoking it
// prints the registered flag defaults.
func rmRowUsage(fs *flag.FlagSet) func() {
	// The method value is already a func() bound to fs.
	return fs.PrintDefaults
}
// rmRowArgs holds the parsed command line arguments for the rm-row command.
type rmRowArgs struct {
	// TableName is the table to delete the row from.
	TableName string
	// PKFldName is the lowercased primary key field name supplied by the user.
	PKFldName string
	// PKValue is the raw string form of the primary key of the row to delete.
	PKValue string
}
// parseRmRowArgs parses the rm-row command line: a required -table flag and
// at most one key:value positional argument identifying the row to delete by
// its primary key.
func parseRmRowArgs(commandStr string, args []string) (*rmRowArgs, errhand.VerboseError) {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	// Fixed: this was wired to updateUsage, leaving rmRowUsage dead code.
	fs.Usage = rmRowUsage(fs)

	// Fixed copy-pasted help text from put-row ("added" -> "removed from").
	tableName := fs.String(tableParam, "", "The table the row should be removed from.")
	fs.Parse(args)

	if *tableName == "" {
		// Fixed typo in user-facing message: "paramater" -> "parameter".
		return nil, errhand.BuildDError("Missing required parameter -%s", tableParam).Build()
	}

	pk := ""
	pkVal := ""

	if fs.NArg() > 1 {
		bdr := errhand.BuildDError("Must supply exactly one key:value for the row to delete.")
		bdr.AddDetails("Need exactly one key:value where key should be the name of the primary key field, and is the primary key's value for the row being deleted.")
		return nil, bdr.Build()
	} else if fs.NArg() == 1 {
		keys, kvps, verr := parseKVPs(fs.Args())

		if verr != nil {
			return nil, verr
		}

		pk = keys[0]
		pkVal = kvps[pk]
	}

	return &rmRowArgs{*tableName, pk, pkVal}, nil
}
// RmRow implements the rm-row command: it deletes the single row identified
// by a primary-key key:value argument from the named table in the working
// set. Returns 0 on success and 1 on any failure (with a message on stderr).
func RmRow(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	rmArgs, verr := parseRmRowArgs(commandStr, args)

	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	root, tbl, verr := getRootAndTable(cliEnv, rmArgs.TableName)

	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	pkVal, verr := getPKOfRowToDelete(root, tbl, rmArgs.PKFldName, rmArgs.PKValue)

	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	if verr = updateTableWithRowRemoved(root, tbl, rmArgs.TableName, pkVal, cliEnv); verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	fmt.Println(color.CyanString("Successfully Removed row."))
	return 0
}
// getRootAndTable loads the working root and looks up tblName within it,
// returning verbose errors when either step fails.
func getRootAndTable(cliEnv *env.DoltCLIEnv, tblName string) (*doltdb.RootValue, *doltdb.Table, errhand.VerboseError) {
	root, err := cliEnv.WorkingRoot()

	if err != nil {
		return nil, nil, errhand.BuildDError("Unable to get working value for the dolt data repository.").Build()
	}

	tbl, found := root.GetTable(tblName)

	if !found {
		return nil, nil, errhand.BuildDError("Unknown table %s", tblName).Build()
	}

	return root, tbl, nil
}
// getPKOfRowToDelete validates that pkFldName names the table's primary key
// field and converts pkValue from its string form into the key field's noms
// type, returning the typed key value.
func getPKOfRowToDelete(root *doltdb.RootValue, tbl *doltdb.Table, pkFldName, pkValue string) (types.Value, errhand.VerboseError) {
	sch := tbl.GetSchema(root.VRW())
	keyField := sch.GetField(sch.GetPKIndex())

	if pkFldName == "" {
		return nil, errhand.BuildDError("Missing required parameter %s:PK_VALUE", keyField.NameStr()).Build()
	}

	// The supplied key must be the primary key field (comparison is on the
	// lowercased name, matching how parseKVPs normalizes keys).
	if keyField.NameStr() != strings.ToLower(pkFldName) {
		bdr := errhand.BuildDError("Missing required parameter %s:PK_VALUE", keyField.NameStr())
		bdr.AddDetails("Supplied parameter %[1]s:%[2]s is not valid as %[1]s is not the primary key.", pkFldName, pkValue)
		return nil, bdr.Build()
	}

	convFunc := table.GetConvFunc(types.StringKind, keyField.NomsKind())

	if convFunc == nil {
		bdr := errhand.BuildDError(`Could not convert from "%[1]s" to a %[2]s as conversion from string to %[2]s is not defined.`, pkValue, keyField.KindString())
		return nil, bdr.Build()
	}

	pk, err := convFunc(types.String(pkValue))

	if err != nil {
		return nil, errhand.BuildDError(`Failed to convert from "%s" to a %s`, pkValue, keyField.KindString()).Build()
	}

	return pk, nil
}
// updateTableWithRowRemoved deletes the row keyed by pk from tbl, writes the
// updated table into root under tblName, and stores that root as the new
// working root. The map edit and the table/root updates are each wrapped in
// PanicToVError so panics from the underlying storage layer surface as
// verbose errors instead of crashing the command.
func updateTableWithRowRemoved(root *doltdb.RootValue, tbl *doltdb.Table, tblName string, pk types.Value, cliEnv *env.DoltCLIEnv) errhand.VerboseError {
	m := tbl.GetRowData()
	_, ok := m.MaybeGet(pk)
	if !ok {
		return errhand.BuildDError(`No row with the key of %s was found.`, types.EncodedValue(pk)).Build()
	}
	// Remove the row; the closure reassigns m to the edited map.
	verr := errhand.PanicToVError("Failed to remove the row from the table.", func() errhand.VerboseError {
		me := m.Edit()
		me.Remove(pk)
		m = me.Map()
		return nil
	})
	if verr != nil {
		return verr
	}
	// Write the edited rows back into the table and the table into the root.
	verr = errhand.PanicToVError("Failed to update the table.", func() errhand.VerboseError {
		tbl = tbl.UpdateRows(m)
		root = root.PutTable(cliEnv.DoltDB, tblName, tbl)
		return nil
	})
	if verr != nil {
		return verr
	}
	// Persist the updated root as the new working set.
	verr = cliEnv.UpdateWorkingRoot(root)
	return verr
}
+69
View File
@@ -0,0 +1,69 @@
package edit
import (
"github.com/attic-labs/noms/go/types"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/dtestutils"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"strings"
"testing"
)
// allIDs holds the primary keys of every seeded row; rm-row test cases that
// should fail expect all of these to remain.
var allIDs = []types.UUID{
	types.UUID(dtestutils.UUIDS[0]),
	types.UUID(dtestutils.UUIDS[1]),
	types.UUID(dtestutils.UUIDS[2]),
}

// noZeroID is allIDs without the first seeded id; expected after that row is
// successfully removed.
var noZeroID = []types.UUID{
	types.UUID(dtestutils.UUIDS[1]),
	types.UUID(dtestutils.UUIDS[2]),
}
// TestRmRow drives the rm-row command through a table of argument lists and
// verifies both the exit code and the set of rows remaining afterwards.
func TestRmRow(t *testing.T) {
	tests := []struct {
		args         []string     // command line handed to RmRow
		expectedRet  int          // expected exit code
		expectedKeys []types.UUID // primary keys expected to remain
	}{
		{[]string{}, 1, allIDs},
		{[]string{"-table", tableName}, 1, allIDs},
		{[]string{"-table", tableName, "id:00000000-0000-0000-0000-000000000000"}, 0, noZeroID},
		{[]string{"-table", tableName, "id:"}, 1, allIDs},
		{[]string{"-table", tableName, "id"}, 1, allIDs},
		{[]string{"-table", tableName, "00000000-0000-0000-0000-000000000000"}, 1, allIDs},
		{[]string{"-table", tableName, "id:not_a_uuid"}, 1, allIDs},
		{[]string{"-table", tableName, "id:99999999-9999-9999-9999-999999999999"}, 1, allIDs},
	}

	for _, test := range tests {
		cliEnv := createEnvWithSeedData(t)

		// Fixed: this previously said "dolt edit putrow", which made failure
		// messages misleading for the rm-row command under test.
		commandStr := "dolt edit rmrow"
		result := RmRow(commandStr, test.args, cliEnv)

		if result != test.expectedRet {
			commandLine := commandStr + " " + strings.Join(test.args, " ")
			t.Fatal("Unexpected failure. command", commandLine, "returned", result)
		}

		// Fixed: include the separating space so the label is readable.
		checkExpectedRows(t, commandStr+" "+strings.Join(test.args, " "), cliEnv, test.expectedKeys)
	}
}
// checkExpectedRows asserts that the working table contains exactly the rows
// keyed by uuids, reporting any count mismatch or missing key via t.Error.
func checkExpectedRows(t *testing.T, commandStr string, cliEnv *env.DoltCLIEnv, uuids []types.UUID) {
	root, _ := cliEnv.WorkingRoot()
	tbl, _ := root.GetTable(tableName)
	rowData := tbl.GetRowData()

	if int(rowData.Len()) != len(uuids) {
		t.Error("For", commandStr, "- Expected Row Count:", len(uuids), "Actual:", rowData.Len())
	}

	for _, id := range uuids {
		if _, found := rowData.MaybeGet(id); !found {
			t.Error("For", commandStr, "- Expected row with id:", id, "not found.")
		}
	}
}
+134
View File
@@ -0,0 +1,134 @@
package edit
import (
"flag"
"fmt"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/commands/edit/mvdata"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/typed/noms"
"os"
)
// updateUsage builds the usage callback for the update flag set; invoking it
// prints the registered flag defaults.
func updateUsage(fs *flag.FlagSet) func() {
	// The method value is already a func() bound to fs.
	return fs.PrintDefaults
}
// initUpdateFlagSet builds the flag set for the update command along with the
// string-argument and boolean-flag maps that collect its options.
func initUpdateFlagSet(commandStr string) (*flag.FlagSet, *cli.StrArgMap, *cli.BoolFlagMap) {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = updateUsage(fs)
	// String-valued options: mapping file, destination table, and an explicit
	// file-type override for when the extension can't be trusted.
	argMap := cli.NewStrArgMap(fs, map[string]string{
		mappingFileParam: "A file that lays out how fields should be mapped from input data to output data",
		tableParam:       "Destination of where the new data should be imported to.",
		fileTypeParam:    "Explicitly define the type of the file if it can't be inferred from the file extension"})
	// Boolean flags: whether to keep importing after a bad row.
	flagMap := cli.NewBoolFlagMap(fs, map[string]string{
		contOnErrParam: "Continue importing when row import errors are encountered."})
	return fs, argMap, flagMap
}
// validateUpdateArgs parses and validates the update command line, returning
// the source file location and destination table location, or (nil, nil)
// when the arguments are invalid (an error is printed in that case).
func validateUpdateArgs(fs *flag.FlagSet, args []string, argMap *cli.StrArgMap, flagMap *cli.BoolFlagMap) (*mvdata.DataLocation, *mvdata.DataLocation) {
	fs.Parse(args)
	argMap.Update()

	emptyArgs := argMap.GetEmpty()
	for _, required := range []string{tableParam} {
		if emptyArgs.Contains(required) {
			// Fixed typo in user-facing message: "paramater" -> "parameter".
			return invalidOptions(fs, "Missing required parameter -%s", required)
		}
	}

	tableName := argMap.Get(tableParam)

	if !doltdb.IsValidTableName(tableName) {
		// NOTE(review): this prints tableParam (the flag name "table"), not
		// the actual table-name regular expression — confirm the intended
		// identifier and substitute it here.
		fmt.Fprintln(
			os.Stderr,
			color.RedString("\"%s\" is not a valid table name\n", tableName),
			"table names must match the regular expression", tableParam)
		return nil, nil
	}

	if fs.NArg() != 1 {
		return invalidOptions(fs, "Exactly one file must be provided to import.")
	}

	path := fs.Arg(0)
	fileLoc := mvdata.NewDataLocation(path, argMap.Get(fileTypeParam))

	if fileLoc.Format == mvdata.InvalidDataFormat {
		return invalidOptions(fs, "Could not infer type from parameter %s. Should be a valid table name or a supported file type.", path)
	}

	// Keyed fields keep this literal correct if DataLocation ever grows.
	tableLoc := &mvdata.DataLocation{Path: tableName, Format: mvdata.DoltDB}

	return fileLoc, tableLoc
}
// parseUpdateArgs parses the update command line into MoveOptions, or returns
// nil when validation fails (validateUpdateArgs has already printed why).
func parseUpdateArgs(commandStr string, args []string) *mvdata.MoveOptions {
	fs, argMap, flagMap := initUpdateFlagSet(commandStr)
	fileLoc, tableLoc := validateUpdateArgs(fs, args, argMap, flagMap)

	if fileLoc == nil || tableLoc == nil {
		return nil
	}

	// Keyed fields (instead of the positional literal) so this stays correct
	// and readable if MoveOptions fields are ever reordered or added.
	return &mvdata.MoveOptions{
		Operation:   mvdata.UpdateOp,
		ContOnErr:   flagMap.Get(contOnErrParam),
		SchFile:     "",
		MappingFile: argMap.Get(mappingFileParam),
		PrimaryKey:  "",
		Src:         fileLoc,
		Dest:        tableLoc,
	}
}
// Update implements the update command: it imports rows from a file into an
// existing table in the working set. Returns 0 on success and 1 on any
// failure (with a message on stderr).
func Update(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	mvOpts := parseUpdateArgs(commandStr, args)

	if mvOpts == nil {
		return 1
	}

	root, err := cliEnv.WorkingRoot()

	if err != nil {
		fmt.Fprintln(os.Stderr, color.RedString("Unable to get working value."))
		return 1
	}

	// update only modifies existing tables; it never creates the destination.
	if !mvOpts.Dest.Exists(root, cliEnv.FS) {
		fmt.Fprintln(os.Stderr, color.RedString("Cannot find the table %s", mvOpts.Dest.Path))
		return 1
	}

	mover, verr := mvdata.NewDataMover(root, cliEnv.FS, mvOpts)

	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	err = mover.Move()

	if err != nil {
		// Fixed: this failure was previously silent; report it before exiting.
		fmt.Fprintln(os.Stderr, color.RedString("An error occurred moving data: %s", err.Error()))
		return 1
	}

	// When the move landed in a noms map, publish the result to the working set.
	if nomsWr, ok := mover.Wr.(noms.NomsMapWriteCloser); ok {
		err = cliEnv.PutTableToWorking(*nomsWr.GetMap(), nomsWr.GetSchema(), mvOpts.Dest.Path)

		if err != nil {
			fmt.Fprintln(os.Stderr, color.RedString("Failed to update the working value."))
			return 1
		}
	}

	return 0
}
+157
View File
@@ -0,0 +1,157 @@
package edit
import (
"github.com/attic-labs/noms/go/types"
"github.com/google/uuid"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/commands/edit/mvdata"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/dtestutils"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/typed/noms"
"strings"
"testing"
)
// createEnvWithSeedData creates a test CLI environment whose working set
// contains tableName populated with the standard test data table.
func createEnvWithSeedData(t *testing.T) *env.DoltCLIEnv {
	cliEnv := dtestutils.CreateTestEnv()
	imt, sch := dtestutils.CreateTestDataTable(true)

	rd := table.NewInMemTableReader(imt)
	wr := noms.NewNomsMapCreator(cliEnv.DoltDB.ValueReadWriter(), sch)

	_, _, err := table.PipeRows(rd, wr, false)
	rd.Close()
	wr.Close()

	if err != nil {
		// Fatal (was Error): callers immediately use the seeded table, so
		// continuing after a seed failure would only produce confusing
		// follow-on failures.
		t.Fatal("Failed to seed initial data", err)
	}

	err = cliEnv.PutTableToWorking(*wr.GetMap(), wr.GetSchema(), tableName)

	if err != nil {
		t.Fatal("Unable to put initial value of table in in mem noms db", err)
	}

	return cliEnv
}
// expectedRows and expectedIMT describe the table contents TestUpdate expects
// after its CSV updates have been applied to the seeded data.
var expectedRows []*table.Row
var expectedIMT *table.InMemTable

func init() {
	uuids := []types.UUID{
		types.UUID(uuid.Must(uuid.Parse("00000000-0000-0000-0000-000000000000"))),
		types.UUID(uuid.Must(uuid.Parse("00000000-0000-0000-0000-000000000001"))),
		types.UUID(uuid.Must(uuid.Parse("00000000-0000-0000-0000-000000000002"))),
		types.UUID(uuid.Must(uuid.Parse("00000000-0000-0000-0000-000000000003"))),
	}
	sch := dtestutils.TypedSchema
	// Row 0 is modified by the update CSV and row 3 is added; rows 1 and 2
	// are the untouched seed rows.
	expectedRows = []*table.Row{
		table.NewRow(table.RowDataFromValues(sch, []types.Value{uuids[0], types.String("Aaron Aaronson"), types.Uint(55), types.String("the best"), types.Bool(true)})),
		table.NewRow(table.RowDataFromValues(sch, []types.Value{uuids[1], types.String("John Johnson"), types.Uint(25), types.String("Dufus"), types.Bool(false)})),
		table.NewRow(table.RowDataFromValues(sch, []types.Value{uuids[2], types.String("Rob Robertson"), types.Uint(21), types.String(""), types.Bool(false)})),
		table.NewRow(table.RowDataFromValues(sch, []types.Value{uuids[3], types.String("Morris Morrison"), types.Uint(14), types.String(""), types.Bool(false)})),
	}
	expectedIMT = table.NewInMemTableWithData(dtestutils.TypedSchema, expectedRows)
}
// TestUpdate runs the update command against a seeded table using a CSV file
// (and optionally a mapping file), then compares the resulting table with the
// expected in-memory table.
func TestUpdate(t *testing.T) {
	tests := []struct {
		args             []string
		csvData          string
		mappingJSON      string
		expectedExitCode int
		expectedIMT      *table.InMemTable
	}{
		{
			[]string{"-table", tableName, csvPath},
			`id, name, title, age, is_married
00000000-0000-0000-0000-000000000000,Aaron Aaronson,the best,55,true
00000000-0000-0000-0000-000000000003,Morris Morrison,,14,false`,
			"",
			0,
			expectedIMT,
		},
	}

	for _, test := range tests {
		cliEnv := createEnvWithSeedData(t)
		err := cliEnv.FS.WriteFile(csvPath, []byte(test.csvData))

		if err != nil {
			// Fixed message: this write is the CSV file, not the mapping file.
			t.Fatal("Failed to create csv file.")
		}

		if test.mappingJSON != "" {
			err = cliEnv.FS.WriteFile(mappingPath, []byte(test.mappingJSON))

			if err != nil {
				t.Fatal("Failed to create mapping file.")
			}
		}

		// Fixed: the command string previously said "dolt edit create" even
		// though this test exercises the update command.
		exitCode := Update("dolt edit update", test.args, cliEnv)

		if exitCode != test.expectedExitCode {
			commandLine := "dolt edit update " + strings.Join(test.args, " ")
			t.Error(commandLine, "returned with exit code", exitCode, "expected", test.expectedExitCode)
		}

		dtestutils.CheckResultTable(t, tableName, cliEnv, test.expectedIMT, "id")
	}
}
// TestParseUpdateArgs checks that update command lines parse into the
// expected MoveOptions, and that invalid command lines parse to nil.
func TestParseUpdateArgs(t *testing.T) {
	tests := []struct {
		args         []string
		expectedOpts *mvdata.MoveOptions
	}{
		{[]string{}, nil},
		{[]string{"-table", "table_name"}, nil},
		{
			[]string{"-table", "table_name", "file.csv"},
			// Keyed fields (instead of positional literals) so these stay
			// correct if MoveOptions fields are reordered or added.
			&mvdata.MoveOptions{
				Operation:   mvdata.UpdateOp,
				ContOnErr:   false,
				SchFile:     "",
				MappingFile: "",
				PrimaryKey:  "",
				Src:         &mvdata.DataLocation{Path: "file.csv", Format: mvdata.CsvFile},
				Dest:        &mvdata.DataLocation{Path: "table_name", Format: mvdata.DoltDB},
			},
		},
		{
			[]string{"-table", "table_name", "file.unsupported"},
			nil,
		},
		{
			[]string{"-table", "invalid_table_name.csv", "file.csv"},
			nil,
		},
		{
			[]string{"-table", "table_name", "-map", "mapping.json", "-continue", "file.nbf"},
			&mvdata.MoveOptions{
				Operation:   mvdata.UpdateOp,
				ContOnErr:   true,
				SchFile:     "",
				MappingFile: "mapping.json",
				PrimaryKey:  "",
				Src:         &mvdata.DataLocation{Path: "file.nbf", Format: mvdata.NbfFile},
				Dest:        &mvdata.DataLocation{Path: "table_name", Format: mvdata.DoltDB},
			},
		},
	}

	for _, test := range tests {
		actualOpts := parseUpdateArgs("dolt edit update", test.args)

		if !optsEqual(test.expectedOpts, actualOpts) {
			argStr := strings.Join(test.args, " ")
			t.Error("Unexpected result for args:", argStr)
		}
	}
}
+61
View File
@@ -0,0 +1,61 @@
package commands
import (
"flag"
"fmt"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"os"
)
// initUsage builds the usage callback for the init flag set; invoking it
// prints the registered flag defaults.
func initUsage(fs *flag.FlagSet) func() {
	// The method value is already a func() bound to fs.
	return fs.PrintDefaults
}
// Init implements the dolt init command: it initializes the current (empty)
// directory as a dolt data repository, taking the committer name and email
// from the -name/-email flags or, when those are absent, from the global
// config. Returns 0 on success and 1 on any failure (message on stderr).
func Init(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	// Refuse to run twice, and refuse to run in a non-empty directory.
	if cliEnv.HasLDDir() {
		fmt.Fprintln(os.Stderr, color.RedString("This directory has already been initialized."))
		return 1
	} else if !cliEnv.IsCWDEmpty() {
		fmt.Fprintln(os.Stderr, color.RedString("init must be run on an empty directory"))
		return 1
	}
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = initUsage(fs)
	name := fs.String("name", "", "The name used in commits to this repo. If not provided will be taken from \""+env.UserNameKey+"\" in the global config.")
	email := fs.String("email", "", "The email address used. If not provided will be taken from \""+env.UserEmailKey+"\" in the global config.")
	fs.Parse(args)
	// Fall back to the global config for any identity value not supplied on
	// the command line.
	name = cliEnv.Config.IfEmptyUseConfig(*name, env.UserNameKey)
	email = cliEnv.Config.IfEmptyUseConfig(*email, env.UserEmailKey)
	if *name == "" {
		fmt.Fprintln(os.Stderr,
			color.RedString("Could not determine %[1]s. "+
				"Use the init parameter -name \"FIRST LAST\" to set it for this repo, "+
				"or dolt config -global -set %[1]s:\"FIRST LAST\"", env.UserNameKey))
		return 1
	} else if *email == "" {
		fmt.Fprintln(os.Stderr,
			color.RedString("Could not determine %[1]s. "+
				"Use the init parameter -email \"EMAIL_ADDRESS\" to set it for this repo, "+
				"or dolt config -global -set %[1]s:\"EMAIL_ADDRESS\"", env.UserEmailKey))
		return 1
	}
	err := cliEnv.InitRepo(*name, *email)
	if err != nil {
		fmt.Fprintln(os.Stderr, color.RedString("Failed to initialize directory as a data repo. %s", err.Error()))
		return 1
	}
	fmt.Println(color.CyanString("Successfully initialized dolt data repository."))
	return 0
}
+90
View File
@@ -0,0 +1,90 @@
package commands
import (
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"testing"
)
// TestInit runs dolt init with combinations of command-line identity flags
// and global-config identity values, checking whether initialization succeeds
// and that the filesystem reflects the outcome.
func TestInit(t *testing.T) {
	tests := []struct {
		Name          string            // test case label
		Args          []string          // command line handed to Init
		GlobalConfig  map[string]string // values pre-seeded into the global config
		ExpectSuccess bool
	}{
		{
			"Command Line name and email",
			[]string{"-name", "Bill Billerson", "-email", "bigbillieb@fake.horse"},
			map[string]string{},
			true,
		},
		{
			"Global config name and email",
			[]string{},
			map[string]string{
				env.UserNameKey:  "Bill Billerson",
				env.UserEmailKey: "bigbillieb@fake.horse",
			},
			true,
		},
		{
			"No Name",
			[]string{"-email", "bigbillieb@fake.horse"},
			map[string]string{},
			false,
		},
		{
			"No Email",
			[]string{"-name", "Bill Billerson"},
			map[string]string{},
			false,
		},
	}
	for _, test := range tests {
		cliEnv := createUninitializedEnv()
		gCfg, _ := cliEnv.Config.GetConfig(env.GlobalConfig)
		gCfg.SetStrings(test.GlobalConfig)
		result := Init("dolt init", test.Args, cliEnv)
		if (result == 0) != test.ExpectSuccess {
			t.Error(test.Name, "- Expected success:", test.ExpectSuccess, "result:", result == 0)
		} else if test.ExpectSuccess {
			// succeeded as expected
			if !cliEnv.HasLDDir() {
				t.Error(test.Name, "- .dolt dir should exist after initialization")
			}
		} else {
			// failed as expected
			if !cliEnv.IsCWDEmpty() {
				t.Error(test.Name, "- CWD should be empty after failure to initialize... unless it wasn't empty to start with")
			}
		}
	}
}
// TestInitTwice verifies that a repository can be initialized only once: the
// first init must succeed and a second init of the same env must fail.
func TestInitTwice(t *testing.T) {
	cliEnv := createUninitializedEnv()

	result := Init("dolt init", []string{"-name", "Bill Billerson", "-email", "bigbillieb@fake.horse"}, cliEnv)

	if result != 0 {
		t.Error("First init should succeed")
	}

	result = Init("dolt init", []string{"-name", "Bill Billerson", "-email", "bigbillieb@fake.horse"}, cliEnv)

	if result == 0 {
		// Fixed copy-pasted message: this branch is about the second init.
		t.Error("Second init should fail")
	}
}
// TestInitWithNonEmptyDir verifies that init refuses to run in a directory
// that already contains files.
func TestInitWithNonEmptyDir(t *testing.T) {
	cliEnv := createUninitializedEnv()

	// Fixed: the WriteFile error was previously ignored; if the write fails
	// the directory would be empty and the test would pass for the wrong reason.
	if err := cliEnv.FS.WriteFile("file.txt", []byte("file contents.")); err != nil {
		t.Fatal("Failed to make the directory non-empty.", err)
	}

	result := Init("dolt init", []string{"-name", "Bill Billerson", "-email", "bigbillieb@fake.horse"}, cliEnv)

	if result == 0 {
		t.Error("Init should fail if directory is not empty")
	}
}
+101
View File
@@ -0,0 +1,101 @@
package commands
import (
"flag"
"fmt"
"github.com/attic-labs/noms/go/hash"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"os"
"strings"
)
// commitLoggerFunc consumes one commit's metadata and hash for display.
type commitLoggerFunc func(*doltdb.CommitMeta, hash.Hash)

// logToStdOutFunc prints a commit to stdout in a git-log-like format: a
// yellow "commit <hash>" line followed by author, date, and description.
func logToStdOutFunc(cm *doltdb.CommitMeta, ch hash.Hash) {
	fmt.Println(color.YellowString("commit %s", ch.String()))
	printAuthor(cm)
	printDate(cm)
	printDesc(cm)
}
// printAuthor writes the commit author line in "Author: Name <email>" form.
func printAuthor(cm *doltdb.CommitMeta) {
	const authorFmt = "Author: %s <%s>\n"
	fmt.Printf(authorFmt, cm.Name, cm.Email)
}
// printDate writes the commit's formatted timestamp on a "Date:" line.
func printDate(cm *doltdb.CommitMeta) {
	fmt.Println("Date: ", cm.FormatTS())
}
// printDesc writes the commit description with every line tab-indented,
// surrounded by blank lines, mirroring git log's body formatting.
func printDesc(cm *doltdb.CommitMeta) {
	indented := "\n\t" + strings.Replace(cm.Description, "\n", "\n\t", -1) + "\n"
	fmt.Println(indented)
}
// Log implements the log command, printing commit history to stdout using the
// standard stdout logger.
func Log(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	return logWithLoggerFunc(commandStr, args, cliEnv, logToStdOutFunc)
}
// logUsage builds the usage callback for the log flag set; invoking it prints
// the registered flag defaults.
func logUsage(fs *flag.FlagSet) func() {
	// The method value is already a func() bound to fs.
	return fs.PrintDefaults
}
// logWithLoggerFunc resolves the current branch's HEAD commit and walks its
// history, passing each commit to loggerFunc. The -n flag limits the number
// of commits printed (default 30; -1 prints all). Returns 0 on success.
func logWithLoggerFunc(commandStr string, args []string, cliEnv *env.DoltCLIEnv, loggerFunc commitLoggerFunc) int {
	cwb := cliEnv.RepoState.CWBHeadSpec()
	commit, err := cliEnv.DoltDB.Resolve(cwb)

	if err != nil {
		fmt.Fprintln(os.Stderr, color.HiRedString("Fatal error: cannot get HEAD commit for current branch."))
		return 1
	}

	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	// Fixed: this was wired to initUsage (the init command's usage), leaving
	// logUsage dead code.
	fs.Usage = logUsage(fs)

	n := fs.Int("n", 30, "Number of commits to print. -1 To print all commits")
	fs.Parse(args)

	err = logCommit(cliEnv.DoltDB, commit, n, loggerFunc)

	if err != nil {
		fmt.Fprintln(os.Stderr, "Error printing commit.")
		return 1
	}

	return 0
}
// logCommit prints commit via loggerFunc and then recursively walks its
// ancestry, printing at most *n commits in total (-1 means unlimited). n is
// shared across the whole traversal so the global budget is respected.
func logCommit(ddb *doltdb.DoltDB, commit *doltdb.Commit, n *int, loggerFunc commitLoggerFunc) error {
	hash := commit.HashOf()
	cm := commit.GetCommitMeta()
	loggerFunc(cm, hash)

	if *n != -1 {
		*n = *n - 1
	}

	numParents := commit.NumParents()

	for i := 0; i < numParents && (*n == -1 || *n > 0); i++ {
		parentCommit, err := ddb.ResolveParent(commit, i)

		if err != nil {
			return err
		}

		err = logCommit(ddb, parentCommit, n, loggerFunc)

		if err != nil {
			return err
		}
		// BUG FIX: the original returned unconditionally here, so despite the
		// loop over all parents only the first parent of a merge commit was
		// ever followed. Continue to the next parent instead.
	}

	return nil
}
+21
View File
@@ -0,0 +1,21 @@
package commands
import (
"fmt"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"testing"
)
// TestLog initializes a fresh repo and verifies that HEAD of master resolves
// to a commit. (Previously all errors after InitRepo were ignored and the
// test asserted nothing.)
func TestLog(t *testing.T) {
	cliEnv := createUninitializedEnv()
	err := cliEnv.InitRepo("Bill Billerson", "bigbillieb@fake.horse")

	if err != nil {
		// Fatal (was Error): nothing below makes sense without a repo.
		t.Fatal("Failed to init repo")
	}

	cs, err := doltdb.NewCommitSpec("HEAD", "master")

	if err != nil {
		t.Fatal("Failed to create commit spec:", err)
	}

	commit, err := cliEnv.DoltDB.Resolve(cs)

	if err != nil {
		t.Fatal("Failed to resolve HEAD of master:", err)
	}

	// Keep the debug print; successful resolution above is the real assertion.
	fmt.Println(commit)
}
+48
View File
@@ -0,0 +1,48 @@
package commands
import (
"flag"
"fmt"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"os"
"sort"
)
// lsUsage builds the usage callback for the ls flag set; invoking it prints
// the registered flag defaults.
func lsUsage(fs *flag.FlagSet) func() {
	// The method value is already a func() bound to fs.
	return fs.PrintDefaults
}
// Ls implements the ls command: it lists the names of all tables in the
// working set. Returns 0 on success and 1 on failure (message on stderr).
func Ls(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = lsUsage(fs)
	fs.Parse(args)

	working, verr := getWorking(cliEnv)

	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	if verr = printTables(working); verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	return 0
}
// printTables prints the names of all tables in root to stdout, sorted
// alphabetically, one per indented line under a header.
func printTables(root *doltdb.RootValue) errhand.VerboseError {
	names := root.GetTableNames()
	sort.Strings(names)

	fmt.Println("Tables in the working set:")

	for _, name := range names {
		fmt.Println("\t", name)
	}

	return nil
}
+95
View File
@@ -0,0 +1,95 @@
package commands
import (
"flag"
"fmt"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"os"
"strings"
)
// resetUsage builds the usage callback for the reset flag set; invoking it
// prints the registered flag defaults.
func resetUsage(fs *flag.FlagSet) func() {
	// The method value is already a func() bound to fs.
	return fs.PrintDefaults
}
// Reset implements the reset command: it restores the named staged tables
// (all changed tables when none are named) to their HEAD versions, then
// reports what remains unstaged. Returns 0 on success, 1 on failure.
func Reset(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = resetUsage(fs)
	fs.Parse(args)

	stagedRoot, headRoot, verr := getStagedAndHead(cliEnv)

	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	// With no explicit tables, reset everything that differs between the
	// staged root and HEAD.
	tbls := fs.Args()
	if len(tbls) == 0 {
		tbls = allTables(stagedRoot, headRoot)
	}

	if verr = validateTables(tbls, stagedRoot, headRoot); verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	if stagedRoot, verr = resetStaged(cliEnv, tbls, stagedRoot, headRoot); verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	printNotStaged(cliEnv, stagedRoot)
	return 0
}
// printNotStaged prints a summary of working-set changes that remain unstaged
// after a reset, one "label<TAB>table" line per removed or modified table.
func printNotStaged(cliEnv *env.DoltCLIEnv, staged *doltdb.RootValue) {
	// Printing here is best effort. Fail silently
	working, err := cliEnv.WorkingRoot()
	if err != nil {
		return
	}
	notStaged := NewTableDiffs(working, staged)
	// Only removed/modified tables are reported; tables classified as added
	// are filtered out below.
	if notStaged.numRemoved+notStaged.numModified > 0 {
		fmt.Println("Unstaged changes after reset:")
		lines := make([]string, 0, notStaged.Len())
		for _, tblName := range notStaged.sortedTables {
			tdt := notStaged.tableToType[tblName]
			if tdt != addedTable {
				lines = append(lines, fmt.Sprintf("%s\t%s", tdt.ShortLabel(), tblName))
			}
		}
		fmt.Println(strings.Join(lines, "\n"))
	}
}
// resetStaged overwrites the given tables in the staged root with their HEAD
// versions and persists the result as the new staged root.
func resetStaged(cliEnv *env.DoltCLIEnv, tbls []string, staged, head *doltdb.RootValue) (*doltdb.RootValue, errhand.VerboseError) {
	newStaged := staged.UpdateTablesFromOther(tbls, head)
	verr := cliEnv.UpdateStagedRoot(newStaged)
	return newStaged, verr
}
// getStagedAndHead loads both the staged root and the HEAD root, wrapping any
// lookup failure in a verbose error.
func getStagedAndHead(cliEnv *env.DoltCLIEnv) (*doltdb.RootValue, *doltdb.RootValue, errhand.VerboseError) {
	staged, err := cliEnv.StagedRoot()

	if err != nil {
		return nil, nil, errhand.BuildDError("Unable to get staged.").AddCause(err).Build()
	}

	head, err := cliEnv.HeadRoot()

	if err != nil {
		return nil, nil, errhand.BuildDError("Unable to get at HEAD.").AddCause(err).Build()
	}

	return staged, head, nil
}
+64
View File
@@ -0,0 +1,64 @@
package commands
import (
"flag"
"fmt"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"os"
)
// rmUsage builds the usage callback for the rm flag set; invoking it prints
// the registered flag defaults.
func rmUsage(fs *flag.FlagSet) func() {
	// The method value is already a func() bound to fs.
	return fs.PrintDefaults
}
// Rm implements the table rm command: it validates the tables named as
// positional arguments against the working root and removes them from the
// working set. Returns 0 on success and 1 on failure (message on stderr).
func Rm(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = rmUsage(fs)
	fs.Parse(args)

	// At least one table name is required.
	if fs.NArg() == 0 {
		fs.Usage()
		return 1
	}

	working, verr := getWorking(cliEnv)

	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	if verr = validateTables(fs.Args(), working); verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	if verr = removeTables(cliEnv, fs.Args(), working); verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	return 0
}
// getWorking fetches the working root, wrapping any failure in a verbose error.
func getWorking(cliEnv *env.DoltCLIEnv) (*doltdb.RootValue, errhand.VerboseError) {
	root, err := cliEnv.WorkingRoot()

	if err != nil {
		return nil, errhand.BuildDError("Unable to get working.").AddCause(err).Build()
	}

	return root, nil
}
// removeTables removes the named tables from the working root and persists
// the result as the new working root.
func removeTables(cliEnv *env.DoltCLIEnv, tables []string, working *doltdb.RootValue) errhand.VerboseError {
	// NOTE(review): "RemoveTabels" looks misspelled — confirm against the
	// doltdb.RootValue API before renaming it there and here.
	working, err := working.RemoveTabels(tables)
	if err != nil {
		return errhand.BuildDError("Unable to remove table(s)").AddCause(err).Build()
	}
	return cliEnv.UpdateWorkingRoot(working)
}
+72
View File
@@ -0,0 +1,72 @@
package commands
import (
"flag"
"fmt"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/typed/noms"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/untyped"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/untyped/fwt"
"os"
)
// showUsage returns the usage callback for the show command, which simply
// prints the flag set's defaults.
func showUsage(fs *flag.FlagSet) func() {
	printUsage := func() {
		fs.PrintDefaults()
	}
	return printUsage
}
// Show implements the "dolt show" command: it prints the contents of the
// table named by the required -table flag. Returns 0 on success, 1 on failure.
func Show(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = showUsage(fs)
	tblName := fs.String("table", "", "A table to show")
	fs.Parse(args)

	if *tblName == "" {
		fmt.Fprintln(os.Stderr, "Missing required parameter \"-table\"")
		return 1
	}

	working, verr := getWorking(cliEnv)
	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	if verr = printTable(working, *tblName); verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	return 0
}
// printTable streams the named table from the working root to stdout as a
// fixed-width, untyped text rendering. It returns a verbose error when the
// table does not exist or the schema mapping cannot be built.
func printTable(working *doltdb.RootValue, tblName string) errhand.VerboseError {
	// BUG FIX: the ok result of GetTable was previously discarded, so a
	// missing table caused a nil pointer dereference below.
	tbl, ok := working.GetTable(tblName)
	if !ok {
		return errhand.BuildDError("Unknown table: %s", tblName).Build()
	}

	tblSch := tbl.GetSchema(working.VRW())
	rd := noms.NewNomsMapReader(tbl.GetRowData(), tblSch)
	defer rd.Close()

	// Convert the typed rows to untyped (string) rows for display.
	mapping := untyped.TypedToUntypedMapping(tblSch)
	outSch := mapping.DestSch
	wr := fwt.NewTextWriter(os.Stdout, outSch, " | ")
	defer wr.Close()

	// BUG FIX: the error from NewRowConverter was previously ignored.
	rConv, err := table.NewRowConverter(mapping)
	if err != nil {
		return errhand.BuildDError("Unable to convert rows for display.").AddCause(err).Build()
	}

	transform := table.NewRowTransformer("schema mapping transform", rConv.TransformRow)
	autoSizeTransform := fwt.NewAutoSizingFWTTransformer(outSch, fwt.HashFillWhenTooLong, 0)

	// Stop the pipeline on the first row that fails to transform.
	badRowCB := func(transfName string, row *table.Row, errDetails string) (quit bool) {
		fmt.Fprintln(os.Stderr, color.RedString("Failed to transform row %s.", table.RowFmt(row)))
		return true
	}

	pipeline := table.StartAsyncPipeline(rd, []table.TransformFunc{transform, autoSizeTransform.TransformToFWT}, wr, badRowCB)
	pipeline.Wait()

	return nil
}
+206
View File
@@ -0,0 +1,206 @@
package commands
import (
"flag"
"fmt"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"os"
"sort"
"strings"
)
// statusUsage returns the usage callback for the status command, which simply
// prints the flag set's defaults.
func statusUsage(fs *flag.FlagSet) func() {
	printUsage := func() {
		fs.PrintDefaults()
	}
	return printUsage
}
// Status implements the "dolt status" command: it prints a git-style summary
// of staged and unstaged table changes. Returns 0 on success, 1 on failure.
func Status(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
	fs := flag.NewFlagSet(commandStr, flag.ExitOnError)
	fs.Usage = statusUsage(fs)
	fs.Parse(args)

	staged, notStaged, verr := getTableDiffs(cliEnv)
	if verr != nil {
		fmt.Fprintln(os.Stderr, verr.Verbose())
		return 1
	}

	printStatus(cliEnv, staged, notStaged)
	return 0
}
// tableDiffType classifies how a table differs between two root values.
type tableDiffType int

const (
	// addedTable marks a table present in the newer root but not the older.
	addedTable tableDiffType = iota
	// modifiedTable marks a table present in both roots with different data.
	modifiedTable
	// removedTable marks a table present in the older root but not the newer.
	removedTable
)
// Label returns the long, git-style status label for this diff type, or "?"
// for an unrecognized value.
func (tdt tableDiffType) Label() string {
	switch tdt {
	case addedTable:
		return "new table:"
	case modifiedTable:
		return "modified:"
	case removedTable:
		return "deleted:"
	default:
		return "?"
	}
}
// ShortLabel returns the single-letter status label for this diff type
// (N/M/D), or "?" for an unrecognized value.
func (tdt tableDiffType) ShortLabel() string {
	switch tdt {
	case addedTable:
		return "N"
	case modifiedTable:
		return "M"
	case removedTable:
		return "D"
	default:
		return "?"
	}
}
// tableDiffs summarizes the table-level differences between two root values.
type tableDiffs struct {
	numAdded    int // count of tables only in the newer root
	numModified int // count of tables present in both roots with changes
	numRemoved  int // count of tables only in the older root
	// tableToType maps each differing table name to its diff classification.
	tableToType map[string]tableDiffType
	// sortedTables holds every differing table name in sorted order.
	sortedTables []string
}
// NewTableDiffs diffs the tables of two root values and returns a summary of
// which tables were added, modified, or removed in newer relative to older.
func NewTableDiffs(newer, older *doltdb.RootValue) *tableDiffs {
	added, modified, removed := newer.TableDiff(older)
	total := len(added) + len(modified) + len(removed)

	// Classify each differing table.
	tblToType := make(map[string]tableDiffType, total)
	for _, tbl := range added {
		tblToType[tbl] = addedTable
	}
	for _, tbl := range modified {
		tblToType[tbl] = modifiedTable
	}
	for _, tbl := range removed {
		tblToType[tbl] = removedTable
	}

	// Build one sorted list of all differing table names.
	tbls := make([]string, 0, total)
	tbls = append(tbls, added...)
	tbls = append(tbls, modified...)
	tbls = append(tbls, removed...)
	sort.Strings(tbls)

	return &tableDiffs{
		numAdded:     len(added),
		numModified:  len(modified),
		numRemoved:   len(removed),
		tableToType:  tblToType,
		sortedTables: tbls,
	}
}
// Len returns the total number of differing tables.
func (td *tableDiffs) Len() int {
	return len(td.sortedTables)
}
// getTableDiffs computes the staged changes (HEAD vs staged) and unstaged
// changes (staged vs working) for the current repository.
func getTableDiffs(cliEnv *env.DoltCLIEnv) (*tableDiffs, *tableDiffs, errhand.VerboseError) {
	// BUG FIX: the original error strings were garbled ("Unable to the get at
	// HEAD."); reworded to match the phrasing used by the sibling helpers.
	headRoot, err := cliEnv.HeadRoot()
	if err != nil {
		return nil, nil, errhand.BuildDError("Unable to get at HEAD.").AddCause(err).Build()
	}

	stagedRoot, err := cliEnv.StagedRoot()
	if err != nil {
		return nil, nil, errhand.BuildDError("Unable to get staged.").AddCause(err).Build()
	}

	workingRoot, err := cliEnv.WorkingRoot()
	if err != nil {
		return nil, nil, errhand.BuildDError("Unable to get working.").AddCause(err).Build()
	}

	stagedDiffs := NewTableDiffs(stagedRoot, headRoot)
	notStagedDiffs := NewTableDiffs(workingRoot, stagedRoot)
	return stagedDiffs, notStagedDiffs, nil
}
// Header and line-format strings for the git-style status output.
const (
	branchHeader = "On branch %s\n"
	stagedHeader = `Changes to be committed:
  (use "dolt reset <table>..." to unstage)`
	workingHeader = `Changes not staged for commit:
  (use "dolt add <table>" to update what will be committed)
  (use "dolt checkout <table>" to discard changes in working directory)`
	untrackedHeader = `Untracked files:
  (use "dolt add <table>" to include in what will be committed)`
	// statusFmt renders one "<label> <table>" status line.
	statusFmt = "\t%-12s%s"
)
// printStatus renders a git-style status report: the current branch, staged
// changes (green), unstaged modifications/deletions (red), and untracked new
// tables (red). If nothing differs, a clean-tree message is printed instead.
func printStatus(cliEnv *env.DoltCLIEnv, staged, notStaged *tableDiffs) {
	fmt.Printf(branchHeader, cliEnv.RepoState.Branch)

	needGap := false
	if staged.Len() > 0 {
		fmt.Println(stagedHeader)
		fmt.Println(color.GreenString(strings.Join(statusLines(staged, nil), "\n")))
		needGap = true
	}

	if notStaged.numRemoved+notStaged.numModified > 0 {
		if needGap {
			fmt.Println()
		}
		fmt.Println(workingHeader)
		lines := statusLines(notStaged, func(tdt tableDiffType) bool { return tdt != addedTable })
		fmt.Println(color.RedString(strings.Join(lines, "\n")))
		needGap = true
	}

	if notStaged.numAdded > 0 {
		if needGap {
			fmt.Println()
		}
		fmt.Println(untrackedHeader)
		lines := statusLines(notStaged, func(tdt tableDiffType) bool { return tdt == addedTable })
		fmt.Println(color.RedString(strings.Join(lines, "\n")))
		needGap = true
	}

	if !needGap {
		fmt.Println("nothing to commit, working tree clean")
	}
}

// statusLines formats one status line per table in td whose diff type passes
// the include filter; a nil filter includes every table.
func statusLines(td *tableDiffs, include func(tableDiffType) bool) []string {
	lines := make([]string, 0, td.Len())
	for _, tblName := range td.sortedTables {
		tdt := td.tableToType[tblName]
		if include == nil || include(tdt) {
			lines = append(lines, fmt.Sprintf(statusFmt, tdt.Label(), tblName))
		}
	}
	return lines
}
+15
View File
@@ -0,0 +1,15 @@
package commands
import (
"fmt"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
)
// Version returns a command handler that prints the given version string and
// always succeeds.
func Version(version string) cli.CommandFunc {
	printVersion := func(commandStr string, args []string, cliEnv *env.DoltCLIEnv) int {
		fmt.Println("The current dolt version is", version)
		return 0
	}
	return printVersion
}
+2
View File
@@ -0,0 +1,2 @@
// dolt is a command line tool for working with dolt data repositories stored in noms.
package main
+49
View File
@@ -0,0 +1,49 @@
package main
import (
"flag"
"fmt"
"github.com/fatih/color"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/cli"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/commands"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/commands/edit"
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/filesys"
"os"
)
const (
	// Version is the dolt CLI version string reported by "dolt version".
	Version = "0.2"
)
// doltCommand routes each subcommand name to its handler. ReqRepo marks
// commands that must run from within an initialized data repository.
var doltCommand = cli.GenSubCommandHandler([]*cli.Command{
	{Name: "version", Desc: "Displays the current Dolt cli version", Func: commands.Version(Version), ReqRepo: false},
	{Name: "config", Desc: "Dolt configuration.", Func: commands.Config, ReqRepo: false},
	{Name: "init", Desc: "Create an empty Dolt data repository.", Func: commands.Init, ReqRepo: false},
	{Name: "status", Desc: "Show the working tree status.", Func: commands.Status, ReqRepo: true},
	{Name: "add", Desc: "Add table changes to the list of staged table changes.", Func: commands.Add, ReqRepo: true},
	{Name: "reset", Desc: "Remove table changes from the list of staged table changes.", Func: commands.Reset, ReqRepo: true},
	{Name: "commit", Desc: "Record changes to the repository", Func: commands.Commit, ReqRepo: true},
	{Name: "log", Desc: "Show commit logs", Func: commands.Log, ReqRepo: true},
	{Name: "rm", Desc: "Remove tables from the working set.", Func: commands.Rm, ReqRepo: true},
	{Name: "ls", Desc: "List tables in the working set.", Func: commands.Ls, ReqRepo: true},
	{Name: "show", Desc: "Show a table.", Func: commands.Show, ReqRepo: true},
	{Name: "diff", Desc: "Diff a table.", Func: commands.Diff, ReqRepo: true},
	{Name: "edit", Desc: "Create allows editing of tables.", Func: edit.Commands, ReqRepo: false},
})
// main loads the CLI environment, dispatches to the requested subcommand, and
// exits with that command's status code.
func main() {
	cliEnv := env.Load(env.GetCurrentUserHomeDir, filesys.LocalFS, doltdb.LocalDirDoltDB)

	if cliEnv.CfgLoadErr != nil {
		// BUG FIX: RedString is printf-style; the original call passed the
		// error with no format verb, producing "%!(EXTRA ...)" in the output.
		fmt.Fprintln(os.Stderr, color.RedString("Failed to load the global config. %v", cliEnv.CfgLoadErr))
		os.Exit(1)
	}

	flag.Parse()
	res := doltCommand("dolt", flag.Args(), cliEnv)
	fmt.Println()
	os.Exit(res)
}
+72
View File
@@ -0,0 +1,72 @@
package dtestutils
import (
"github.com/attic-labs/noms/go/types"
"github.com/google/uuid"
"github.com/liquidata-inc/ld/dolt/go/libraries/schema"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/untyped"
"strconv"
)
// Canned test fixture data: three rows of people, indexed in parallel across
// the slices below (UUIDS[i], Names[i], Ages[i], ... describe one row).
var UUIDS = []uuid.UUID{
	uuid.Must(uuid.Parse("00000000-0000-0000-0000-000000000000")),
	uuid.Must(uuid.Parse("00000000-0000-0000-0000-000000000001")),
	uuid.Must(uuid.Parse("00000000-0000-0000-0000-000000000002"))}
var Names = []string{"Bill Billerson", "John Johnson", "Rob Robertson"}
var Ages = []uint64{32, 25, 21}
var Titles = []string{"Senior Dufus", "Dufus", ""}
var MaritalStatus = []bool{true, false, false}

// UntypedSchema stores every column as a string; TypedSchema uses the
// natural noms kinds for each column.
var UntypedSchema = untyped.NewUntypedSchema([]string{"id", "name", "age", "title", "is_married"})
var TypedSchema = schema.NewSchema([]*schema.Field{
	schema.NewField("id", types.UUIDKind, true),
	schema.NewField("name", types.StringKind, true),
	schema.NewField("age", types.UintKind, true),
	schema.NewField("title", types.StringKind, false),
	schema.NewField("is_married", types.BoolKind, true),
})

// init marks the "id" column (index 0) as the primary key of both schemas.
func init() {
	TypedSchema.AddConstraint(schema.NewConstraint(schema.PrimaryKey, []int{0}))
	UntypedSchema.AddConstraint(schema.NewConstraint(schema.PrimaryKey, []int{0}))
}
// CreateTestDataTable builds an in-memory table populated with the canned
// fixture rows. When typed is true the rows use TypedSchema's native kinds;
// otherwise every value is stored as a string under UntypedSchema.
func CreateTestDataTable(typed bool) (*table.InMemTable, *schema.Schema) {
	sch := UntypedSchema
	if typed {
		sch = TypedSchema
	}

	imt := table.NewInMemTable(sch)
	for i := range UUIDS {
		var vals map[string]types.Value
		if typed {
			vals = map[string]types.Value{
				"id":         types.UUID(UUIDS[i]),
				"name":       types.String(Names[i]),
				"age":        types.Uint(Ages[i]),
				"title":      types.String(Titles[i]),
				"is_married": types.Bool(MaritalStatus[i]),
			}
		} else {
			vals = map[string]types.Value{
				"id":         types.String(UUIDS[i].String()),
				"name":       types.String(Names[i]),
				"age":        types.String(strconv.FormatUint(Ages[i], 10)),
				"title":      types.String(Titles[i]),
				"is_married": types.String(strconv.FormatBool(MaritalStatus[i])),
			}
		}
		imt.AppendRow(table.NewRow(table.RowDataFromValMap(sch, vals)))
	}

	return imt, sch
}
+36
View File
@@ -0,0 +1,36 @@
package dtestutils
import (
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/filesys"
)
const (
TestHomeDir = "/user/bheni"
WorkingDir = "/user/bheni/datasets/states"
)
// testHomeDirFunc is a HomeDirProvider that always returns the fixed test
// home directory and never fails.
func testHomeDirFunc() (string, error) {
	return TestHomeDir, nil
}
// CreateTestEnv creates an in-memory dolt environment whose global config
// contains a test user, and initializes a repository in it. It panics on any
// failure since it is only used from tests.
func CreateTestEnv() *env.DoltCLIEnv {
	const name = "billy bob"
	const email = "bigbillieb@fake.horse"

	initialDirs := []string{TestHomeDir, WorkingDir}
	fs := filesys.NewInMemFS(initialDirs, nil, WorkingDir)
	cliEnv := env.Load(testHomeDirFunc, fs, doltdb.InMemDoltDB)

	// BUG FIX: the ok result was previously discarded, which would have
	// caused a nil dereference on SetStrings if the global config was missing.
	cfg, ok := cliEnv.Config.GetConfig(env.GlobalConfig)
	if !ok {
		panic("Failed to get the global config")
	}
	cfg.SetStrings(map[string]string{
		env.UserNameKey:  name,
		env.UserEmailKey: email,
	})

	if err := cliEnv.InitRepo(name, email); err != nil {
		// Include the cause so a failing test points at the real problem.
		panic("Failed to initialize environment: " + err.Verbose())
	}

	return cliEnv
}
+75
View File
@@ -0,0 +1,75 @@
package dtestutils
import (
"github.com/liquidata-inc/ld/dolt/go/cmd/dolt/env"
"github.com/liquidata-inc/ld/dolt/go/libraries/table"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/typed/noms"
"testing"
)
// CheckResultTable verifies that the named table in cliEnv's working root
// contains exactly the rows of expectedTable, matching rows by primary key.
func CheckResultTable(t *testing.T, tableName string, cliEnv *env.DoltCLIEnv, expectedTable *table.InMemTable, pkInExpectedTable string) {
	root, err := cliEnv.WorkingRoot()
	if err != nil {
		t.Error("Could not get dolt working root value", err)
		// BUG FIX: the original fell through here and dereferenced a nil root.
		return
	}

	tbl, ok := root.GetTable(tableName)
	if !ok {
		t.Error("Could not find table")
		return
	}

	tblRdr := noms.NewNomsMapReader(tbl.GetRowData(), tbl.GetSchema(cliEnv.DoltDB.ValueReadWriter()))
	defer tblRdr.Close()

	CheckResultsAgainstReader(t, tblRdr, tblRdr.GetSchema().GetPKIndex(), expectedTable, pkInExpectedTable)
}
// CheckResultsAgainstReader reads all rows from tblRdr and from the expected
// in-memory table, keys each set by its primary key, and fails the test if
// the two sets differ in size, keys, or row contents. Comparison stops at the
// first mismatching key or row.
func CheckResultsAgainstReader(t *testing.T, tblRdr table.TableReadCloser, tblPKIdx int, expectedTable *table.InMemTable, pkInExpectedTable string) {
	expectedRdr := table.NewInMemTableReader(expectedTable)
	defer expectedRdr.Close()

	expectedPKIdx := expectedRdr.GetSchema().GetFieldIndex(pkInExpectedTable)

	// Key both row sets by primary key so they can be compared pairwise.
	resultRowMap, _, err := table.ReadAllRowsToMap(tblRdr, tblPKIdx, false)
	if err != nil {
		t.Error("Could not read all rows from table to map.", err)
		return
	}

	expectedRowMap, _, err := table.ReadAllRowsToMap(expectedRdr, expectedPKIdx, false)
	if err != nil {
		t.Error("Could not read all expected rows to a map.", err)
		return
	}

	if len(resultRowMap) != len(expectedRowMap) {
		t.Error("unexpected number of rows in map.")
		return
	}

	for pk, expectedRows := range expectedRowMap {
		actualRows, pkOk := resultRowMap[pk]
		if !pkOk {
			t.Error("Could not find row with key", pk, "in results.")
			break
		}

		// Each primary key is expected to map to exactly one row on both sides.
		if len(actualRows) != 1 || len(expectedRows) != 1 {
			t.Error("num rows with key", pk, "does not match expectation.")
			break
		}

		expectedRow := expectedRows[0]
		actualRow := actualRows[0]
		if !table.RowsEqualIgnoringSchema(expectedRow, actualRow) {
			t.Error(table.RowFmt(expectedRow), "!=", table.RowFmt(actualRow))
			break
		}
	}
}
+144
View File
@@ -0,0 +1,144 @@
package env
import (
"errors"
"github.com/liquidata-inc/ld/dolt/go/libraries/config"
"github.com/liquidata-inc/ld/dolt/go/libraries/filesys"
"github.com/liquidata-inc/ld/dolt/go/libraries/set"
"strings"
)
const (
	// Names under which the config elements are registered in the hierarchy.
	localConfigName  = "local"
	globalConfigName = "global"

	// Well-known config keys for the committing user's identity.
	UserEmailKey = "user.email"
	UserNameKey  = "user.name"
)

// Whitelists of the keys that may be set in each config scope.
var LocalConfigWhitelist = set.NewStrSet([]string{UserNameKey, UserEmailKey})
var GlobalConfigWhitelist = set.NewStrSet([]string{UserNameKey, UserEmailKey})
// DoltConfigElement is an enum representing the elements that make up the ConfigHierarchy
type DoltConfigElement int

const (
	// LocalConfig is the repository's local config portion of the ConfigHierarchy
	LocalConfig DoltConfigElement = iota

	// GlobalConfig is the user's global config portion of the ConfigHierarchy
	GlobalConfig
)
// String gives the string name of an element that was used when it was added
// to the ConfigHierarchy, which is the same name that is used to retrieve that
// element of the string hierarchy. Unknown elements map to the empty string.
func (ce DoltConfigElement) String() string {
	switch ce {
	case GlobalConfig:
		return globalConfigName
	case LocalConfig:
		return localConfigName
	default:
		return ""
	}
}
// DoltCliConfig is the config for the cli
type DoltCliConfig struct {
	// Embedded read-only view over the merged config hierarchy.
	config.ReadableConfig

	ch *config.ConfigHierarchy // the underlying global/local hierarchy
	fs filesys.ReadWriteFS     // filesystem used to read/write config files
}
// loadDoltCliConfig builds the CLI config hierarchy from the user's global
// config file (created if absent) and, when present, the repository-local
// config file. Local config load failures are deliberately best-effort.
func loadDoltCliConfig(hdp HomeDirProvider, fs filesys.ReadWriteFS) (*DoltCliConfig, error) {
	ch := config.NewConfigHierarchy()

	// BUG FIX: the error from getGlobalCfgPath was previously ignored and
	// silently overwritten by the next assignment.
	gPath, err := getGlobalCfgPath(hdp)
	if err != nil {
		return nil, err
	}

	gCfg, err := ensureGlobalConfig(gPath, fs)
	if err != nil {
		return nil, err
	}
	ch.AddConfig(globalConfigName, gCfg)

	// The local config is optional; skip it silently if missing or unreadable.
	lPath := getLocalConfigPath()
	if exists, _ := fs.Exists(lPath); exists {
		if lCfg, err := config.FromFile(lPath, fs); err == nil {
			ch.AddConfig(localConfigName, lCfg)
		}
	}

	return &DoltCliConfig{ch, ch, fs}, nil
}
// ensureGlobalConfig loads the global config file at path, creating an empty
// one if it does not exist. It fails if a directory occupies the path.
func ensureGlobalConfig(path string, fs filesys.ReadWriteFS) (config.ReadWriteConfig, error) {
	exists, isDir := fs.Exists(path)
	if !exists {
		return config.NewFileConfig(path, fs, map[string]string{})
	}
	if isDir {
		return nil, errors.New("A directory exists where this file should be. path: " + path)
	}
	return config.FromFile(path, fs)
}
// CreateLocalConfig creates a new repository local config file. The current directory must have already been initialized
// as a data repository before a local config can be created.
func (dcc *DoltCliConfig) CreateLocalConfig(vals map[string]string) error {
	exists, isDir := dcc.fs.Exists(getDoltDir())
	if !exists {
		return errors.New(DoltDir + " directory not found. Is the current directory a repository directory?")
	}
	if !isDir {
		return errors.New("A file exists with the name \"" + DoltDir + "\". This is not a valid file within a data repository directory.")
	}

	cfg, err := config.NewFileConfig(getLocalConfigPath(), dcc.fs, vals)
	if err != nil {
		return err
	}

	// Register the new file as the local element of the hierarchy.
	dcc.ch.AddConfig(localConfigName, cfg)
	return nil
}
// GetConfig retrieves a specific element of the config hierarchy. The bool
// result is false when that element has not been added to the hierarchy.
func (dcc *DoltCliConfig) GetConfig(element DoltConfigElement) (config.ReadWriteConfig, bool) {
	return dcc.ch.GetConfig(element.String())
}
// GetStringOrDefault retrieves a string from the config hierarchy and returns
// it if available. Otherwise it returns the default string value.
func (dcc *DoltCliConfig) GetStringOrDefault(key, defStr string) *string {
	if val, err := dcc.ch.GetString(key); err == nil {
		return &val
	}
	return &defStr
}
// IfEmptyUseConfig looks at a string's value and, if it is empty (after
// trimming whitespace), tries to return a value from the config hierarchy.
// If the key is also missing from the config, a pointer to an empty string
// is returned.
func (dcc *DoltCliConfig) IfEmptyUseConfig(val, key string) *string {
	if strings.TrimSpace(val) != "" {
		return &val
	}

	cfgVal, err := dcc.ch.GetString(key)
	if err != nil {
		empty := ""
		return &empty
	}
	return &cfgVal
}
+38
View File
@@ -0,0 +1,38 @@
package env
import "testing"
// Fixture identity values used by the config tests below.
const (
	email = "bigbillieb@fake.horse"
	name  = "Billy Bob"
)
// TestConfig exercises the merged config hierarchy: values written into the
// local and global configs should be visible through the DoltCliConfig
// lookup helpers, with defaults returned for missing keys.
func TestConfig(t *testing.T) {
	cliEnv := createTestEnv(true, true)

	lCfg, _ := cliEnv.Config.GetConfig(LocalConfig)
	gCfg, _ := cliEnv.Config.GetConfig(GlobalConfig)

	// Seed the two scopes with different keys.
	lCfg.SetStrings(map[string]string{UserEmailKey: email})
	gCfg.SetStrings(map[string]string{UserNameKey: name})

	// A key present in the hierarchy wins over the supplied default.
	if *cliEnv.Config.GetStringOrDefault(UserEmailKey, "no") != email {
		t.Error("Should return", email)
	}

	// A missing key falls back to the supplied default.
	if *cliEnv.Config.GetStringOrDefault("bad_key", "yes") != "yes" {
		t.Error("Should return default value of yes")
	}

	// An empty value is replaced by the configured one.
	if *cliEnv.Config.IfEmptyUseConfig("", UserEmailKey) != email {
		t.Error("Should return", email)
	}

	// A non-empty value is kept as-is.
	if *cliEnv.Config.IfEmptyUseConfig("not empty", UserEmailKey) != "not empty" {
		t.Error("Should return default value")
	}

	// Empty value plus missing key yields the empty string.
	if *cliEnv.Config.IfEmptyUseConfig("", "missing") != "" {
		t.Error("Should return empty string")
	}
}
+2
View File
@@ -0,0 +1,2 @@
// Package env provides access to get / set configuration and data repository state.
package env
+218
View File
@@ -0,0 +1,218 @@
package env
import (
"github.com/attic-labs/noms/go/hash"
"github.com/attic-labs/noms/go/types"
"github.com/liquidata-inc/ld/dolt/go/libraries/doltdb"
"github.com/liquidata-inc/ld/dolt/go/libraries/errhand"
"github.com/liquidata-inc/ld/dolt/go/libraries/filesys"
"github.com/liquidata-inc/ld/dolt/go/libraries/schema"
"github.com/liquidata-inc/ld/dolt/go/libraries/table/typed/noms"
)
const (
	// The directory where configuration and state information will be written within a data repo directory
	DoltDir = ".dolt"
)
// DoltCLIEnv holds the state of the current environment used by the cli.
type DoltCLIEnv struct {
	Config     *DoltCliConfig // merged global/local config; nil if loading failed
	CfgLoadErr error          // error from loading the config, if any

	RepoState *RepoState // current branch / working / staged state
	RSLoadErr error      // error from loading the repo state, if any

	DoltDB *doltdb.DoltDB // the underlying noms-backed database

	FS filesys.Filesys // filesystem abstraction for all file access
}
// Load loads the DoltCLIEnv for the current directory of the cli. Config and
// repo-state load errors are recorded on the returned env rather than failing.
func Load(hdp HomeDirProvider, fs filesys.Filesys, loc doltdb.DoltDBLocation) *DoltCLIEnv {
	cfg, cfgErr := loadDoltCliConfig(hdp, fs)
	rs, rsErr := LoadRepoState(fs)

	return &DoltCLIEnv{
		Config:     cfg,
		CfgLoadErr: cfgErr,
		RepoState:  rs,
		RSLoadErr:  rsErr,
		DoltDB:     doltdb.LoadDoltDB(loc),
		FS:         fs,
	}
}
// HasLDDir returns true if the DoltDir directory exists and is a valid directory
func (cliEnv *DoltCLIEnv) HasLDDir() bool {
	exists, isDir := cliEnv.FS.Exists(getDoltDir())
	return exists && isDir
}
// IsCWDEmpty returns whether the current working directory is empty or not.
func (cliEnv *DoltCLIEnv) IsCWDEmpty() bool {
	isEmpty := true
	// Stop iterating as soon as any entry is seen.
	cliEnv.FS.Iter("./", true, func(_ string, _ int64, _ bool) bool {
		isEmpty = false
		return true
	})
	return isEmpty
}
// HasLocalConfig returns true if a repository local config file has been
// loaded into the config hierarchy.
func (cliEnv *DoltCLIEnv) HasLocalConfig() bool {
	_, ok := cliEnv.Config.GetConfig(LocalConfig)
	return ok
}
// bestEffortDeleteAllFromCWD deletes everything in the current working
// directory, ignoring any deletion errors. Used to roll back a partially
// initialized repository.
func (cliEnv *DoltCLIEnv) bestEffortDeleteAllFromCWD() {
	// Collect entries first so deletion does not happen while iterating.
	fileToIsDir := make(map[string]bool)
	cliEnv.FS.Iter("./", false, func(path string, size int64, isDir bool) (stop bool) {
		fileToIsDir[path] = isDir
		return false
	})

	for path, isDir := range fileToIsDir {
		if isDir {
			cliEnv.FS.Delete(path, true)
		} else {
			cliEnv.FS.DeleteFile(path)
		}
	}
}
// InitRepo takes an empty directory and initializes it with a .dolt directory containing repo state, and creates a noms
// database with dolt structure. On any failure after directory creation the
// partially created contents are removed on a best-effort basis.
func (cliEnv *DoltCLIEnv) InitRepo(name, email string) errhand.VerboseError {
	if !cliEnv.IsCWDEmpty() {
		bdr := errhand.BuildDError("Unable to initialize the current directory.")
		// BUG FIX: "directoriese" typo in the user-facing detail text.
		bdr.AddDetails("dolt will only allow empty directories to be initialized.")
		return bdr.Build()
	}

	err := cliEnv.FS.MkDirs(DoltDir)
	if err != nil {
		bdr := errhand.BuildDError("Unable to make directory %s within the working directory.", DoltDir)
		return bdr.AddCause(err).Build()
	}

	err = cliEnv.Config.CreateLocalConfig(map[string]string{})
	if err != nil {
		cliEnv.bestEffortDeleteAllFromCWD()
		bdr := errhand.BuildDError("Failed to create an empty local data repository configuration file.")
		bdr.AddDetails("Failed creating file %s.", getLocalConfigPath())
		return bdr.AddCause(err).Build()
	}

	err = cliEnv.DoltDB.WriteEmptyRepo(name, email)
	if err != nil {
		cliEnv.bestEffortDeleteAllFromCWD()
		bdr := errhand.BuildDError("Unable to create the local data repository.")
		return bdr.AddCause(err).Build()
	}

	// BUG FIX: errors from NewCommitSpec and Resolve were previously
	// discarded, which would panic below on a nil commit.
	cs, err := doltdb.NewCommitSpec("HEAD", "master")
	if err != nil {
		cliEnv.bestEffortDeleteAllFromCWD()
		return errhand.BuildDError("Unable to resolve the initial commit.").AddCause(err).Build()
	}

	commit, err := cliEnv.DoltDB.Resolve(cs)
	if err != nil {
		cliEnv.bestEffortDeleteAllFromCWD()
		return errhand.BuildDError("Unable to resolve the initial commit.").AddCause(err).Build()
	}

	rootHash := commit.GetRootValue().HashOf()
	cliEnv.RepoState, err = CreateRepoState(cliEnv.FS, "master", rootHash)
	if err != nil {
		cliEnv.bestEffortDeleteAllFromCWD()
		bdr := errhand.BuildDError("Unable to write the initial repository state.")
		bdr.AddDetails("Failed creating file %s.", getRepoStateFile())
		return bdr.AddCause(err).Build()
	}

	return nil
}
// WorkingRoot reads the working root value from the database using the hash
// recorded in the repo state.
func (cliEnv *DoltCLIEnv) WorkingRoot() (*doltdb.RootValue, error) {
	workingHash := hash.Parse(cliEnv.RepoState.Working)
	return cliEnv.DoltDB.ReadRootValue(workingHash)
}
// UpdateWorkingRoot writes newRoot to the database and records its hash as
// the new working root in the repo state file.
func (cliEnv *DoltCLIEnv) UpdateWorkingRoot(newRoot *doltdb.RootValue) errhand.VerboseError {
	h, err := cliEnv.DoltDB.WriteRootValue(newRoot)
	if err != nil {
		return errhand.BuildDError("Unable to write table to the noms DB.").AddCause(err).Build()
	}

	cliEnv.RepoState.Working = h.String()
	if err = cliEnv.RepoState.Save(); err != nil {
		return errhand.BuildDError("Unable to save an updated working value to the local data repositories state.").AddCause(err).Build()
	}

	return nil
}
// HeadRoot resolves the commit at the head of the current branch and returns
// its root value.
func (cliEnv *DoltCLIEnv) HeadRoot() (*doltdb.RootValue, error) {
	// BUG FIX: the error from NewCommitSpec was previously ignored; a bad
	// branch name would have passed a nil spec to Resolve.
	cs, err := doltdb.NewCommitSpec("head", cliEnv.RepoState.Branch)
	if err != nil {
		return nil, err
	}

	commit, err := cliEnv.DoltDB.Resolve(cs)
	if err != nil {
		return nil, err
	}

	return commit.GetRootValue(), nil
}
// StagedRoot reads the staged root value from the database using the hash
// recorded in the repo state.
func (cliEnv *DoltCLIEnv) StagedRoot() (*doltdb.RootValue, error) {
	stagedHash := hash.Parse(cliEnv.RepoState.Staged)
	return cliEnv.DoltDB.ReadRootValue(stagedHash)
}
// UpdateStagedRoot writes newRoot to the database and records its hash as
// the new staged root in the repo state file.
func (cliEnv *DoltCLIEnv) UpdateStagedRoot(newRoot *doltdb.RootValue) errhand.VerboseError {
	h, err := cliEnv.DoltDB.WriteRootValue(newRoot)
	if err != nil {
		return errhand.BuildDError("Unable to write table to the noms DB.").AddCause(err).Build()
	}

	cliEnv.RepoState.Staged = h.String()
	if err = cliEnv.RepoState.Save(); err != nil {
		return errhand.BuildDError("Unable to save an updated staged value to the local data repositories state.").AddCause(err).Build()
	}

	return nil
}
// PutTableToWorking stores the given rows and schema as a table named
// tableName in the working root, then persists the updated root as the new
// working state.
func (cliEnv *DoltCLIEnv) PutTableToWorking(rows types.Map, sch *schema.Schema, tableName string) errhand.VerboseError {
	root, err := cliEnv.WorkingRoot()
	if err != nil {
		return errhand.BuildDError("Unable to get working root.").AddCause(err).Build()
	}

	vrw := cliEnv.DoltDB.ValueReadWriter()
	schVal, err := noms.MarshalAsNomsValue(vrw, sch)
	if err != nil {
		return errhand.BuildDError("Unable to marshal schema as noms value.").AddCause(err).Build()
	}

	tbl := doltdb.NewTable(vrw, schVal, rows)
	newRoot := root.PutTable(cliEnv.DoltDB, tableName, tbl)
	return cliEnv.UpdateWorkingRoot(newRoot)
}

Some files were not shown because too many files have changed in this diff Show More