mirror of https://codeberg.org/shroff/phylum.git
Clean up some queries
@@ -27,19 +27,19 @@ func New(q phylumsql.Queries, cs ContentStore, prefix string) Pgfs {
 }
 
 func (p Pgfs) Open(ctx context.Context, name string) (io.ReadCloser, error) {
-	resource, err := p.getResource(ctx, name)
+	resource, err := p.resourceByPath(ctx, name)
 	if err != nil {
 		return nil, err
 	}
 	return p.cs.Open(resource.ID)
 }
 func (p Pgfs) Stat(ctx context.Context, name string) (*webdav.FileInfo, error) {
-	resource, err := p.getResource(ctx, name)
+	resource, err := p.resourceByPath(ctx, name)
 	if err != nil {
 		return nil, err
 	}
 	val := &webdav.FileInfo{
-		Path:    string(resource.Path),
+		Path:    string(p.prefix + resource.Path),
 		Size:    int64(resource.Size.Int32),
 		ModTime: resource.Modified.Time,
 		IsDir:   resource.Dir,
@@ -49,26 +49,23 @@ func (p Pgfs) Stat(ctx context.Context, name string) (*webdav.FileInfo, error) {
 	return val, nil
 }
 func (p Pgfs) ReadDir(ctx context.Context, name string, recursive bool) ([]webdav.FileInfo, error) {
-	dir, err := p.getResource(ctx, name)
+	dir, err := p.resourceByPath(ctx, name)
 	if err != nil {
 		return nil, err
 	}
 	if !dir.Dir {
 		return nil, fs.ErrInvalid
 	}
-	maxDepth := 1
-	if recursive {
-		maxDepth = 1000
-	}
-	children, err := p.q.ReadDir(ctx, phylumsql.ReadDirParams{PathPrefix: dir.Path + "/", ID: dir.ID, MaxDepth: int32(maxDepth)})
+	children, err := p.q.ReadDir(ctx, phylumsql.ReadDirParams{ID: dir.ID, Recursive: recursive})
 	if err != nil {
 		return nil, err
 	}
 
 	result := make([]webdav.FileInfo, len(children))
+	prefix := p.prefix + dir.Path
 	for i, c := range children {
 		result[i] = webdav.FileInfo{
-			Path:    string(c.Path),
+			Path:    string(prefix + c.Path),
 			Size:    int64(c.Size.Int32),
 			ModTime: c.Modified.Time,
 			IsDir:   c.Dir,
@@ -79,7 +76,7 @@ func (p Pgfs) ReadDir(ctx context.Context, name string, recursive bool) ([]webdav.FileInfo, error) {
 	return result, nil
 }
 func (p Pgfs) Create(ctx context.Context, name string) (io.WriteCloser, error) {
-	resource, _ := p.getResource(ctx, name)
+	resource, _ := p.resourceByPath(ctx, name)
 	if resource != nil {
 		return p.cs.Create(resource.ID, func(h hash.Hash, err error) {
 			if err != nil {
@@ -96,7 +93,7 @@ func (p Pgfs) Create(ctx context.Context, name string) (io.WriteCloser, error) {
 	}
 	index := strings.LastIndex(name, "/")
 	parentPath := name[0:index]
-	parent, err := p.getResource(ctx, parentPath)
+	parent, err := p.resourceByPath(ctx, parentPath)
 	if err != nil {
 		return nil, fs.ErrNotExist
 	}
@@ -123,7 +120,7 @@ func (p Pgfs) Create(ctx context.Context, name string) (io.WriteCloser, error) {
 }
 
 func (p Pgfs) RemoveAll(ctx context.Context, name string) error {
-	resource, _ := p.getResource(ctx, name)
+	resource, _ := p.resourceByPath(ctx, name)
 	if resource == nil {
 		return fs.ErrNotExist
 	}
@@ -131,13 +128,13 @@ func (p Pgfs) RemoveAll(ctx context.Context, name string) error {
 }
 
 func (p Pgfs) Mkdir(ctx context.Context, name string) error {
-	resource, _ := p.getResource(ctx, name)
+	resource, _ := p.resourceByPath(ctx, name)
 	if resource != nil {
 		return fs.ErrExist
 	}
 	index := strings.LastIndex(name, "/")
 	parentPath := name[0:index]
-	parent, err := p.getResource(ctx, parentPath)
+	parent, err := p.resourceByPath(ctx, parentPath)
 	if err != nil {
 		return fs.ErrNotExist
 	}
@@ -154,7 +151,7 @@ func (p Pgfs) Move(ctx context.Context, name, dest string, options *webdav.MoveO
 	return false, nil
 }
 
-func (p Pgfs) getResource(ctx context.Context, name string) (*phylumsql.ResourceByPathRow, error) {
+func (p Pgfs) resourceByPath(ctx context.Context, name string) (*phylumsql.ResourceByPathRow, error) {
 	path := strings.TrimPrefix(name, p.prefix+"/")
 	segments := strings.Split(strings.TrimRight(path, "/"), "/")
 	if len(segments) == 0 {
@@ -167,7 +164,7 @@ func (p Pgfs) getResource(ctx context.Context, name string) (*phylumsql.ResourceByPathRow, error) {
 	}
 
 	//TODO: Permissions checks
-	res, err := p.q.ResourceByPath(ctx, phylumsql.ResourceByPathParams{PathPrefix: p.prefix + "/", Search: segments, Root: root.ID})
+	res, err := p.q.ResourceByPath(ctx, phylumsql.ResourceByPathParams{Search: segments, Root: root.ID})
 	if err != nil {
 		return nil, os.ErrNotExist
 	}
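
A minimal caller-side sketch of the reworked ReadDir, assuming the Pgfs methods shown in the hunks above; the import path, the "/dav" prefix, and the directory name are placeholders, not part of the commit:

package example

import (
	"context"
	"fmt"

	"example.invalid/phylum/pgfs" // assumed import path for the package above
)

// listDir drives ReadDir with and without the new recursive flag.
func listDir(ctx context.Context, p pgfs.Pgfs) error {
	// recursive=false: the query caps the CTE at depth < 1, so only the
	// directory's direct children are returned.
	children, err := p.ReadDir(ctx, "/dav/photos", false)
	if err != nil {
		return err
	}
	for _, c := range children {
		fmt.Println(c.Path, c.IsDir)
	}
	// recursive=true: the same query raises the cap to depth < 1000 and
	// walks the whole subtree in one round trip.
	all, err := p.ReadDir(ctx, "/dav/photos", true)
	if err != nil {
		return err
	}
	fmt.Printf("%d descendants\n", len(all))
	return nil
}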
@@ -144,23 +144,22 @@ func (q *Queries) FindRoot(ctx context.Context, name string) (Resource, error) {
 }
 
 const readDir = `-- name: ReadDir :many
-
 WITH RECURSIVE nodes(id, parent, name, dir, created, modified, size, etag, depth, path) AS (
-SELECT r.id, r.parent, r.name, r.dir, r.created, r.modified, r.size, r.etag, 1, concat($1::text || r.name)::text
-FROM resources r WHERE r.parent = $2::uuid AND deleted IS NULL
+SELECT r.id, r.parent, r.name, r.dir, r.created, r.modified, r.size, r.etag, 0, ''::text
+FROM resources r WHERE r.id = $1::uuid
 UNION ALL
 SELECT r.id, r.parent, r.name, r.dir, r.created, r.modified, r.size, r.etag, n.depth + 1, concat(n.path, '/', r.name)
 FROM resources r JOIN nodes n on r.parent = n.id
 WHERE deleted IS NULL
-AND depth < $3::int
+-- AND depth < @max_depth::int
+AND depth < CASE WHEN $2::boolean THEN 1000 ELSE 1 END
 )
 SELECT id, parent, name, dir, created, modified, size, etag, depth, path from nodes
 `
 
 type ReadDirParams struct {
-	PathPrefix string
-	ID         uuid.UUID
-	MaxDepth   int32
+	ID        uuid.UUID
+	Recursive bool
 }
 
 type ReadDirRow struct {
@@ -176,9 +175,8 @@ type ReadDirRow struct {
 	Path string
 }
 
-// SELECT * from resources WHERE deleted IS NULL AND parent = $1;
 func (q *Queries) ReadDir(ctx context.Context, arg ReadDirParams) ([]ReadDirRow, error) {
-	rows, err := q.db.Query(ctx, readDir, arg.PathPrefix, arg.ID, arg.MaxDepth)
+	rows, err := q.db.Query(ctx, readDir, arg.ID, arg.Recursive)
 	if err != nil {
 		return nil, err
 	}
@@ -210,8 +208,8 @@ func (q *Queries) ReadDir(ctx context.Context, arg ReadDirParams) ([]ReadDirRow, error) {
 
 const resourceByPath = `-- name: ResourceByPath :one
 WITH RECURSIVE nodes(id, parent, name, dir, created, modified, size, etag, depth, path, search) AS (
-SELECT r.id, r.parent, r.name, r.dir, r.created, r.modified, r.size, r.etag, 0, concat($1::text, r.name)::text, $2::text[]
-FROM resources r WHERE r.id = $3::uuid
+SELECT r.id, r.parent, r.name, r.dir, r.created, r.modified, r.size, r.etag, 0, concat('/', r.name)::text, $1::text[]
+FROM resources r WHERE r.id = $2::uuid
 UNION ALL
 SELECT r.id, r.parent, r.name, r.dir, r.created, r.modified, r.size, r.etag, n.depth + 1, concat(n.path, '/', r.name), n.search
 FROM resources r JOIN nodes n on r.parent = n.id
@@ -222,9 +220,8 @@ SELECT id, parent, name, dir, created, modified, size, etag, depth, path, search
 `
 
 type ResourceByPathParams struct {
-	PathPrefix string
-	Search     []string
-	Root       uuid.UUID
+	Search []string
+	Root   uuid.UUID
 }
 
 type ResourceByPathRow struct {
@@ -242,7 +239,7 @@ type ResourceByPathRow struct {
 }
 
 func (q *Queries) ResourceByPath(ctx context.Context, arg ResourceByPathParams) (ResourceByPathRow, error) {
-	row := q.db.QueryRow(ctx, resourceByPath, arg.PathPrefix, arg.Search, arg.Root)
+	row := q.db.QueryRow(ctx, resourceByPath, arg.Search, arg.Root)
 	var i ResourceByPathRow
 	err := row.Scan(
 		&i.ID,
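
For reference, a rough sketch of calling the regenerated wrapper directly; ReadDirParams{ID, Recursive} and ReadDirRow.Path come from the hunk above, while phylumsql.New, the pgx pool, and the connection string are assumptions about the surrounding project:

package example

import (
	"context"
	"log"

	"github.com/google/uuid"
	"github.com/jackc/pgx/v5/pgxpool"

	"example.invalid/phylum/phylumsql" // assumed import path for the generated code
)

func dumpTree(ctx context.Context, dsn string, dirID uuid.UUID) error {
	pool, err := pgxpool.New(ctx, dsn) // assumed: the Queries run against a pgx pool
	if err != nil {
		return err
	}
	defer pool.Close()

	q := phylumsql.New(pool) // assumed sqlc-style constructor
	// Recursive: true selects the depth < 1000 branch of the CASE expression;
	// false limits the walk to direct children (depth < 1).
	rows, err := q.ReadDir(ctx, phylumsql.ReadDirParams{ID: dirID, Recursive: true})
	if err != nil {
		return err
	}
	for _, r := range rows {
		log.Println(r.Path)
	}
	return nil
}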
@@ -31,23 +31,21 @@ RETURNING *;
 
 
 -- name: ReadDir :many
--- SELECT * from resources WHERE deleted IS NULL AND parent = $1;
-
--- name: ReadDirRecursive :many
 WITH RECURSIVE nodes(id, parent, name, dir, created, modified, size, etag, depth, path) AS (
-SELECT r.id, r.parent, r.name, r.dir, r.created, r.modified, r.size, r.etag, 1, concat(@path_prefix::text || r.name)::text
-FROM resources r WHERE r.parent = @id::uuid AND deleted IS NULL
+SELECT r.id, r.parent, r.name, r.dir, r.created, r.modified, r.size, r.etag, 0, ''::text
+FROM resources r WHERE r.id = @id::uuid
 UNION ALL
 SELECT r.id, r.parent, r.name, r.dir, r.created, r.modified, r.size, r.etag, n.depth + 1, concat(n.path, '/', r.name)
 FROM resources r JOIN nodes n on r.parent = n.id
 WHERE deleted IS NULL
-AND depth < @max_depth::int
+-- AND depth < @max_depth::int
+AND depth < CASE WHEN @recursive::boolean THEN 1000 ELSE 1 END
 )
 SELECT * from nodes;
 
 -- name: ResourceByPath :one
 WITH RECURSIVE nodes(id, parent, name, dir, created, modified, size, etag, depth, path, search) AS (
-SELECT r.id, r.parent, r.name, r.dir, r.created, r.modified, r.size, r.etag, 0, concat(@path_prefix::text, r.name)::text, @search::text[]
+SELECT r.id, r.parent, r.name, r.dir, r.created, r.modified, r.size, r.etag, 0, concat('/', r.name)::text, @search::text[]
 FROM resources r WHERE r.id = @root::uuid
 UNION ALL
 SELECT r.id, r.parent, r.name, r.dir, r.created, r.modified, r.size, r.etag, n.depth + 1, concat(n.path, '/', r.name), n.search
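
The commented-out @max_depth predicate and the new CASE expression encode the same cut-off that the removed maxDepth block in ReadDir used to compute on the Go side; roughly, in Go terms (1000 kept as the query's own "effectively unbounded" sentinel):

package example

// maxDepth mirrors the CASE expression in the ReadDir CTE: a recursive
// listing may descend up to 1000 levels, a plain listing stops after the
// direct children.
func maxDepth(recursive bool) int {
	if recursive {
		return 1000
	}
	return 1
}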