Implementing guest upload / file requests (#337)

* Add API call to download files and optionally not increasing counter, added Download API permission

* Added option for presigned URLs and add download button in main menu

* Breaking: Check that chunks are at least 5MB

* Added docs

* A lot of refactoring and minor fixes
This commit is contained in:
Marc Bulling
2026-01-26 16:23:07 +01:00
committed by GitHub
parent e2ec083f1e
commit 325849b94a
101 changed files with 7540 additions and 1005 deletions
+6 -6
View File
@@ -72,16 +72,16 @@ func getPaths() []converter {
Name: "wasm_exec JS",
})
result = append(result, converter{
InputPath: pathPrefix + "js/dateformat.js",
OutputPath: pathPrefix + "js/min/dateformat.min.js",
InputPath: pathPrefix + "js/all_public.js",
OutputPath: pathPrefix + "js/min/all_public.min.js",
Type: "text/javascript",
Name: "Dateformat JS",
Name: "Public functions JS",
})
result = append(result, converter{
InputPath: pathPrefix + "js/uuid.js",
OutputPath: pathPrefix + "js/min/uuid.min.js",
InputPath: pathPrefix + "js/public_upload.js",
OutputPath: pathPrefix + "js/min/public_upload.min.js",
Type: "text/javascript",
Name: "UUID JS",
Name: "Public upload JS",
})
return result
}
+45 -13
View File
@@ -81,14 +81,28 @@ func hasRequiredTag(tags []string) bool {
return false
}
func headerExists(headerName string, required, isString bool) string {
// hasBase64Tag reports whether the parsed struct-tag parts enable base64
// support for a header field, i.e. contain a "supportBase64" entry that is
// not explicitly set to false.
func hasBase64Tag(tags []string) bool {
	// Check if the tag contains "supportBase64:true"
	for _, tag := range tags {
		if !strings.HasPrefix(tag, "supportBase64") {
			continue
		}
		// Inspect the value so that "supportBase64:false" does not
		// accidentally enable base64 handling (a prefix-only check would
		// treat any value, including false, as true).
		value := strings.Trim(strings.TrimPrefix(tag, "supportBase64"), `:"`)
		return value == "" || value == "true"
	}
	return false
}
func headerExists(headerName string, required, isString, base64Support bool) string {
base64SupportEntry := ""
if base64Support {
base64SupportEntry = ", has base64support"
}
return fmt.Sprintf("\n"+`
// RequestParser header value %s, required: %v
// RequestParser header value %s, required: %v%s
exists, err = checkHeaderExists(r, %s, %v, %v)
if err != nil {
return err
}
p.foundHeaders[%s] = exists`, headerName, required, headerName, required, isString, headerName)
p.foundHeaders[%s] = exists`, headerName, required, base64SupportEntry, headerName, required, isString, headerName)
}
func generateParseRequestMethod(typeName string, fields []*ast.Field) string {
@@ -122,25 +136,41 @@ func generateParseRequestMethod(typeName string, fields []*ast.Field) string {
// Check if the tag has the "header" key and extract its value
tagParts := strings.Split(tag, " ")
required := hasRequiredTag(tagParts)
base64Support := hasBase64Tag(tagParts)
for _, part := range tagParts {
if strings.HasPrefix(part, "header:") {
// Extract header name after 'header:'
// Extract the header name after 'header:'
headerName := strings.TrimPrefix(part, "header:")
fieldType := field.Type.(*ast.Ident).Name
// Use appropriate parsing function based on the field type
// Use the appropriate parsing function based on the field type
switch fieldType {
case "string":
method += headerExists(headerName, required, true)
method += fmt.Sprintf(`
if (exists) {
p.%s = r.Header.Get(%s)
method += headerExists(headerName, required, true, base64Support)
if !base64Support {
method += fmt.Sprintf(`
if (exists) {
p.%s = r.Header.Get(%s)
}
`, field.Names[0].Name, headerName)
} else {
method += fmt.Sprintf(`
if (exists) {
p.%s = r.Header.Get(%s)
if strings.HasPrefix(p.%s, "base64:") {
decoded, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(p.%s, "base64:"))
if err != nil {
return err
}
p.%s = string(decoded)
}
}
`, field.Names[0].Name, headerName, field.Names[0].Name, field.Names[0].Name, field.Names[0].Name)
}
`, field.Names[0].Name, headerName)
case "bool":
method += headerExists(headerName, required, false)
method += headerExists(headerName, required, false, false)
method += fmt.Sprintf(`
if (exists) {
p.%s, err = parseHeaderBool(r, %s)
@@ -151,7 +181,7 @@ func generateParseRequestMethod(typeName string, fields []*ast.Field) string {
`, field.Names[0].Name, headerName, strings.Replace(headerName, "\"", "", -1))
case "int":
method += headerExists(headerName, required, false)
method += headerExists(headerName, required, false, false)
method += fmt.Sprintf(`
if (exists) {
p.%s, err = parseHeaderInt(r, %s)
@@ -162,7 +192,7 @@ func generateParseRequestMethod(typeName string, fields []*ast.Field) string {
`, field.Names[0].Name, headerName, strings.Replace(headerName, "\"", "", -1))
case "int64":
method += headerExists(headerName, required, false)
method += headerExists(headerName, required, false, false)
method += fmt.Sprintf(`
if (exists) {
p.%s, err = parseHeaderInt64(r, %s)
@@ -236,8 +266,10 @@ func main() {
package api
import (
"encoding/base64"
"fmt"
"net/http"
"strings"
)
// Do not modify: This is an automatically generated file created by updateApiRouting.go
+6 -6
View File
@@ -124,15 +124,15 @@ func extractEnvVars() ([]envVar, error) {
})
result = append(result,
envVar{
Name: "DOCKER_NONROOT",
Action: "DEPRECATED.\n\nDocker only: Runs the binary in the container as a non-root user, if set to \"true\"",
Default: "false",
},
envVar{
Name: "TMPDIR",
Action: "Sets the path which contains temporary files",
Default: "Non-Docker: Default OS path\n\nDocker: [DATA_DIR]",
Default: "Non-Docker: Default OS path\nDocker: [DATA_DIR]",
},
envVar{
Name: "DOCKER_NONROOT",
Action: "DEPRECATED.\nDocker only: Runs the binary in the container as a non-root user, if set to \"true\"",
Default: "false",
},
)
+8 -2
View File
@@ -19,24 +19,30 @@ require (
golang.org/x/oauth2 v0.27.0
golang.org/x/sync v0.11.0
golang.org/x/term v0.37.0
golang.org/x/time v0.14.0
gopkg.in/yaml.v3 v3.0.1
modernc.org/sqlite v1.35.0
)
require (
github.com/alicebob/gopher-json v0.0.0-20230218143504-906a9b012302 // indirect
github.com/djherbis/atime v1.1.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-jose/go-jose/v4 v4.0.5 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/jmoiron/sqlx v1.4.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/pelletier/go-toml v1.9.5 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46 // indirect
github.com/tdewolff/minify/v2 v2.24.2 // indirect
github.com/tdewolff/parse/v2 v2.8.3 // indirect
github.com/tdewolff/argp v0.0.0-20250430135133-0f54527d2b1e // indirect
github.com/tdewolff/minify/v2 v2.24.8 // indirect
github.com/tdewolff/parse/v2 v2.8.5 // indirect
github.com/yuin/gopher-lua v1.1.1 // indirect
go.shabbyrobe.org/gocovmerge v0.0.0-20230507111327-fa4f82cfbf4d // indirect
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa // indirect
+12
View File
@@ -1,3 +1,4 @@
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/Kodeworks/golang-image-ico v0.0.0-20141118225523-73f0f4cfade9/go.mod h1:7uhhqiBaR4CpN0k9rMjOtjpcfGd6DG2m04zQxKnWQ0I=
github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c=
github.com/alicebob/gopher-json v0.0.0-20230218143504-906a9b012302/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc=
@@ -8,21 +9,28 @@ github.com/caarlos0/env/v6 v6.10.1/go.mod h1:hvp/ryKXKipEkcuYjs9mI4bBCg+UI0Yhgm5
github.com/cevatbarisyilmaz/ara v0.0.4/go.mod h1:BfFOxnUd6Mj6xmcvRxHN3Sr21Z1T3U2MYkYOmoQe4Ts=
github.com/coreos/go-oidc/v3 v3.12.0/go.mod h1:gE3LgjOgFoHi9a4ce4/tJczr0Ai2/BoDhf0r5lltWI0=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/djherbis/atime v1.1.0/go.mod h1:28OF6Y8s3NQWwacXc5eZTsEsiMzp7LF8MbXE+XJPdBE=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA=
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
github.com/gomodule/redigo v1.9.2/go.mod h1:KsU3hiK/Ay8U42qpaJk+kuNa3C+spxapWpM+ywhcgtw=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
github.com/johannesboyne/gofakes3 v0.0.0-20250106100439-5c39aecd6999/go.mod h1:t6osVdP++3g4v2awHz4+HFccij23BbdT1rX3W7IijqQ=
github.com/juju/ratelimit v1.0.2/go.mod h1:qapgC/Gy+xNh9UxzV13HGGl/6UXNN+ct+vwSgWNm/qk=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
@@ -34,12 +42,15 @@ github.com/spf13/afero v1.2.1/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTd
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/tdewolff/argp v0.0.0-20250430135133-0f54527d2b1e/go.mod h1:xw2b1X81m4zY1OGytzHNr/YKXbf/STHkK5idoNamlYE=
github.com/tdewolff/minify/v2 v2.23.11 h1:cZqTVCtuVvPC8/GbCvYgIcdAQGmoxEObZzKeKIUixTE=
github.com/tdewolff/minify/v2 v2.23.11/go.mod h1:vmkbfGQ5hp/eYB+TswNWKma67S0a+32HBL+mFWxjZ2Q=
github.com/tdewolff/minify/v2 v2.24.2/go.mod h1:1JrCtoZXaDbqioQZfk3Jdmr0GPJKiU7c1Apmb+7tCeE=
github.com/tdewolff/minify/v2 v2.24.8/go.mod h1:0Ukj0CRpo/sW/nd8uZ4ccXaV1rEVIWA3dj8U7+Shhfw=
github.com/tdewolff/parse/v2 v2.8.2-0.20250806174018-50048bb39781 h1:2qicgFovKg1XtX7Wf6GwexUdpb7q/jMIE2IgkYsVAvE=
github.com/tdewolff/parse/v2 v2.8.2-0.20250806174018-50048bb39781/go.mod h1:Hwlni2tiVNKyzR1o6nUs4FOF07URA+JLBLd6dlIXYqo=
github.com/tdewolff/parse/v2 v2.8.3/go.mod h1:Hwlni2tiVNKyzR1o6nUs4FOF07URA+JLBLd6dlIXYqo=
github.com/tdewolff/parse/v2 v2.8.5/go.mod h1:Hwlni2tiVNKyzR1o6nUs4FOF07URA+JLBLd6dlIXYqo=
github.com/tdewolff/test v1.0.11 h1:FdLbwQVHxqG16SlkGveC0JVyrJN62COWTRyUFzfbtBE=
github.com/tdewolff/test v1.0.11/go.mod h1:XPuWBzvdUzhCuxWO1ojpXsyzsA5bFoS3tO/Q3kFuTG8=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
@@ -96,6 +107,7 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190829051458-42f498d34c4d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+30 -9
View File
@@ -5,6 +5,10 @@ import (
"encoding/json"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"github.com/forceu/gokapi/cmd/cli-uploader/cliapi"
"github.com/forceu/gokapi/cmd/cli-uploader/cliconfig"
"github.com/forceu/gokapi/cmd/cli-uploader/cliconstants"
@@ -12,9 +16,6 @@ import (
"github.com/forceu/gokapi/internal/environment"
"github.com/forceu/gokapi/internal/helper"
"github.com/schollz/progressbar/v3"
"io"
"os"
"path/filepath"
)
func main() {
@@ -25,9 +26,11 @@ func main() {
case cliflags.ModeLogout:
doLogout()
case cliflags.ModeUpload:
processUpload(false)
processUpload(cliflags.ModeUpload)
case cliflags.ModeArchive:
processUpload(true)
processUpload(cliflags.ModeArchive)
case cliflags.ModeDownload:
processDownload()
case cliflags.ModeInvalid:
os.Exit(3)
}
@@ -38,11 +41,11 @@ func doLogin() {
cliconfig.CreateLogin()
}
func processUpload(isArchive bool) {
func processUpload(mode int) {
cliconfig.Load()
uploadParam := cliflags.GetUploadParameters(isArchive)
uploadParam := cliflags.GetUploadParameters(mode)
if isArchive {
if mode == cliflags.ModeArchive {
zipPath, err := zipFolder(uploadParam.Directory, uploadParam.TmpFolder, !uploadParam.JsonOutput)
if err != nil {
fmt.Println(err)
@@ -57,7 +60,7 @@ func processUpload(isArchive bool) {
if err != nil {
fmt.Println()
if errors.Is(cliapi.ErrUnauthorised, err) {
fmt.Println("ERROR: Unauthorised API key. Please re-run login.")
fmt.Println("ERROR: Unauthorised API key. Please re-run login or make sure that the API key has the permission to upload files.")
} else {
fmt.Println("ERROR: Could not upload file")
fmt.Println(err)
@@ -77,6 +80,24 @@ func processUpload(isArchive bool) {
fmt.Println("File Download URL: " + result.UrlDownload)
}
// processDownload loads the CLI configuration, parses the download flags and
// fetches the requested file from the Gokapi server. On failure it prints a
// diagnostic message and terminates the process with exit code 1.
func processDownload() {
	cliconfig.Load()
	params := cliflags.GetUploadParameters(cliflags.ModeDownload)
	// Perform the download
	downloadErr := cliapi.DownloadFile(params)
	if downloadErr == nil {
		return
	}
	fmt.Println()
	if errors.Is(cliapi.ErrUnauthorised, downloadErr) {
		fmt.Println("ERROR: Unauthorised API key. Please re-run login or make sure that the API key has the permission to download files.")
	} else {
		fmt.Println("ERROR: Could not download file")
		fmt.Println(downloadErr)
	}
	os.Exit(1)
}
func doLogout() {
err := cliconfig.Delete()
if err != nil {
+116 -10
View File
@@ -6,12 +6,6 @@ import (
"encoding/json"
"errors"
"fmt"
"github.com/forceu/gokapi/cmd/cli-uploader/cliflags"
"github.com/forceu/gokapi/internal/encryption"
"github.com/forceu/gokapi/internal/encryption/end2end"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
"github.com/schollz/progressbar/v3"
"io"
"mime/multipart"
"net/http"
@@ -20,6 +14,13 @@ import (
"strconv"
"strings"
"time"
"github.com/forceu/gokapi/cmd/cli-uploader/cliflags"
"github.com/forceu/gokapi/internal/encryption"
"github.com/forceu/gokapi/internal/encryption/end2end"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
"github.com/schollz/progressbar/v3"
)
var gokapiUrl string
@@ -138,7 +139,7 @@ func getUrl(url string, headers []header, longTimeout bool) (string, error) {
}
// UploadFile uploads a file to the Gokapi server
func UploadFile(uploadParams cliflags.UploadConfig) (models.FileApiOutput, error) {
func UploadFile(uploadParams cliflags.FlagConfig) (models.FileApiOutput, error) {
var progressBar *progressbar.ProgressBar
file, err := os.OpenFile(uploadParams.File, os.O_RDONLY, 0664)
if err != nil {
@@ -146,6 +147,7 @@ func UploadFile(uploadParams cliflags.UploadConfig) (models.FileApiOutput, error
fmt.Println(err)
os.Exit(4)
}
defer file.Close()
maxSize, chunkSize, isE2e, err := GetConfig()
if err != nil {
return models.FileApiOutput{}, err
@@ -222,17 +224,121 @@ func UploadFile(uploadParams cliflags.UploadConfig) (models.FileApiOutput, error
return metaData, nil
}
func nameToBase64(f *os.File, uploadParams cliflags.UploadConfig) string {
// DownloadFile downloads the file identified by downloadParams.DownloadId from
// the Gokapi server into downloadParams.OutputPath (default: current folder),
// optionally deleting the remote copy afterwards when RemoveRemote is set.
// It returns an error when a step fails; it exits the process when the target
// file already exists or the server replies with a non-OK status.
func DownloadFile(downloadParams cliflags.FlagConfig) error {
	var progressBar *progressbar.ProgressBar
	info, err := getFileInfo(downloadParams.DownloadId)
	if err != nil {
		fmt.Println("ERROR: Could not get file info or file does not exist")
		return err
	}
	if downloadParams.OutputPath == "" {
		downloadParams.OutputPath = "."
	}
	if downloadParams.FileName == "" {
		downloadParams.FileName = info.Name
	}
	// Build the destination path once and reuse it for both the existence
	// check and the file creation below.
	filename := filepath.Join(downloadParams.OutputPath, downloadParams.FileName)
	exists, err := helper.FileExists(filename)
	if err != nil {
		fmt.Println("ERROR: Could not check if file already exists")
		return err
	}
	if exists {
		fmt.Println("ERROR: File already exists, please specify a different filename")
		os.Exit(1)
	}
	if !helper.FolderExists(downloadParams.OutputPath) {
		err = os.Mkdir(downloadParams.OutputPath, 0770)
		if err != nil {
			fmt.Println("ERROR: Could not create output directory")
			return err
		}
	}
	// Issue the request before creating the local file, so that a failed
	// request does not leave an empty file behind.
	req, err := http.NewRequest("GET", gokapiUrl+"/files/download/"+downloadParams.DownloadId, nil)
	if err != nil {
		return err
	}
	req.Header.Add("apikey", apiKey)
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		fmt.Println("ERROR: Could not download file: Status code " + strconv.Itoa(resp.StatusCode))
		os.Exit(4)
	}
	file, err := os.Create(filename)
	// Check the error before deferring Close, so we never defer on a nil file
	if err != nil {
		fmt.Println("ERROR: Could not create new file")
		return err
	}
	defer file.Close()
	if !downloadParams.JsonOutput {
		progressBar = progressbar.DefaultBytes(info.SizeBytes, "Downloading")
		_, err = io.Copy(file, io.TeeReader(resp.Body, progressBar))
	} else {
		_, err = io.Copy(file, resp.Body)
	}
	if err != nil {
		fmt.Println("ERROR: Could not download file")
		return err
	}
	if downloadParams.RemoveRemote {
		err = deleteRemoteFile(downloadParams.DownloadId)
		if err != nil {
			return err
		}
	}
	if !downloadParams.JsonOutput {
		fmt.Println("File downloaded successfully")
	} else {
		fmt.Println("{\"result\":\"OK\"}")
	}
	return nil
}
// nameToBase64 encodes the effective file name as a header value carrying the
// "base64:" prefix, so the server decodes it on arrival.
func nameToBase64(f *os.File, uploadParams cliflags.FlagConfig) string {
	encoded := base64.StdEncoding.EncodeToString([]byte(getFileName(f, uploadParams)))
	return "base64:" + encoded
}
func getFileName(f *os.File, uploadParams cliflags.UploadConfig) string {
// getFileName returns the name to use for the upload: the explicit override
// from the flags when set, otherwise the base name of the opened file.
func getFileName(f *os.File, uploadParams cliflags.FlagConfig) string {
	if uploadParams.FileName == "" {
		return filepath.Base(f.Name())
	}
	return uploadParams.FileName
}
// getFileInfo fetches the metadata for the file with the given id from the
// server and decodes the JSON response into a FileApiOutput.
func getFileInfo(id string) (models.FileApiOutput, error) {
	raw, err := getUrl(gokapiUrl+"/files/list/"+id, []header{}, false)
	if err != nil {
		return models.FileApiOutput{}, err
	}
	var info models.FileApiOutput
	if err = json.Unmarshal([]byte(raw), &info); err != nil {
		// Return the zero value, never a partially decoded struct
		return models.FileApiOutput{}, err
	}
	return info, nil
}
func deleteRemoteFile(id string) error {
_, err := getUrl(gokapiUrl+"/files/delete", []header{{"id", id}}, false)
return err
}
func uploadChunk(f io.Reader, uuid string, offset, chunkSize, filesize int64, progressBar *progressbar.ProgressBar) error {
body := new(bytes.Buffer)
writer := multipart.NewWriter(body)
@@ -299,7 +405,7 @@ func uploadChunk(f io.Reader, uuid string, offset, chunkSize, filesize int64, pr
return nil
}
func completeChunk(uid, filename string, filesize, realsize int64, useE2e bool, uploadParams cliflags.UploadConfig, progressBar *progressbar.ProgressBar) (models.FileApiOutput, error) {
func completeChunk(uid, filename string, filesize, realsize int64, useE2e bool, uploadParams cliflags.FlagConfig, progressBar *progressbar.ProgressBar) (models.FileApiOutput, error) {
type expectedFormat struct {
FileInfo models.FileApiOutput `json:"FileInfo"`
}
@@ -1,10 +1,10 @@
package cliconstants
// MinGokapiVersionInt is the minimum version of the gokapi server that is supported by the cli
const MinGokapiVersionInt = 20100
const MinGokapiVersionInt = 20200
// MinGokapiVersionStr is the minimum version of the gokapi server that is supported by the cli
const MinGokapiVersionStr = "2.1.0"
const MinGokapiVersionStr = "2.2.0"
// DefaultConfigFileName is the default config file name
const DefaultConfigFileName = "gokapi-cli.json"
+52 -17
View File
@@ -2,13 +2,14 @@ package cliflags
import (
"fmt"
"github.com/forceu/gokapi/cmd/cli-uploader/cliconstants"
"github.com/forceu/gokapi/internal/environment"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"github.com/forceu/gokapi/cmd/cli-uploader/cliconstants"
"github.com/forceu/gokapi/internal/environment"
)
const (
@@ -20,20 +21,25 @@ const (
ModeUpload
// ModeArchive is the mode for the archive command
ModeArchive
// ModeDownload is the mode for the download command
ModeDownload
// ModeInvalid is the mode for an invalid command
ModeInvalid
)
const version = "v1.0.0"
const version = "v1.1.0"
// UploadConfig contains the parameters for the upload command.
type UploadConfig struct {
// FlagConfig contains the parameters for the upload command.
type FlagConfig struct {
File string
Directory string
TmpFolder string
FileName string
OutputPath string
DownloadId string
JsonOutput bool
DisableE2e bool
RemoveRemote bool
ExpiryDays int
ExpiryDownloads int
Password string
@@ -54,6 +60,8 @@ func Parse() int {
return ModeUpload
case "upload-dir":
return ModeArchive
case "download":
return ModeDownload
case "help":
printUsage(0)
default:
@@ -63,8 +71,8 @@ func Parse() int {
}
// GetUploadParameters parses the command line arguments and returns the parameters for the upload command.
func GetUploadParameters(isArchive bool) UploadConfig {
result := UploadConfig{}
func GetUploadParameters(mode int) FlagConfig {
result := FlagConfig{}
for i := 2; i < len(os.Args); i++ {
switch os.Args[i] {
case "-j":
@@ -103,6 +111,22 @@ func GetUploadParameters(isArchive bool) UploadConfig {
fallthrough
case "--name":
result.FileName = getParameter(&i)
case "-i":
fallthrough
case "--id":
result.DownloadId = getParameter(&i)
case "-o":
fallthrough
case "--output":
result.FileName = getParameter(&i)
case "-k":
fallthrough
case "--ouput-path":
result.OutputPath = getParameter(&i)
case "-r":
fallthrough
case "--remove":
result.RemoveRemote = true
case "-h":
fallthrough
case "--help":
@@ -116,14 +140,14 @@ func GetUploadParameters(isArchive bool) UploadConfig {
result.ExpiryDays = 0
}
sanitiseFilename(&result)
if !checkRequiredUploadParameter(&result, isArchive) {
if !checkRequiredUploadParameter(&result, mode) {
os.Exit(2)
}
return result
}
func sanitiseFilename(config *UploadConfig) {
func sanitiseFilename(config *FlagConfig) {
if config.FileName == "" {
return
}
@@ -136,26 +160,31 @@ func sanitiseFilename(config *UploadConfig) {
config.FileName = illegalChars.ReplaceAllString(config.FileName, "_")
}
func checkRequiredUploadParameter(config *UploadConfig, isArchive bool) bool {
if isArchive && config.Directory != "" {
func checkRequiredUploadParameter(config *FlagConfig, mode int) bool {
if mode == ModeArchive && config.Directory != "" {
return true
}
if !isArchive && config.File != "" {
if mode == ModeUpload && config.File != "" {
return true
}
if mode == ModeDownload && config.DownloadId == "" {
fmt.Println("ERROR: Missing parameter --id")
return false
}
if !environment.IsDockerInstance() {
if isArchive {
if mode == ModeArchive {
fmt.Println("ERROR: Missing parameter --directory")
} else {
}
if mode == ModeUpload {
fmt.Println("ERROR: Missing parameter --file")
}
return false
}
ok, uploadPath := getDockerUpload(isArchive)
ok, uploadPath := getDockerUpload(mode == ModeArchive)
if !ok {
if isArchive {
if mode == ModeArchive {
fmt.Println("ERROR: Missing parameter --file and no file found in " + cliconstants.DockerFolderUpload)
} else {
fmt.Println("ERROR: Missing parameter --file and no file or more than one file found in " + cliconstants.DockerFolderUpload)
@@ -163,7 +192,7 @@ func checkRequiredUploadParameter(config *UploadConfig, isArchive bool) bool {
return false
}
if isArchive {
if mode == ModeArchive {
config.File = cliconstants.DockerFolderUpload
} else {
config.File = uploadPath
@@ -244,6 +273,7 @@ func printUsage(exitCode int) {
fmt.Println("Commands:")
fmt.Println(" login Save login credentials")
fmt.Println(" download Download a file from the Gokapi instance without increasing its download counter")
fmt.Println(" upload Upload a file to the Gokapi instance")
fmt.Println(" upload-dir Upload a folder as a zip file to the Gokapi instance")
fmt.Println(" logout Delete login credentials")
@@ -252,6 +282,7 @@ func printUsage(exitCode int) {
fmt.Println("Options:")
fmt.Println(" -f, --file <path> File to upload (required for \"upload\")")
fmt.Println(" -D, --directory <path> Folder to upload (required for \"upload-dir\")")
fmt.Println(" -i, --id <id> File ID to download (required for \"download\")")
fmt.Println(" -c, --configuration <path> Path to configuration file (default: gokapi-cli.json)")
fmt.Println(" -j, --json Output the result in JSON only")
fmt.Println(" -x, --disable-e2e Disable end-to-end encryption")
@@ -259,6 +290,9 @@ func printUsage(exitCode int) {
fmt.Println(" -d, --expiry-downloads <int> Set max allowed downloads (default: unlimited)")
fmt.Println(" -p, --password <string> Set a password for the file")
fmt.Println(" -n, --name <string> Change final filename for uploaded file")
fmt.Println(" -o, --output <string> Change the filename of the file to download")
fmt.Println(" -k, --output-path <path> The folder to download the file to (default: current folder)")
fmt.Println(" -r, --remove Remove remote file after download")
fmt.Println(" -t, --tmpfolder <path> Folder for temporary Zip file when uploading a directory")
fmt.Println(" -h, --help Show this help message")
fmt.Println()
@@ -268,6 +302,7 @@ func printUsage(exitCode int) {
fmt.Println(" gokapi-cli logout -c /path/to/config")
fmt.Println(" gokapi-cli upload -f /file/to/upload --expiry-days 7 --json")
fmt.Println(" gokapi-cli upload-dir -D /path/to/upload -t /mnt/tmp")
fmt.Println(" gokapi-cli download --remove -i chuTheishaipa9o -o myfile.zip")
fmt.Println()
os.Exit(exitCode)
}
+3 -3
View File
@@ -34,7 +34,7 @@ import (
// versionGokapi is the current version in readable form.
// Other version numbers can be modified in /build/go-generate/updateVersionNumbers.go
const versionGokapi = "2.1.0"
const versionGokapi = "2.2.0-dev"
// The following calls update the version numbers, update documentation, minify Js/CSS and build the WASM modules
//go:generate go run "../../build/go-generate/updateVersionNumbers.go"
@@ -66,7 +66,7 @@ func main() {
authentication.Init(configuration.Get().Authentication)
createSsl(passedFlags)
initCloudConfig(passedFlags)
go storage.CleanUp(true)
storage.CleanUp(true)
logging.LogStartup()
showDeprecationWarnings()
go webserver.Start()
@@ -110,7 +110,7 @@ func showVersion(passedFlags flagparser.MainFlags) {
}
func showDeprecationWarnings() {
for _, dep := range configuration.Environment.ActiveDeprecations {
for _, dep := range configuration.GetEnvironment().ActiveDeprecations {
fmt.Println()
fmt.Println("WARNING, deprecated feature: " + dep.Name)
fmt.Println(dep.Description)
+150 -92
View File
@@ -57,59 +57,73 @@ Available environment variables
==================================
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| Name | Action | Persistent [*]_ | Default |
+==============================+=====================================================================================+=================+=============================+
| GOKAPI_CHUNK_SIZE_MB | Sets the size of chunks that are uploaded in MB | Yes | 45 |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_CONFIG_DIR | Sets the directory for the config file | No | config |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_CONFIG_FILE | Sets the name of the config file | No | config.json |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_DATA_DIR | Sets the directory for the data | Yes | data |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_DISABLE_CORS_CHECK | Disables the CORS check on startup and during setup, if set to true | No | false |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_ENABLE_HOTLINK_VIDEOS | Allow hotlinking of videos. Note: Due to buffering, playing a video might count as | No | false |
| | | | |
| | multiple downloads. It is only recommended to use video hotlinking for uploads with | | |
| | | | |
| | unlimited downloads enabled | | |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_LENGTH_HOTLINK_ID | Sets the length of the hotlink IDs. Value must be 8 or greater | No | 40 |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_LENGTH_ID | Sets the length of the download IDs. Value must be 5 or greater | No | 15 |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_LOG_STDOUT | Also outputs all log file entries to the console output, if set to true | No | false |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_MAX_FILESIZE | Sets the maximum allowed file size in MB | Yes | 102400 |
| | | | |
| | Default 102400 = 100GB | | |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_MAX_MEMORY_UPLOAD | Sets the amount of RAM in MB that can be allocated for an upload chunk or file | Yes | 50 |
| | | | |
| | Any chunk or file with a size greater than that will be written to a temporary file | | |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_MAX_PARALLEL_UPLOADS | Set the number of chunks that are uploaded in parallel for a single file | Yes | 3 |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_MIN_FREE_SPACE | Sets the minium free space on the disk in MB for accepting an upload | No | 400 |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_MIN_LENGTH_PASSWORD | Sets the minium password length. Value must be 6 or greater | No | 8 |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_PORT | Sets the webserver port | Yes | 53842 |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| DOCKER_NONROOT | DEPRECATED. | No | false |
| | | | |
| | | | |
| | | | |
| | Docker only: Runs the binary in the container as a non-root user, if set to "true" | | |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| TMPDIR | Sets the path which contains temporary files | No | Non-Docker: Default OS path |
| | | | |
| | | | |
| | | | |
| | | | Docker: [DATA_DIR] |
+------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| Name | Action | Persistent [*]_ | Default |
+================================+=====================================================================================+=================+=============================+
| GOKAPI_CHUNK_SIZE_MB | Sets the size of chunks that are uploaded in MB | Yes | 45 |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_CONFIG_DIR | Sets the directory for the config file | No | config |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_CONFIG_FILE | Sets the name of the config file | No | config.json |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_DATA_DIR | Sets the directory for the data | Yes | data |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_DISABLE_CORS_CHECK | Disables the CORS check on startup and during setup, if set to true | No | false |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_ENABLE_HOTLINK_VIDEOS | Allow hotlinking of videos. Note: Due to buffering, playing a video might count as | No | false |
| | | | |
| | multiple downloads. It is only recommended to use video hotlinking for uploads with | | |
| | | | |
| | unlimited downloads enabled | | |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_GUEST_UPLOAD_BY_DEFAULT | Allows all users by default to create file requests, if set to true | No | false |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_LENGTH_HOTLINK_ID | Sets the length of the hotlink IDs. Value must be 8 or greater | No | 40 |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_LENGTH_ID | Sets the length of the download IDs. Value must be 5 or greater | No | 15 |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_LOG_STDOUT | Also outputs all log file entries to the console output, if set to true | No | false |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_MAX_FILESIZE | Sets the maximum allowed file size in MB | Yes | 102400 |
| | | | |
| | Default 102400 = 100GB | | |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_MAX_FILES_GUESTUPLOAD   | Sets the maximum number of files that can be uploaded per file request created by   | No              | 100                         |
| | | | |
| | non-admin users | | |
| | | | |
| | Set to 0 to allow unlimited file count for all users | | |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_MAX_MEMORY_UPLOAD | Sets the amount of RAM in MB that can be allocated for an upload chunk or file | Yes | 50 |
| | | | |
| | Any chunk or file with a size greater than that will be written to a temporary file | | |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_MAX_PARALLEL_UPLOADS | Set the number of chunks that are uploaded in parallel for a single file | Yes | 3 |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_MAX_SIZE_GUESTUPLOAD | Sets the maximum file size for file requests created by | No | 10240 |
| | | | |
| | non-admin users | | |
| | | | |
| | Set to 0 to allow files with a size of up to a value set with GOKAPI_MAX_FILESIZE | | |
| | | | |
| | for all users | | |
| | | | |
| | Default 10240 = 10GB | | |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_MIN_FREE_SPACE          | Sets the minimum free space on the disk in MB for accepting an upload               | No              | 400                         |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_MIN_LENGTH_PASSWORD     | Sets the minimum password length. Value must be 6 or greater                        | No              | 8                           |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| GOKAPI_PORT | Sets the webserver port | Yes | 53842 |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| TMPDIR | Sets the path which contains temporary files | No | Non-Docker: Default OS path |
| | | | |
| | | | Docker: [DATA_DIR] |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
| DOCKER_NONROOT | DEPRECATED. | No | false |
| | | | |
| | Docker only: Runs the binary in the container as a non-root user, if set to "true" | | |
+--------------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+
.. [*] Variables that are persistent must be submitted during the first start when Gokapi creates a new config file. They can be omitted afterwards. Non-persistent variables need to be set on every start.
@@ -216,81 +230,96 @@ Migrating Redis (``127.0.0.1:6379, User: test, Password: 1234, Prefix: gokapi_,
.. _clitool:
********************************
********
CLI Tool
********************************
********
Gokapi also has a CLI tool that allows uploads from the command line. Binaries are available on the `Github release page <https://github.com/Forceu/Gokapi/releases>`_ for Linux, Windows and MacOS. To compile it yourself, download the repository and run ``make build-cli`` in the top directory.
The Gokapi CLI tool enables seamless file uploads and downloads directly from the command line.
Alternatively you can use the tool with Docker, although it will be slightly less user-friendly.
Installation
============
Official binaries for Linux, Windows, and macOS are available on the `GitHub releases page <https://github.com/Forceu/Gokapi/releases>`_.
To build the tool from source:
1. Download the repository.
2. Run ``make build-cli`` from the root directory.
.. note::
Gokapi v2.1.0 or newer is required for CLI functionality. For file downloads, version v2.2.0 or newer is required.
Gokapi v2.1.0 or newer is required to use the CLI tool.
Authentication
==============
Login
=================================
To begin, authenticate your session using the following command:
First you need to log in with the command ``gokapi-cli login``. You will then be asked for your server URL and a valid API key with upload permission. If end-to-end encryption is enabled, you will also need to enter your encryption key. By default the login data is saved to ``gokapi-cli.json``, but you can define a different location with the ``-c`` parameter.
.. code-block:: bash
gokapi-cli login
To logout, either delete the configuration file or run ``gokapi-cli logout``.
You will be prompted to provide your server URL, an API key with upload permissions, and your end-to-end encryption key (if applicable).
* **Storage:** By default, credentials are saved in plain text to ``gokapi-cli.json``. You may specify a custom path using the ``-c`` parameter.
* **Logout:** To logout, run ``gokapi-cli logout`` or manually delete the configuration file.
.. warning::
The configuration file stores login credentials in plain text. Ensure the file is stored in a secure environment.
The configuration file contains the login data as plain text.
Docker Usage
------------
While the native binary is recommended for the best experience, the CLI can be run via Docker. By default, the configuration is stored at ``/app/config/config.json``.
Docker
---------------------------------
To persist your login session, mount a volume as shown below:
If you are using Docker, your config will be saved to ``/app/config/config.json`` by default, but the location can be changed. To login, execute the following command:
::
docker run -it --rm -v gokapi-cli-config:/app/config docker.io/f0rc3/gokapi-cli:latest login
The volume ``gokapi-cli-config:/app/config`` is not required if you re-use the container, but it is still highly recommended. If a volume is not mounted, you will need to log in again after every new container creation.
.. code-block:: bash
docker run -it --rm -v gokapi-cli-config:/app/config docker.io/f0rc3/gokapi-cli:latest login
.. _clitool-upload-file:
Uploading a file
=================================
Uploading Files
===============
To upload a file, use the ``upload`` command with the ``-f`` flag:
To upload a file, simply run ``gokapi-cli upload -f /path/to/file``. By default the files are encrypted (if enabled) and stored without any expiration. These additional parameters are available:
.. code-block:: bash
gokapi-cli upload -f /path/to/file
By default, files are encrypted (if enabled) and have no expiration date. The following parameters are available to customize the upload:
+------------------------------------+---------------------------------------------------+
| Parameter | Effect |
| Parameter | Description |
+====================================+===================================================+
| \-\-json, -j | Only outputs in JSON format, unless upload failed |
| ``--file, -f [path]`` | **(Required)** Path to the file to be uploaded. |
+------------------------------------+---------------------------------------------------+
| \-\-disable-e2e, -x | Disables end-to-end encryption for this upload |
| ``--expiry-days, -e [int]`` | Sets the file expiration in days. |
+------------------------------------+---------------------------------------------------+
| \-\-expiry-days, -e [number] | Sets the expiry date of the file in days |
| ``--expiry-downloads, -d [int]`` | Limits the number of allowed downloads. |
+------------------------------------+---------------------------------------------------+
| \-\-expiry-downloads, -d [number] | Sets the allowed downloads |
| ``--password, -p [string]`` | Protects the download with a password. |
+------------------------------------+---------------------------------------------------+
| \-\-password, -p [string] | Sets a password |
| ``--name, -n [string]`` | Assigns a custom filename on the server. |
+------------------------------------+---------------------------------------------------+
| \-\-name, -n [string] | Sets a different filename for uploaded file |
| ``--disable-e2e, -x`` | Disables end-to-end encryption for this upload. |
+------------------------------------+---------------------------------------------------+
| \-\-configuration, -c [path] | Use the configuration file specified |
| ``--json, -j`` | Returns output in JSON format (unless failed). |
+------------------------------------+---------------------------------------------------+
| ``--configuration, -c [path]`` | Uses a specific configuration file. |
+------------------------------------+---------------------------------------------------+
**Example:** Uploading the file ``/tmp/example``. It will expire in 10 days, has unlimited downloads and requires the password ``abcd``:
::
**Example:**
Upload a file that expires in 10 days, has no download limit, and is protected by the password "abcd":
.. code-block:: bash
gokapi-cli upload -f /tmp/example --expiry-days 10 --password abcd
gokapi-cli upload -f /tmp/example --expiry-days 10 --password abcd
.. warning::
To avoid race conditions, do not initiate multiple simultaneous uploads if end-to-end encryption is enabled.
If you are using end-to-end encryption, do not upload other encrypted files simultaneously to avoid race conditions.
Docker
---------------------------------
@@ -358,6 +387,35 @@ As a Docker container cannot access your host files without a volume, you will n
docker run --rm -v gokapi-cli-config:/app/config -v /tmp/another/example:/upload/ docker.io/f0rc3/gokapi-cli:latest upload-dir -n "example.zip"
Downloading Files
=================
To retrieve a file from the server, use the ``download`` command followed by the file ID. Files downloaded with the CLI tool do not increase the download count.
.. code-block:: bash
gokapi-cli download -i [FILE_ID]
Available parameters for downloads:
+------------------------------------+---------------------------------------------------+
| Parameter | Description |
+====================================+===================================================+
| ``--id, -i [id]`` | **(Required)** The unique ID of the file. |
+------------------------------------+---------------------------------------------------+
| ``--output, -o [string]`` | Renames the file upon download. |
+------------------------------------+---------------------------------------------------+
| ``--output-path, -k [path]`` | Target directory (defaults to current folder). |
+------------------------------------+---------------------------------------------------+
| ``--remove, -r`` | Deletes the file from the server after download. |
+------------------------------------+---------------------------------------------------+
**Example:**
Download the file with ID ``Eukohc6r`` to the ``/home/user/downloads`` folder and delete it from the server after a successful transfer:
.. code-block:: bash
gokapi-cli download -i Eukohc6r --output-path /home/user/downloads --remove
.. _api:
+2
View File
@@ -285,7 +285,9 @@ This option disables Gokapis internal authentication completely, except for API
- ``/apiKeys``
- ``/auth/token``
- ``/changePassword``
- ``/downloadPresigned``
- ``/e2eSetup``
- ``/filerequests``
- ``/logs``
- ``/uploadChunk``
- ``/uploadStatus``
+317 -6
View File
@@ -4,14 +4,14 @@
Usage
=====
Admin Menu
Upload Menu
================
General
----------------
After you have started the Gokapi server, you can login using the your admin credentials by going to `http(s)://your.gokapi.url/admin``
After you have started the Gokapi server, you can log in using your credentials by going to ``http(s)://your.gokapi.url/admin``
There you can list and manage files and upload new files. You will also see three fields:
@@ -22,25 +22,336 @@ There you can list and manage files and upload new files. You will also see thre
Uploading new files
---------------------
To upload, drag and drop a file, folder or multiple files to the Upload Zone. You can also directly paste an image from the clipboard. If you want to change the default expiry conditions, this has to be done before uploading. For each file an entry in the table will appear with a download link.
To upload, drag and drop a file, folder or multiple files to the Upload Zone. You can also directly paste an image or text from the clipboard. If you want to change the default expiry conditions, this has to be done before uploading. For each file an entry in the table will appear with a download link.
Identical files are deduplicated, which means if you upload a file twice, it will only be stored once.
Sharing files
---------------
Once you uploaded an file, you will see the options *Copy URL* and *Copy Hotlink*. By clicking on *Copy URL*, you copy the URL for the Download page to your clipboard. A user can then download the file from that page.
Once you have uploaded a file, you will see a button with the options *Copy URL* and *Copy Hotlink*. By clicking on *Copy URL*, you copy the URL for the Download page to your clipboard. A user can then download the file from that page.
If a file does not require client-side decryption, you can also use the *Copy Hotlink* button. The hotlink URL is a direct link to the file and can for example be posted as an image on a forum or on a website. Each view counts as a download. Although Gokapi sets a Header to explicitly disallow caching, some browsers or external caches may still cache the image if they are not compliant.
The second button lets you share the regular URL easily. If you are accessing Gokapi with a mobile device, a tap on the button will open your device's share menu. Otherwise you can click on the drop down element and select to either share the link via email or generate a QR code.
Downloading files
------------------
The upload menu has a button which lets you download a file without increasing the download counter. You can also click on the file ID to go to the regular download page, which increases the counter.
Editing files
---------------
By clicking on the edit button, you can change limits like the maximum download count or replace the file with the contents of a different uploaded file.
File deletion
---------------
Every hour Gokapi runs a cleanup routine which deletes all files from the storage that have been expired. If you click on the *Delete* button in the list, that file will be deleted from the disk immediately. AWS files are deleted after 24 hours, as of right now there is no proper way to find out if a download has been completed.
Every hour Gokapi runs a cleanup routine which deletes all files from the storage that have expired. If you click on the *Delete* button in the list, that file will be deleted from the disk immediately. Unproxied AWS files are deleted after 24 hours; as of right now there is no proper way to find out if a download has been completed.
File Request Menu
===================
General
----------------
The File Requests page allows you to create secure, invitation-only upload links. These links enable external users to send files directly to your server without needing an account.
.. note::
**Security Note:** If End-to-End Encryption is enabled globally, please note that **File Requests bypass this**. All files uploaded through the upload request page will be in plain text. This only affects servers with end-to-end encryption; regular file encryption is still in place.
Overview of the Dashboard
---------------------------
The main dashboard provides a summary of all active and expired file requests.
* **Name**: The friendly name of the request. Clicking this link opens the public upload page in a new tab.
* **Uploaded Files**: Displays the number of files currently received.
* **+X**: Indicates "active" uploads currently in progress.
* **X / Max**: Shows the current count against a set file limit.
* **Total Size**: The combined storage footprint of all files in that request.
* **Last Upload**: The date and time the most recent file was added.
* **Expiry**: When the link will stop accepting new uploads.
* **Actions**: Quick tools to manage, download, or delete the request.
Managing Files
---------------------------
Each row in the table can be expanded to view and manage individual files.
Viewing Files
^^^^^^^^^^^^^^
If a request has files, a *chevron (down arrow)* icon will appear next to the file count. Clicking this will expand a list showing:
* Individual file names.
* File sizes and upload dates.
* Direct download buttons for single files.
Downloading Content
^^^^^^^^^^^^^^^^^^^^
You can download files in two ways:
1. **Single File**: Click the file name or the download icon within the expanded list.
2. **Batch Download**: Click the download icon in the *Actions* column. If multiple files exist, the system will automatically package them into a ``.zip`` archive.
Creating and Editing Requests
---------------------------------
To create a new request, click the *Plus* icon at the top right. To modify an existing one, click the *Pencil* icon.
.. list-table::
:widths: 20 80
:header-rows: 1
* - Field
- Description
* - **Title**
- A friendly name to identify the request (e.g., "Project Assets").
* - **Max Files**
- Limit how many files users can upload to this link.
* - **Max Size**
- Set a maximum total size (in MB) for the entire request.
* - **Expiry**
- Set a date after which the link will no longer function.
* - **Notes**
- Public notes that are shown on the upload page
Sharing and Deletion
--------------------
Sharing the Request
^^^^^^^^^^^^^^^^^^^^^^
1. Locate the request in the table.
2. Click the *Copy (Clipboard)* icon.
3. A notification will confirm the URL is copied. You can now paste this into an email or chat.
Deleting Requests
^^^^^^^^^^^^^^^^^^^
To remove a request, click the *Trash* icon.
.. warning::
Deleting a File Request is permanent. This action also deletes all associated files currently stored on the server. This cannot be undone.
User Management
=================
The **Users** page provides administrators with tools to create accounts, manage permissions, and oversee user activity. This interface ensures you can delegate responsibilities while maintaining system security.
Overview of the Dashboard
----------------------------
The user table displays a high-level summary of all accounts on the server:
* **User**: The display name or username of the account.
* **Group**: The account type (e.g., "Admin" or "User").
* **Last Online**: A timestamp indicating the last time the user logged into the system.
* **Uploads**: The total number of files currently owned by that user.
* **Permissions**: A quick-view grid of icons representing specific rights.
* **Actions**: Tools to reset passwords, promote/demote ranks, or delete accounts.
Managing Permissions
---------------------
Permissions are granular and can be toggled by clicking the icons in the **Permissions** column.
.. list-table::
:widths: 30 60
:header-rows: 1
* - Name
- Description
* - Create File Requests
- Allows the user to generate external upload links.
* - Replace Own Uploads
- Allows the user to overwrite files they previously uploaded.
* - List Other Uploads
- Grant visibility to files uploaded by other system users.
* - Edit Other Uploads
- Allows editing files owned by others.
* - Delete Other Uploads
- Allows permanent removal of files owned by other users.
* - Manage Logs
- Grants access to view and clear system activity logs.
* - Manage Users
- Grants access to this User Management page.
* - Manage API Keys
- Allows management of API keys belonging to any user.
.. note::
Permissions for the Super Admin and your own account cannot be modified from this screen to prevent accidental lockouts.
User Account Actions
====================
Adding a New User
-----------------
1. Click the *Plus (+)* icon at the top right of the Users card.
2. Enter a unique username.
3. The user will be created with default permissions and will need a password assigned or reset.
Resetting Passwords
-------------------
If using internal authentication, click the *Key* icon:
* **Force Reset**: The user must choose a new password the next time they log in.
* **Generate Random**: The system provides a temporary password. You can copy it to your clipboard to give to the user.
User Ranks
------------------
There are three different user ranks:
* **Super Admin**: A single account with full access, whose permissions cannot be modified by other users.
* **Admin**: Has all rights by default. Is able to delete system logs and can change file owners.
* **User**: Has fewer rights by default.
Changing User Rank
------------------
Use the *Chevron Up/Down* icons to change a user's group:
* **Promote**: Upgrades a standard User to an Admin.
* **Demote**: Downgrades an Admin to a standard User.
Deleting Users
--------------
Click the **Trash** icon to remove an account.
.. warning::
When deleting a user, you will be asked if you also want to **permanently delete all files** uploaded by them. If unchecked, the files will remain on the server and their ownership will be transferred to the user who initiated the deletion.
API Menu
===============
In the API menu you can create API keys, which can be used for API access. Please refer to :ref:`api`.
The API Keys page allows you to generate and manage credentials for programmatic access to the server. These keys are used to authenticate scripts, third-party applications, or CLI tools.
.. note::
For technical implementation details and endpoint definitions, please refer to the integrated API Documentation and the section :ref:`api`
Overview of API Keys
---------------------
The API table provides a summary of all active credentials:
* **Name**: A descriptive label for the key (e.g., ``Internal Upload Tool``). You can click the name at any time to rename it.
* **API Key**: A redacted version of the key for security. When a new key is created, the full string will be displayed once - ensure you copy it immediately.
* **Last Used**: The timestamp of the most recent request made using this key.
* **Permissions**: A grid of icons representing what the key is authorized to do.
* **User**: (Admin only) Displays which system user owns the specific API key.
Managing Key Permissions
--------------------------
Permissions for API keys are granular. You can enable or disable a right by clicking its corresponding icon.
.. list-table::
:widths: 30 60
:header-rows: 1
* - Name
- Description
* - List Uploads
- View a list of files currently on the server.
* - Upload
- Permission to push new files to the server.
* - Edit Uploads
- Modify metadata of existing files.
* - Delete Uploads
- Permanently remove files via the API.
* - Replace
- Overwrite existing files with new versions.
* - Download
- Retrieve file contents programmatically without increasing the download counter
* - File Requests
- Create and manage external "File Request" links.
* - Manage Users
- Create or modify user accounts via API calls.
* - Manage Keys
- Use this key to create or delete other API keys.
.. note::
Some permissions may appear greyed out. This happens if the user who owns the key does not have that specific permission assigned to their account. An API key cannot grant more power than its owner possesses.
Key Operations
----------------
Creating a New Key
^^^^^^^^^^^^^^^^^^^
1. Click the *Plus (+)* icon in the top right corner.
2. A new key will be generated.
3. **Copy the key immediately.** For security reasons, the full key cannot be displayed again once you navigate away from the page.
Deleting a Key
^^^^^^^^^^^^^^^^^^^
To revoke access immediately, click the *Trash* icon in the Actions column. Any application using this key will instantly receive an ``Unauthorized`` error.
System Logs
==========================
The **Log File** page provides a view of system activity, security events, and file operations.
Filtering Logs
-----------------
To help you find specific information quickly, you can use the *Log Filter* dropdown menu. Selecting a category will parse the log file and display only the relevant lines.
.. list-table::
:widths: 25 75
:header-rows: 1
* - Category
- Description
* - **Warning**
- Non-critical errors or alerts that may require attention.
* - **Auth**
- Login attempts, password resets, and permission changes.
* - **Download**
- Records of files being accessed or downloaded by users/guests.
* - **Upload**
- New file creations and completed upload sessions.
* - **Edit**
- Metadata changes, file renames, and setting updates.
* - **Info**
- General operational status messages.
Log Maintenance and Cleanup
------------------------------
Over time, log files can become quite large. Administrators have access to the *Delete Logs* utility to manage storage and keep the logs readable.
.. note::
The log deletion tool is restricted to users with *Administrator* privileges. For standard users, this menu will be disabled.
Retention Options
-----------------
You can clear logs based on their age using the following presets:
* **Older than 2/7/14/30 days**: Retains recent history while purging stale data.
* **Delete all logs**: Completely clears the log file.
.. warning::
Log deletion is a permanent action. Once logs are cleared, the data cannot be recovered via the web interface. It is recommended to keep at least 7 days of logs for security auditing purposes.
+8 -2
View File
@@ -19,24 +19,30 @@ require (
golang.org/x/oauth2 v0.27.0
golang.org/x/sync v0.11.0
golang.org/x/term v0.37.0
golang.org/x/time v0.14.0
gopkg.in/yaml.v3 v3.0.1
modernc.org/sqlite v1.35.0
)
require (
github.com/alicebob/gopher-json v0.0.0-20230218143504-906a9b012302 // indirect
github.com/djherbis/atime v1.1.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-jose/go-jose/v4 v4.0.5 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/jmoiron/sqlx v1.4.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/pelletier/go-toml v1.9.5 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46 // indirect
github.com/tdewolff/minify/v2 v2.24.2 // indirect
github.com/tdewolff/parse/v2 v2.8.3 // indirect
github.com/tdewolff/argp v0.0.0-20250430135133-0f54527d2b1e // indirect
github.com/tdewolff/minify/v2 v2.24.8 // indirect
github.com/tdewolff/parse/v2 v2.8.5 // indirect
github.com/yuin/gopher-lua v1.1.1 // indirect
go.shabbyrobe.org/gocovmerge v0.0.0-20230507111327-fa4f82cfbf4d // indirect
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa // indirect
+21 -10
View File
@@ -1,3 +1,4 @@
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/Kodeworks/golang-image-ico v0.0.0-20141118225523-73f0f4cfade9 h1:1ltqoej5GtaWF8jaiA49HwsZD459jqm9YFz9ZtMFpQA=
github.com/Kodeworks/golang-image-ico v0.0.0-20141118225523-73f0f4cfade9/go.mod h1:7uhhqiBaR4CpN0k9rMjOtjpcfGd6DG2m04zQxKnWQ0I=
github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=
@@ -20,10 +21,15 @@ github.com/coreos/go-oidc/v3 v3.12.0/go.mod h1:gE3LgjOgFoHi9a4ce4/tJczr0Ai2/BoDh
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/djherbis/atime v1.1.0 h1:rgwVbP/5by8BvvjBNrbh64Qz33idKT3pSnMSJsxhi0g=
github.com/djherbis/atime v1.1.0/go.mod h1:28OF6Y8s3NQWwacXc5eZTsEsiMzp7LF8MbXE+XJPdBE=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE=
github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA=
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
github.com/gomodule/redigo v1.9.2 h1:HrutZBLhSIU8abiSfW8pj8mPhOyMYjZT/wcA4/L9L9s=
github.com/gomodule/redigo v1.9.2/go.mod h1:KsU3hiK/Ay8U42qpaJk+kuNa3C+spxapWpM+ywhcgtw=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
@@ -38,6 +44,8 @@ github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9Y
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
github.com/johannesboyne/gofakes3 v0.0.0-20250106100439-5c39aecd6999 h1:CMbkEl1h9JvRURFFprSbyy2f4Gf71SFz9h74iSAETGo=
github.com/johannesboyne/gofakes3 v0.0.0-20250106100439-5c39aecd6999/go.mod h1:t6osVdP++3g4v2awHz4+HFccij23BbdT1rX3W7IijqQ=
github.com/juju/ratelimit v1.0.2 h1:sRxmtRiajbvrcLQT7S+JbqU0ntsb9W2yhSdNN8tWfaI=
@@ -47,14 +55,18 @@ github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfn
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
@@ -74,10 +86,13 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/tdewolff/minify/v2 v2.24.2 h1:vnY3nTulEAbCAAlxTxPPDkzG24rsq31SOzp63yT+7mo=
github.com/tdewolff/minify/v2 v2.24.2/go.mod h1:1JrCtoZXaDbqioQZfk3Jdmr0GPJKiU7c1Apmb+7tCeE=
github.com/tdewolff/parse/v2 v2.8.3 h1:5VbvtJ83cfb289A1HzRA9sf02iT8YyUwN84ezjkdY1I=
github.com/tdewolff/parse/v2 v2.8.3/go.mod h1:Hwlni2tiVNKyzR1o6nUs4FOF07URA+JLBLd6dlIXYqo=
github.com/tdewolff/argp v0.0.0-20250430135133-0f54527d2b1e h1:2jfHhbjBKS2wfyvcz5W2eOkQVKv57DKM1C/QYhTovhs=
github.com/tdewolff/argp v0.0.0-20250430135133-0f54527d2b1e/go.mod h1:xw2b1X81m4zY1OGytzHNr/YKXbf/STHkK5idoNamlYE=
github.com/tdewolff/minify/v2 v2.24.8 h1:58/VjsbevI4d5FGV0ZSuBrHMSSkH4MCH0sIz/eKIauE=
github.com/tdewolff/minify/v2 v2.24.8/go.mod h1:0Ukj0CRpo/sW/nd8uZ4ccXaV1rEVIWA3dj8U7+Shhfw=
github.com/tdewolff/parse/v2 v2.8.5 h1:ZmBiA/8Do5Rpk7bDye0jbbDUpXXbCdc3iah4VeUvwYU=
github.com/tdewolff/parse/v2 v2.8.5/go.mod h1:Hwlni2tiVNKyzR1o6nUs4FOF07URA+JLBLd6dlIXYqo=
github.com/tdewolff/test v1.0.11 h1:FdLbwQVHxqG16SlkGveC0JVyrJN62COWTRyUFzfbtBE=
github.com/tdewolff/test v1.0.11/go.mod h1:XPuWBzvdUzhCuxWO1ojpXsyzsA5bFoS3tO/Q3kFuTG8=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
@@ -88,8 +103,6 @@ go.shabbyrobe.org/gocovmerge v0.0.0-20230507111327-fa4f82cfbf4d/go.mod h1:92Uoe3
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa h1:t2QcU6V556bFjYgu4L6C+6VrCPyJZ+eyRsABUPs1mz4=
@@ -127,8 +140,6 @@ golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
@@ -136,8 +147,6 @@ golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuX
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -146,6 +155,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190829051458-42f498d34c4d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+18 -14
View File
@@ -25,8 +25,8 @@ import (
"github.com/forceu/gokapi/internal/storage/filesystem"
)
// Environment is an object containing the environment variables
var Environment environment.Environment
// parsedEnvironment is an object containing the environment variables
var parsedEnvironment environment.Environment
// ServerSettings is an object containing the server configuration
var serverSettings models.Configuration
@@ -72,14 +72,14 @@ func loadFromFile(path string) (models.Configuration, error) {
// Load loads the configuration or creates the folder structure and a default configuration
func Load() {
Environment = environment.New()
parsedEnvironment = environment.New()
// No check if file exists, as this was checked earlier
settings, err := loadFromFile(Environment.ConfigPath)
settings, err := loadFromFile(parsedEnvironment.ConfigPath)
helper.Check(err)
serverSettings = settings
usesHttps = strings.HasPrefix(strings.ToLower(serverSettings.ServerUrl), "https://")
if configupgrade.DoUpgrade(&serverSettings, &Environment) {
if configupgrade.DoUpgrade(&serverSettings, &parsedEnvironment) {
save()
}
if serverSettings.PublicName == "" {
@@ -91,12 +91,9 @@ func Load() {
if serverSettings.ChunkSize == 0 {
serverSettings.ChunkSize = 45
}
serverSettings.MinLengthPassword = Environment.MinLengthPassword
serverSettings.LengthId = Environment.LengthId
serverSettings.LengthHotlinkId = Environment.LengthHotlinkId
helper.CreateDir(serverSettings.DataDir)
filesystem.Init(serverSettings.DataDir)
logging.Init(Environment.DataDir)
logging.Init(parsedEnvironment.DataDir)
}
// ConnectDatabase loads the database that is defined in the configuration
@@ -119,7 +116,7 @@ func Get() *models.Configuration {
// Save the configuration as a json file
func save() {
file, err := os.OpenFile(Environment.ConfigPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644)
file, err := os.OpenFile(parsedEnvironment.ConfigPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644)
if err != nil {
fmt.Println("Error writing configuration:", err)
os.Exit(1)
@@ -136,8 +133,8 @@ func save() {
// LoadFromSetup creates a new configuration file after a user completed the setup. If cloudConfig is not nil, a new
// cloud config file is created. If it is nil an existing cloud config file will be deleted.
func LoadFromSetup(config models.Configuration, cloudConfig *cloudconfig.CloudConfig, e2eConfig End2EndReconfigParameters, passwordHash string) {
Environment = environment.New()
helper.CreateDir(Environment.ConfigDir)
parsedEnvironment = environment.New()
helper.CreateDir(parsedEnvironment.ConfigDir)
serverSettings = config
if cloudConfig != nil {
@@ -172,6 +169,13 @@ func LoadFromSetup(config models.Configuration, cloudConfig *cloudconfig.CloudCo
}
}
// GetEnvironment returns the parsed environment variables.
// It panics if the environment has not been parsed yet (i.e. neither
// Load() nor LoadFromSetup() has been called), since the returned
// struct would otherwise only contain zero values.
func GetEnvironment() environment.Environment {
	if !parsedEnvironment.IsParsed() {
		panic("Environment is not parsed yet")
	}
	return parsedEnvironment
}
func deleteAllEncryptedStorage() {
files := database.GetAllMetadata()
for _, file := range files {
@@ -185,8 +189,8 @@ func deleteAllEncryptedStorage() {
// SetDeploymentPassword sets a new password. This should only be used for non-interactive deployment, but is not enforced
func SetDeploymentPassword(newPassword string) {
if len(newPassword) < serverSettings.MinLengthPassword {
fmt.Printf("Password needs to be at least %d characters long\n", serverSettings.MinLengthPassword)
if len(newPassword) < parsedEnvironment.MinLengthPassword {
fmt.Printf("Password needs to be at least %d characters long\n", parsedEnvironment.MinLengthPassword)
os.Exit(1)
}
serverSettings.Authentication.SaltAdmin = helper.GenerateRandomString(30)
+3 -4
View File
@@ -21,7 +21,7 @@ func TestMain(m *testing.M) {
func TestLoad(t *testing.T) {
test.IsEqualBool(t, Exists(), true)
Load()
test.IsEqualString(t, Environment.ConfigDir, "test")
test.IsEqualString(t, parsedEnvironment.ConfigDir, "test")
test.IsEqualString(t, serverSettings.Port, "127.0.0.1:53843")
test.IsEqualString(t, serverSettings.Authentication.Username, "test")
test.IsEqualString(t, serverSettings.ServerUrl, "http://127.0.0.1:53843/")
@@ -31,8 +31,8 @@ func TestLoad(t *testing.T) {
_ = os.Setenv("GOKAPI_LENGTH_ID", "20")
_ = os.Setenv("GOKAPI_LENGTH_HOTLINK_ID", "25")
Load()
test.IsEqualInt(t, serverSettings.LengthId, 20)
test.IsEqualInt(t, serverSettings.LengthHotlinkId, 25)
test.IsEqualInt(t, parsedEnvironment.LengthId, 20)
test.IsEqualInt(t, parsedEnvironment.LengthHotlinkId, 25)
_ = os.Unsetenv("GOKAPI_LENGTH_ID")
_ = os.Unsetenv("GOKAPI_LENGTH_HOTLINK_ID")
test.IsEqualInt(t, serverSettings.ConfigVersion, configupgrade.CurrentConfigVersion)
@@ -60,7 +60,6 @@ func TestLoadFromSetup(t *testing.T) {
ServerUrl: "serverurl",
RedirectUrl: "redirect",
ConfigVersion: configupgrade.CurrentConfigVersion,
LengthId: 10,
DataDir: "test",
MaxMemory: 10,
UseSsl: true,
+54 -6
View File
@@ -88,6 +88,10 @@ func Migrate(configOld, configNew models.DbConnection) {
dbNew.SaveHotlink(file)
}
}
requests := dbOld.GetAllFileRequests()
for _, request := range requests {
dbNew.SaveFileRequest(request)
}
dbOld.Close()
dbNew.Close()
}
@@ -132,6 +136,16 @@ func GetApiKey(id string) (models.ApiKey, bool) {
return db.GetApiKey(id)
}
// GetApiKeyByPublicKey returns an API key by using the public key
func GetApiKeyByPublicKey(publicKey string) (string, bool) {
return db.GetApiKeyByPublicKey(publicKey)
}
// GetApiKeyByFileRequest returns an API key used for a file request
func GetApiKeyByFileRequest(request models.FileRequest) (string, bool) {
return db.GetApiKeyByFileRequest(request)
}
// SaveApiKey saves the API key to the database
func SaveApiKey(apikey models.ApiKey) {
db.SaveApiKey(apikey)
@@ -147,11 +161,6 @@ func DeleteApiKey(id string) {
db.DeleteApiKey(id)
}
// GetApiKeyByPublicKey returns an API key by using the public key
func GetApiKeyByPublicKey(publicKey string) (string, bool) {
return db.GetApiKeyByPublicKey(publicKey)
}
// E2E Section
// SaveEnd2EndInfo stores the encrypted e2e info
@@ -290,7 +299,7 @@ func DeleteUser(id int) {
func GetSuperAdmin() (models.User, bool) {
users := db.GetAllUsers()
for _, user := range users {
if user.UserLevel == models.UserLevelSuperAdmin {
if user.IsSuperAdmin() {
return user, true
}
}
@@ -323,3 +332,42 @@ func EditSuperAdmin(username, passwordHash string) error {
db.SaveUser(user, false)
return nil
}
// File Requests
// GetFileRequest returns the FileRequest or false if not found
func GetFileRequest(id string) (models.FileRequest, bool) {
return db.GetFileRequest(id)
}
// GetAllFileRequests returns an array with all file requests, ordered by creation date
func GetAllFileRequests() []models.FileRequest {
return db.GetAllFileRequests()
}
// SaveFileRequest stores the file request associated with the file in the database
func SaveFileRequest(request models.FileRequest) {
db.SaveFileRequest(request)
}
// DeleteFileRequest deletes a file request with the given ID
func DeleteFileRequest(request models.FileRequest) {
db.DeleteFileRequest(request)
}
// Presigned URLs
// GetPresignedUrl returns the presigned url with the given ID or false if not a valid ID
func GetPresignedUrl(id string) (models.Presign, bool) {
return db.GetPresignedUrl(id)
}
// DeletePresignedUrl deletes the presigned url with the given ID
func DeletePresignedUrl(id string) {
db.DeletePresignedUrl(id)
}
// SavePresignedUrl saves the presigned url
func SavePresignedUrl(presign models.Presign) {
db.SavePresignedUrl(presign)
}
@@ -38,6 +38,8 @@ type Database interface {
GetAllApiKeys() map[string]models.ApiKey
// GetApiKey returns a models.ApiKey if valid or false if the ID is not valid
GetApiKey(id string) (models.ApiKey, bool)
// GetApiKeyByFileRequest returns an API key used for a file request
GetApiKeyByFileRequest(request models.FileRequest) (string, bool)
// SaveApiKey saves the API key to the database
SaveApiKey(apikey models.ApiKey)
// UpdateTimeApiKey writes the content of LastUsage to the database
@@ -97,6 +99,22 @@ type Database interface {
UpdateUserLastOnline(id int)
// DeleteUser deletes a user with the given ID
DeleteUser(id int)
// GetFileRequest returns the FileRequest or false if not found
GetFileRequest(id string) (models.FileRequest, bool)
// GetAllFileRequests returns an array with all file requests, ordered by creation date
GetAllFileRequests() []models.FileRequest
// SaveFileRequest stores the file request associated with the file in the database
SaveFileRequest(request models.FileRequest)
// DeleteFileRequest deletes a file request with the given ID
DeleteFileRequest(request models.FileRequest)
// GetPresignedUrl returns the presigned url with the given ID or false if not a valid ID
GetPresignedUrl(id string) (models.Presign, bool)
// DeletePresignedUrl deletes the presigned url with the given ID
DeletePresignedUrl(id string)
// SavePresignedUrl saves the presigned url
SavePresignedUrl(presign models.Presign)
}
// GetNew connects to the given database and initialises it
@@ -8,6 +8,7 @@ import (
"strings"
"time"
"github.com/forceu/gokapi/internal/environment"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
redigo "github.com/gomodule/redigo/redis"
@@ -20,7 +21,7 @@ type DatabaseProvider struct {
}
// DatabaseSchemeVersion contains the version number to be expected from the current database. If lower, an upgrade will be performed
const DatabaseSchemeVersion = 5
const DatabaseSchemeVersion = 6
// New returns an instance
func New(dbConfig models.DbConnection) (DatabaseProvider, error) {
@@ -117,10 +118,25 @@ func newPool(config models.DbConnection) *redigo.Pool {
func (p DatabaseProvider) Upgrade(currentDbVersion int) {
// < v2.0.0
if currentDbVersion < 5 {
fmt.Println("Error: Gokapi runs >=v2.0.0, but Database is <v2.0.0")
fmt.Println("Error: Gokapi runs >=v2.0.0, but Database is <v2.0.0. Please update to v2.0.0 first.")
osExit(1)
return
}
// < v2.2.0
if currentDbVersion < 6 {
grantUploadPerm := environment.New().PermRequestGrantedByDefault
for _, user := range p.GetAllUsers() {
if grantUploadPerm || user.IsAdmin() {
user.GrantPermission(models.UserPermGuestUploads)
p.SaveUser(user, false)
}
}
for _, apiKey := range p.GetAllApiKeys() {
if apiKey.IsSystemKey {
p.DeleteApiKey(apiKey.Id)
}
}
}
}
const keyDbVersion = "dbversion"
@@ -53,6 +53,17 @@ func (p DatabaseProvider) GetApiKeyByPublicKey(publicKey string) (string, bool)
return "", false
}
// GetApiKeyByFileRequest returns an API key used for a file request
func (p DatabaseProvider) GetApiKeyByFileRequest(request models.FileRequest) (string, bool) {
keys := p.GetAllApiKeys()
for _, key := range keys {
if key.UploadRequestId == request.Id {
return key.Id, true
}
}
return "", false
}
// SaveApiKey saves the API key to the database
func (p DatabaseProvider) SaveApiKey(apikey models.ApiKey) {
p.setHashMap(p.buildArgs(prefixApiKeys + apikey.Id).AddFlat(apikey))
@@ -0,0 +1,69 @@
package redis
import (
"cmp"
"slices"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
redigo "github.com/gomodule/redigo/redis"
)
const (
prefixFileRequests = "frq:"
)
// dbToFileRequest converts a Redis hash reply into a models.FileRequest.
func dbToFileRequest(input []any) (models.FileRequest, error) {
	var request models.FileRequest
	if err := redigo.ScanStruct(input, &request); err != nil {
		return models.FileRequest{}, err
	}
	return request, nil
}
// GetFileRequest returns the FileRequest or false if not found
func (p DatabaseProvider) GetFileRequest(id string) (models.FileRequest, bool) {
	// An empty ID can never be a valid key
	if id == "" {
		return models.FileRequest{}, false
	}
	hashmapEntry, ok := p.getHashMap(prefixFileRequests + id)
	if !ok {
		return models.FileRequest{}, false
	}
	parsedRequest, err := dbToFileRequest(hashmapEntry)
	helper.Check(err)
	return parsedRequest, true
}
// GetAllFileRequests returns an array with all file requests, ordered by creation date
func (p DatabaseProvider) GetAllFileRequests() []models.FileRequest {
	var requests []models.FileRequest
	for _, hashValues := range p.getAllHashesWithPrefix(prefixFileRequests) {
		parsedRequest, err := dbToFileRequest(hashValues)
		helper.Check(err)
		requests = append(requests, parsedRequest)
	}
	return sortFilerequests(requests)
}
// sortFilerequests sorts the given file requests in place: newest creation
// date first, falling back to the request name for requests created at the
// same time. It returns the same slice for convenience.
// Note: parameter renamed from the misleading "users" (copied from sortUsers).
func sortFilerequests(requests []models.FileRequest) []models.FileRequest {
	slices.SortFunc(requests, func(a, b models.FileRequest) int {
		return cmp.Or(
			// b before a => descending by creation date (newest first)
			cmp.Compare(b.CreationDate, a.CreationDate),
			cmp.Compare(a.Name, b.Name),
		)
	})
	return requests
}
// SaveFileRequest stores the file request associated with the file in the database.
// Bug fix: the entry was previously stored under prefixUsers, which meant
// GetFileRequest/DeleteFileRequest (both reading prefixFileRequests) could
// never find a saved request and user keys were polluted.
func (p DatabaseProvider) SaveFileRequest(request models.FileRequest) {
	p.setHashMap(p.buildArgs(prefixFileRequests + request.Id).AddFlat(request))
}
// DeleteFileRequest deletes a file request with the given ID
func (p DatabaseProvider) DeleteFileRequest(request models.FileRequest) {
	key := prefixFileRequests + request.Id
	p.deleteKey(key)
}
@@ -0,0 +1,34 @@
package redis
import (
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
redigo "github.com/gomodule/redigo/redis"
)
const (
prefixPresign = "ps:"
)
// GetPresignedUrl returns the presigned url with the given ID or false if not a valid ID.
// An empty ID is rejected up front, mirroring the guard in GetFileRequest,
// so a bare prefix is never used as a lookup key.
func (p DatabaseProvider) GetPresignedUrl(id string) (models.Presign, bool) {
	if id == "" {
		return models.Presign{}, false
	}
	hashmapEntry, ok := p.getHashMap(prefixPresign + id)
	if !ok {
		return models.Presign{}, false
	}
	var result models.Presign
	err := redigo.ScanStruct(hashmapEntry, &result)
	helper.Check(err)
	return result, true
}
// SavePresignedUrl saves the presigned url
func (p DatabaseProvider) SavePresignedUrl(presign models.Presign) {
	key := prefixPresign + presign.Id
	p.setHashMap(p.buildArgs(key).AddFlat(presign))
	// Let Redis expire the entry on its own once the URL is no longer valid
	p.setExpiryAt(key, presign.Expiry)
}
// DeletePresignedUrl deletes the presigned url with the given ID
func (p DatabaseProvider) DeletePresignedUrl(id string) {
	key := prefixPresign + id
	p.deleteKey(key)
}
@@ -2,12 +2,13 @@ package redis
import (
"cmp"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
redigo "github.com/gomodule/redigo/redis"
"slices"
"strconv"
"time"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
redigo "github.com/gomodule/redigo/redis"
)
const (
@@ -33,10 +34,10 @@ func (p DatabaseProvider) GetAllUsers() []models.User {
helper.Check(err)
result = append(result, user)
}
return orderUsers(result)
return sortUsers(result)
}
func orderUsers(users []models.User) []models.User {
func sortUsers(users []models.User) []models.User {
slices.SortFunc(users, func(a, b models.User) int {
return cmp.Or(
cmp.Compare(a.UserLevel, b.UserLevel),
@@ -7,9 +7,11 @@ import (
"os"
"path/filepath"
"github.com/forceu/gokapi/internal/environment"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
// Required for sqlite driver
// Required for the sqlite driver
_ "modernc.org/sqlite"
)
@@ -19,7 +21,7 @@ type DatabaseProvider struct {
}
// DatabaseSchemeVersion contains the version number to be expected from the current database. If lower, an upgrade will be performed
const DatabaseSchemeVersion = 11
const DatabaseSchemeVersion = 12
// New returns an instance
func New(dbConfig models.DbConnection) (DatabaseProvider, error) {
@@ -35,15 +37,53 @@ func (p DatabaseProvider) GetType() int {
func (p DatabaseProvider) Upgrade(currentDbVersion int) {
// < v2.0.0
if currentDbVersion < 10 {
fmt.Println("Error: Gokapi runs >=v2.0.0, but Database is <v2.0.0")
fmt.Println("Error: Gokapi runs >=v2.0.0, but Database is <v2.0.0. Please update to v2.0.0 first.")
osExit(1)
return
}
// pre local TZ
// < v2.2.0-dev
if currentDbVersion < 11 {
err := p.rawSqlite("ALTER TABLE FileMetaData DROP COLUMN ExpireAtString;")
helper.Check(err)
}
// < v2.2.0
if currentDbVersion < 12 {
err := p.rawSqlite(`ALTER TABLE FileMetaData ADD COLUMN "UploadRequestId" TEXT NOT NULL DEFAULT '';
ALTER TABLE ApiKeys ADD COLUMN "UploadRequestId" TEXT NOT NULL DEFAULT '';
CREATE TABLE "UploadRequests" (
"id" TEXT NOT NULL UNIQUE,
"name" TEXT NOT NULL,
"userid" INTEGER NOT NULL,
"expiry" INTEGER NOT NULL,
"maxFiles" INTEGER NOT NULL,
"maxSize" INTEGER NOT NULL,
"creation" INTEGER NOT NULL,
"apiKey" TEXT NOT NULL UNIQUE,
"note" TEXT NOT NULL,
PRIMARY KEY("id")
);
CREATE TABLE "Presign" (
"id" TEXT NOT NULL UNIQUE,
"fileIds" TEXT NOT NULL,
"expiry" INTEGER NOT NULL,
"filename" TEXT NOT NULL,
PRIMARY KEY("id")
);`)
helper.Check(err)
grantUploadPerm := environment.New().PermRequestGrantedByDefault
for _, user := range p.GetAllUsers() {
if grantUploadPerm || user.IsAdmin() {
user.GrantPermission(models.UserPermGuestUploads)
p.SaveUser(user, false)
}
}
for _, apiKey := range p.GetAllApiKeys() {
if apiKey.IsSystemKey {
p.DeleteApiKey(apiKey.Id)
}
}
}
}
// GetDbVersion gets the version number of the database
@@ -61,7 +101,7 @@ func (p DatabaseProvider) SetDbVersion(newVersion int) {
helper.Check(err)
}
// GetSchemaVersion returns the version number, that the database should be if fully upgraded
// GetSchemaVersion returns the version number, which the database should be at if fully upgraded
func (p DatabaseProvider) GetSchemaVersion() int {
return DatabaseSchemeVersion
}
@@ -114,6 +154,7 @@ func (p DatabaseProvider) Close() {
func (p DatabaseProvider) RunGarbageCollection() {
p.cleanExpiredSessions()
p.cleanApiKeys()
p.cleanPresignedUrls()
}
func (p DatabaseProvider) createNewDatabase() error {
@@ -126,6 +167,7 @@ func (p DatabaseProvider) createNewDatabase() error {
"IsSystemKey" INTEGER,
"UserId" INTEGER NOT NULL,
"PublicId" TEXT NOT NULL UNIQUE ,
"UploadRequestId" TEXT NOT NULL,
PRIMARY KEY("Id")
) WITHOUT ROWID;
CREATE TABLE "E2EConfig" (
@@ -153,6 +195,7 @@ func (p DatabaseProvider) createNewDatabase() error {
"UserId" INTEGER NOT NULL,
"UploadDate" INTEGER NOT NULL,
"PendingDeletion" INTEGER NOT NULL,
"UploadRequestId" TEXT NOT NULL,
PRIMARY KEY("Id")
);
CREATE TABLE "Hotlinks" (
@@ -177,6 +220,25 @@ func (p DatabaseProvider) createNewDatabase() error {
"ResetPassword" INTEGER NOT NULL DEFAULT 0,
PRIMARY KEY("Id" AUTOINCREMENT)
);
CREATE TABLE "UploadRequests" (
"id" TEXT NOT NULL UNIQUE,
"name" TEXT,
"userid" INTEGER NOT NULL,
"expiry" INTEGER NOT NULL,
"maxFiles" INTEGER NOT NULL,
"maxSize" INTEGER NOT NULL,
"creation" INTEGER NOT NULL,
"apiKey" TEXT NOT NULL UNIQUE,
"note" TEXT NOT NULL,
PRIMARY KEY("id")
);
CREATE TABLE "Presign" (
"id" TEXT NOT NULL UNIQUE,
"fileIds" TEXT NOT NULL,
"expiry" INTEGER NOT NULL,
"filename" TEXT NOT NULL,
PRIMARY KEY("id")
);
`
err := p.rawSqlite(sqlStmt)
if err != nil {
@@ -10,14 +10,15 @@ import (
)
type schemaApiKeys struct {
Id string
FriendlyName string
LastUsed int64
Permissions int
Expiry int64
IsSystemKey int
UserId int
PublicId string
Id string
FriendlyName string
LastUsed int64
Permissions int
Expiry int64
IsSystemKey int
UserId int
PublicId string
UploadRequestId string
}
// currentTime is used in order to modify the current time for testing purposes in unit tests
@@ -34,17 +35,19 @@ func (p DatabaseProvider) GetAllApiKeys() map[string]models.ApiKey {
defer rows.Close()
for rows.Next() {
rowData := schemaApiKeys{}
err = rows.Scan(&rowData.Id, &rowData.FriendlyName, &rowData.LastUsed, &rowData.Permissions, &rowData.Expiry, &rowData.IsSystemKey, &rowData.UserId, &rowData.PublicId)
err = rows.Scan(&rowData.Id, &rowData.FriendlyName, &rowData.LastUsed, &rowData.Permissions, &rowData.Expiry,
&rowData.IsSystemKey, &rowData.UserId, &rowData.PublicId, &rowData.UploadRequestId)
helper.Check(err)
result[rowData.Id] = models.ApiKey{
Id: rowData.Id,
PublicId: rowData.PublicId,
FriendlyName: rowData.FriendlyName,
LastUsed: rowData.LastUsed,
Permissions: models.ApiPermission(rowData.Permissions),
Expiry: rowData.Expiry,
IsSystemKey: rowData.IsSystemKey == 1,
UserId: rowData.UserId,
Id: rowData.Id,
PublicId: rowData.PublicId,
FriendlyName: rowData.FriendlyName,
LastUsed: rowData.LastUsed,
Permissions: models.ApiPermission(rowData.Permissions),
Expiry: rowData.Expiry,
IsSystemKey: rowData.IsSystemKey == 1,
UserId: rowData.UserId,
UploadRequestId: rowData.UploadRequestId,
}
}
return result
@@ -54,7 +57,8 @@ func (p DatabaseProvider) GetAllApiKeys() map[string]models.ApiKey {
func (p DatabaseProvider) GetApiKey(id string) (models.ApiKey, bool) {
var rowResult schemaApiKeys
row := p.sqliteDb.QueryRow("SELECT * FROM ApiKeys WHERE Id = ?", id)
err := row.Scan(&rowResult.Id, &rowResult.FriendlyName, &rowResult.LastUsed, &rowResult.Permissions, &rowResult.Expiry, &rowResult.IsSystemKey, &rowResult.UserId, &rowResult.PublicId)
err := row.Scan(&rowResult.Id, &rowResult.FriendlyName, &rowResult.LastUsed, &rowResult.Permissions, &rowResult.Expiry,
&rowResult.IsSystemKey, &rowResult.UserId, &rowResult.PublicId, &rowResult.UploadRequestId)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return models.ApiKey{}, false
@@ -64,14 +68,15 @@ func (p DatabaseProvider) GetApiKey(id string) (models.ApiKey, bool) {
}
result := models.ApiKey{
Id: rowResult.Id,
PublicId: rowResult.PublicId,
FriendlyName: rowResult.FriendlyName,
LastUsed: rowResult.LastUsed,
Permissions: models.ApiPermission(rowResult.Permissions),
Expiry: rowResult.Expiry,
IsSystemKey: rowResult.IsSystemKey == 1,
UserId: rowResult.UserId,
Id: rowResult.Id,
PublicId: rowResult.PublicId,
FriendlyName: rowResult.FriendlyName,
LastUsed: rowResult.LastUsed,
Permissions: models.ApiPermission(rowResult.Permissions),
Expiry: rowResult.Expiry,
IsSystemKey: rowResult.IsSystemKey == 1,
UserId: rowResult.UserId,
UploadRequestId: rowResult.UploadRequestId,
}
return result, true
@@ -92,14 +97,29 @@ func (p DatabaseProvider) GetApiKeyByPublicKey(publicKey string) (string, bool)
return rowResult.Id, true
}
// GetApiKeyByFileRequest returns an API key used for a file request
func (p DatabaseProvider) GetApiKeyByFileRequest(request models.FileRequest) (string, bool) {
var rowResult schemaApiKeys
row := p.sqliteDb.QueryRow("SELECT Id FROM ApiKeys WHERE UploadRequestId = ? LIMIT 1", request.Id)
err := row.Scan(&rowResult.Id)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return "", false
}
helper.Check(err)
return "", false
}
return rowResult.Id, true
}
// SaveApiKey saves the API key to the database
func (p DatabaseProvider) SaveApiKey(apikey models.ApiKey) {
isSystemKey := 0
if apikey.IsSystemKey {
isSystemKey = 1
}
_, err := p.sqliteDb.Exec("INSERT OR REPLACE INTO ApiKeys (Id, FriendlyName, LastUsed, Permissions, Expiry, IsSystemKey, UserId, PublicId) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
apikey.Id, apikey.FriendlyName, apikey.LastUsed, apikey.Permissions, apikey.Expiry, isSystemKey, apikey.UserId, apikey.PublicId)
_, err := p.sqliteDb.Exec("INSERT OR REPLACE INTO ApiKeys (Id, FriendlyName, LastUsed, Permissions, Expiry, IsSystemKey, UserId, PublicId, UploadRequestId) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
apikey.Id, apikey.FriendlyName, apikey.LastUsed, apikey.Permissions, apikey.Expiry, isSystemKey, apikey.UserId, apikey.PublicId, apikey.UploadRequestId)
helper.Check(err)
}
@@ -0,0 +1,107 @@
package sqlite
import (
"database/sql"
"errors"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
)
// schemaFileRequests mirrors one row of the UploadRequests table.
// Field order must match the table's column order, as the queries
// in this file use SELECT * and positional Scan().
type schemaFileRequests struct {
	Id       string // Primary key of the file request
	Name     string // Display name given by the creator
	UserId   int    // ID of the owning user
	Expiry   int64  // UTC timestamp of expiry; semantics of 0 defined by models.FileRequest
	MaxFiles int    // Maximum number of files allowed
	MaxSize  int    // Maximum file size allowed in MB
	Creation int64  // UTC timestamp of creation
	ApiKey   string // API key associated with this request
	Note     string // Free-form note set by the creator
}
// GetFileRequest returns the FileRequest or false if not found
func (p DatabaseProvider) GetFileRequest(id string) (models.FileRequest, bool) {
if id == "" {
return models.FileRequest{}, false
}
var rowResult schemaFileRequests
row := p.sqliteDb.QueryRow("SELECT * FROM UploadRequests WHERE Id = ?", id)
err := row.Scan(&rowResult.Id, &rowResult.Name, &rowResult.UserId, &rowResult.Expiry,
&rowResult.MaxFiles, &rowResult.MaxSize, &rowResult.Creation, &rowResult.ApiKey, &rowResult.Note)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return models.FileRequest{}, false
}
helper.Check(err)
return models.FileRequest{}, false
}
result := models.FileRequest{
Id: rowResult.Id,
Name: rowResult.Name,
UserId: rowResult.UserId,
MaxFiles: rowResult.MaxFiles,
MaxSize: rowResult.MaxSize,
Expiry: rowResult.Expiry,
CreationDate: rowResult.Creation,
ApiKey: rowResult.ApiKey,
Notes: rowResult.Note,
}
return result, true
}
// GetAllFileRequests returns an array with all file requests, ordered by creation date
func (p DatabaseProvider) GetAllFileRequests() []models.FileRequest {
result := make([]models.FileRequest, 0)
rows, err := p.sqliteDb.Query("SELECT * FROM UploadRequests ORDER BY Creation DESC, Name")
helper.Check(err)
defer rows.Close()
for rows.Next() {
rowData := schemaFileRequests{}
err = rows.Scan(&rowData.Id, &rowData.Name, &rowData.UserId, &rowData.Expiry, &rowData.MaxFiles,
&rowData.MaxSize, &rowData.Creation, &rowData.ApiKey, &rowData.Note)
helper.Check(err)
result = append(result, models.FileRequest{
Id: rowData.Id,
Name: rowData.Name,
UserId: rowData.UserId,
MaxFiles: rowData.MaxFiles,
MaxSize: rowData.MaxSize,
Expiry: rowData.Expiry,
CreationDate: rowData.Creation,
ApiKey: rowData.ApiKey,
Notes: rowData.Note,
})
}
return result
}
// SaveFileRequest stores the file request associated with the file in the database
func (p DatabaseProvider) SaveFileRequest(request models.FileRequest) {
newData := schemaFileRequests{
Id: request.Id,
Name: request.Name,
UserId: request.UserId,
MaxFiles: request.MaxFiles,
MaxSize: request.MaxSize,
Expiry: request.Expiry,
Creation: request.CreationDate,
ApiKey: request.ApiKey,
Note: request.Notes,
}
_, err := p.sqliteDb.Exec(`INSERT OR REPLACE INTO UploadRequests
(id, name, userid, expiry, maxFiles, maxSize, creation, apiKey, note)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
newData.Id, newData.Name, newData.UserId, newData.Expiry, newData.MaxFiles, newData.MaxSize, newData.Creation, newData.ApiKey, newData.Note)
helper.Check(err)
}
// DeleteFileRequest deletes a file request with the given ID
func (p DatabaseProvider) DeleteFileRequest(request models.FileRequest) {
if request.Id == "" {
return
}
_, err := p.sqliteDb.Exec("DELETE FROM UploadRequests WHERE Id = ?", request.Id)
helper.Check(err)
}
@@ -29,6 +29,7 @@ type schemaMetaData struct {
UserId int
UploadDate int64
PendingDeletion int64
UploadRequestId string
}
func (rowData schemaMetaData) ToFileModel() (models.File, error) {
@@ -51,6 +52,7 @@ func (rowData schemaMetaData) ToFileModel() (models.File, error) {
UserId: rowData.UserId,
UploadDate: rowData.UploadDate,
PendingDeletion: rowData.PendingDeletion,
UploadRequestId: rowData.UploadRequestId,
}
buf := bytes.NewBuffer(rowData.Encryption)
@@ -68,9 +70,9 @@ func (p DatabaseProvider) GetAllMetadata() map[string]models.File {
for rows.Next() {
rowData := schemaMetaData{}
err = rows.Scan(&rowData.Id, &rowData.Name, &rowData.Size, &rowData.SHA1, &rowData.ExpireAt, &rowData.SizeBytes,
&rowData.DownloadsRemaining, &rowData.DownloadCount, &rowData.PasswordHash, &rowData.HotlinkId,
&rowData.ContentType, &rowData.AwsBucket, &rowData.Encryption, &rowData.UnlimitedDownloads,
&rowData.UnlimitedTime, &rowData.UserId, &rowData.UploadDate, &rowData.PendingDeletion)
&rowData.DownloadsRemaining, &rowData.DownloadCount, &rowData.PasswordHash, &rowData.HotlinkId, &rowData.ContentType,
&rowData.AwsBucket, &rowData.Encryption, &rowData.UnlimitedDownloads, &rowData.UnlimitedTime, &rowData.UserId,
&rowData.UploadDate, &rowData.PendingDeletion, &rowData.UploadRequestId)
helper.Check(err)
var metaData models.File
metaData, err = rowData.ToFileModel()
@@ -87,9 +89,10 @@ func (p DatabaseProvider) GetMetaDataById(id string) (models.File, bool) {
row := p.sqliteDb.QueryRow("SELECT * FROM FileMetaData WHERE Id = ?", id)
err := row.Scan(&rowData.Id, &rowData.Name, &rowData.Size, &rowData.SHA1, &rowData.ExpireAt, &rowData.SizeBytes,
&rowData.DownloadsRemaining, &rowData.DownloadCount, &rowData.PasswordHash, &rowData.HotlinkId,
&rowData.ContentType, &rowData.AwsBucket, &rowData.Encryption, &rowData.UnlimitedDownloads,
&rowData.UnlimitedTime, &rowData.UserId, &rowData.UploadDate, &rowData.PendingDeletion)
&rowData.DownloadsRemaining, &rowData.DownloadCount, &rowData.PasswordHash,
&rowData.HotlinkId, &rowData.ContentType, &rowData.AwsBucket, &rowData.Encryption,
&rowData.UnlimitedDownloads, &rowData.UnlimitedTime, &rowData.UserId, &rowData.UploadDate,
&rowData.PendingDeletion, &rowData.UploadRequestId)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return result, false
@@ -120,6 +123,7 @@ func (p DatabaseProvider) SaveMetaData(file models.File) {
UserId: file.UserId,
UploadDate: file.UploadDate,
PendingDeletion: file.PendingDeletion,
UploadRequestId: file.UploadRequestId,
}
if file.UnlimitedDownloads {
@@ -137,12 +141,12 @@ func (p DatabaseProvider) SaveMetaData(file models.File) {
_, err = p.sqliteDb.Exec(`INSERT OR REPLACE INTO FileMetaData (Id, Name, Size, SHA1, ExpireAt, SizeBytes,
DownloadsRemaining, DownloadCount, PasswordHash, HotlinkId, ContentType, AwsBucket, Encryption,
UnlimitedDownloads, UnlimitedTime, UserId, UploadDate, PendingDeletion)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
newData.Id, newData.Name, newData.Size, newData.SHA1, newData.ExpireAt, newData.SizeBytes, newData.DownloadsRemaining,
newData.DownloadCount, newData.PasswordHash, newData.HotlinkId, newData.ContentType, newData.AwsBucket,
newData.Encryption, newData.UnlimitedDownloads, newData.UnlimitedTime, newData.UserId, newData.UploadDate,
newData.PendingDeletion)
UnlimitedDownloads, UnlimitedTime, UserId, UploadDate, PendingDeletion, UploadRequestId)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?)`,
newData.Id, newData.Name, newData.Size, newData.SHA1, newData.ExpireAt, newData.SizeBytes,
newData.DownloadsRemaining, newData.DownloadCount, newData.PasswordHash, newData.HotlinkId, newData.ContentType,
newData.AwsBucket, newData.Encryption, newData.UnlimitedDownloads, newData.UnlimitedTime, newData.UserId, newData.UploadDate,
newData.PendingDeletion, newData.UploadRequestId)
helper.Check(err)
}
@@ -0,0 +1,56 @@
package sqlite
import (
"database/sql"
"errors"
"strings"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
)
// schemaPresign mirrors one row of the Presign table. The FileId column
// stores multiple file IDs as a single comma-separated string; it is
// split/joined when converting to/from models.Presign.
type schemaPresign struct {
	Id       string // Primary key of the presigned URL
	FileId   string // Comma-separated list of file IDs covered by this URL
	Expiry   int64  // UTC timestamp after which the URL is invalid
	Filename string // Filename presented on download
}
// GetPresignedUrl returns the presigned url with the given ID or false if not a valid ID
func (p DatabaseProvider) GetPresignedUrl(id string) (models.Presign, bool) {
var rowResult schemaPresign
row := p.sqliteDb.QueryRow("SELECT * FROM Presign WHERE id = ?", id)
err := row.Scan(&rowResult.Id, &rowResult.FileId, &rowResult.Expiry, &rowResult.Filename)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return models.Presign{}, false
}
helper.Check(err)
return models.Presign{}, false
}
result := models.Presign{
Id: rowResult.Id,
FileIds: strings.Split(rowResult.FileId, ","),
Expiry: rowResult.Expiry,
Filename: rowResult.Filename,
}
return result, true
}
// SavePresignedUrl saves the presigned url
func (p DatabaseProvider) SavePresignedUrl(presign models.Presign) {
_, err := p.sqliteDb.Exec("INSERT OR REPLACE INTO Presign (id, fileIds, expiry, filename) VALUES (?, ?, ?, ?)",
presign.Id, strings.Join(presign.FileIds, ","), presign.Expiry, presign.Filename)
helper.Check(err)
}
// DeletePresignedUrl deletes the presigned url with the given ID
func (p DatabaseProvider) DeletePresignedUrl(id string) {
_, err := p.sqliteDb.Exec("DELETE FROM Presign WHERE id = ?", id)
helper.Check(err)
}
// cleanPresignedUrls removes all presigned URLs whose expiry timestamp
// lies in the past. currentTime() is used instead of time.Now() directly,
// presumably to allow mocking in tests — TODO confirm.
func (p DatabaseProvider) cleanPresignedUrls() {
	_, err := p.sqliteDb.Exec("DELETE FROM Presign WHERE expiry < ?", currentTime().Unix())
	helper.Check(err)
}
@@ -3,9 +3,10 @@ package sqlite
import (
"database/sql"
"errors"
"time"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
"time"
)
type schemaUser struct {
@@ -37,7 +38,7 @@ func (s schemaUser) ToUser() models.User {
// GetAllUsers returns a map with all users
func (p DatabaseProvider) GetAllUsers() []models.User {
var result []models.User
rows, err := p.sqliteDb.Query("SELECT * FROM Users ORDER BY Userlevel ASC, LastOnline DESC, Name ASC")
rows, err := p.sqliteDb.Query("SELECT * FROM Users ORDER BY Userlevel , LastOnline DESC, Name ")
helper.Check(err)
defer rows.Close()
for rows.Next() {
@@ -6,4 +6,4 @@ package setup
// protectedUrls contains a list of URLs that need to be protected if authentication is disabled.
// This list will be displayed during the setup
var protectedUrls = []string{"/admin", "/apiKeys", "/auth/token", "/changePassword", "/e2eSetup", "/logs", "/uploadChunk", "/uploadStatus", "/users"}
var protectedUrls = []string{"/admin", "/apiKeys", "/auth/token", "/changePassword", "/downloadPresigned", "/e2eSetup", "/filerequests", "/logs", "/uploadChunk", "/uploadStatus", "/users"}
-2
View File
@@ -241,11 +241,9 @@ func toConfiguration(formObjects *[]jsonFormObject) (models.Configuration, *clou
result := models.Configuration{
MaxFileSizeMB: parsedEnv.MaxFileSize,
LengthId: parsedEnv.LengthId,
MaxMemory: parsedEnv.MaxMemory,
DataDir: parsedEnv.DataDir,
MaxParallelUploads: parsedEnv.MaxParallelUploads,
MinLengthPassword: parsedEnv.MinLengthPassword,
ChunkSize: parsedEnv.ChunkSizeMB,
ConfigVersion: configupgrade.CurrentConfigVersion,
Authentication: models.AuthenticationConfig{},
@@ -147,7 +147,6 @@ var config = models.Configuration{
ServerUrl: "",
RedirectUrl: "",
ConfigVersion: 0,
LengthId: 0,
DataDir: "",
MaxMemory: 0,
UseSsl: false,
+6 -5
View File
@@ -8,14 +8,15 @@ import (
"encoding/hex"
"errors"
"fmt"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
"github.com/secure-io/sio-go"
"golang.org/x/crypto/scrypt"
"io"
"log"
"os"
"time"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
"github.com/secure-io/sio-go"
"golang.org/x/crypto/scrypt"
)
// NoEncryption means all files are stored in plaintext
@@ -172,7 +173,7 @@ func DecryptReader(encInfo models.EncryptionInfo, input io.Reader, output io.Wri
return err
}
// IsCorrectKey checks if correct key is being used. This does not check for complete file authentication.
// IsCorrectKey checks if the correct key is being used. This does not check for complete file authentication.
func IsCorrectKey(encInfo models.EncryptionInfo, input *os.File) bool {
_, err := createDecryptReader(encInfo, input)
if err != nil {
+15
View File
@@ -38,6 +38,16 @@ type Environment struct {
// Sets the amount of RAM in MB that can be allocated for an upload chunk or file
// Any chunk or file with a size greater than that will be written to a temporary file
MaxMemory int `env:"MAX_MEMORY_UPLOAD" envDefault:"50" onlyPositive:"true" persistent:"true"`
// Sets the maximum number of files that can be uploaded per file requests created by
// non-admin users
// Set to 0 to allow unlimited file count for all users
MaxFilesGuestUpload int `env:"MAX_FILES_GUESTUPLOAD" envDefault:"100" onlyPositive:"true"`
// Sets the maximum file size for file requests created by
// non-admin users
// Set to 0 to allow files with a size of up to a value set with GOKAPI_MAX_FILESIZE
// for all users
// Default 10240 = 10GB
MaxSizeGuestUploadMb int `env:"MAX_SIZE_GUESTUPLOAD" envDefault:"10240" onlyPositive:"true"`
// Set the number of chunks that are uploaded in parallel for a single file
MaxParallelUploads int `env:"MAX_PARALLEL_UPLOADS" envDefault:"3" onlyPositive:"true" persistent:"true"`
// Sets the minium free space on the disk in MB for accepting an upload
@@ -48,6 +58,8 @@ type Environment struct {
WebserverPort int `env:"PORT" envDefault:"53842" onlyPositive:"true" persistent:"true"`
// Disables the CORS check on startup and during setup, if set to true
DisableCorsCheck bool `env:"DISABLE_CORS_CHECK" envDefault:"false"`
// Allows all users by default to create file requests, if set to true
PermRequestGrantedByDefault bool `env:"GUEST_UPLOAD_BY_DEFAULT" envDefault:"false"`
// Also outputs all log file entries to the console output, if set to true
LogToStdout bool `env:"LOG_STDOUT" envDefault:"false"`
// Allow hotlinking of videos. Note: Due to buffering, playing a video might count as
@@ -172,6 +184,9 @@ func enforceIntLimits(result *Environment) error {
func parseFlags(result Environment) Environment {
flags := flagparser.ParseFlags()
if flags.IsPortSet {
if flags.Port < 1 {
flags.Port = DefaultPort
}
result.WebserverPort = flags.Port
}
if flags.IsConfigDirSet {
+21 -2
View File
@@ -115,8 +115,12 @@ func LogDownload(file models.File, r *http.Request, saveIp bool) {
}
// LogUpload adds a log entry when an upload was created. Non-Blocking
func LogUpload(file models.File, user models.User) {
createLogEntry(categoryUpload, fmt.Sprintf("%s, ID %s, uploaded by %s (user #%d)", file.Name, file.Id, user.Name, user.Id), false)
// LogUpload adds a log entry when an upload was created. Non-Blocking.
// If fr has a non-empty ID, the entry records that the file was uploaded
// to that file request and names the request's owner; otherwise a plain
// upload entry is written for the uploading user.
func LogUpload(file models.File, user models.User, fr models.FileRequest) {
	if fr.Id != "" {
		// Fix: removed stray trailing space before the closing quote so the
		// message format matches the other log entries
		createLogEntry(categoryUpload, fmt.Sprintf("%s, ID %s, uploaded to file request %s (%s), owned by %s (user #%d)", file.Name, file.Id, fr.Id, fr.Name, user.Name, user.Id), false)
	} else {
		createLogEntry(categoryUpload, fmt.Sprintf("%s, ID %s, uploaded by %s (user #%d)", file.Name, file.Id, user.Name, user.Id), false)
	}
}
// LogEdit adds a log entry when an upload was edited. Non-Blocking
@@ -124,6 +128,21 @@ func LogEdit(file models.File, user models.User) {
createLogEntry(categoryEdit, fmt.Sprintf("%s, ID %s, edited by %s (user #%d)", file.Name, file.Id, user.Name, user.Id), false)
}
// LogCreateFileRequest adds a log entry when a file request was added. Non-Blocking
func LogCreateFileRequest(fr models.FileRequest, user models.User) {
createLogEntry(categoryEdit, fmt.Sprintf("File request %s (%s) created by %s (user #%d)", fr.Id, fr.Name, user.Name, user.Id), false)
}
// LogEditFileRequest adds a log entry when a file request was edited. Non-Blocking
// LogEditFileRequest adds a log entry when a file request was edited. Non-Blocking
func LogEditFileRequest(fr models.FileRequest, user models.User) {
	// Fix: the message previously said "created by" (copy-paste from
	// LogCreateFileRequest), which made edit entries indistinguishable
	// from creation entries in the log
	createLogEntry(categoryEdit, fmt.Sprintf("File request %s (%s) edited by %s (user #%d)", fr.Id, fr.Name, user.Name, user.Id), false)
}
// LogDeleteFileRequest adds a log entry when a file request was deleted. Non-Blocking
func LogDeleteFileRequest(fr models.FileRequest, user models.User) {
createLogEntry(categoryEdit, fmt.Sprintf("File request %s (%s) and associated files deleted by %s (user #%d)", fr.Id, fr.Name, user.Name, user.Id), false)
}
// LogReplace adds a log entry when an upload was replaced. Non-Blocking
func LogReplace(originalFile, newContent models.File, user models.User) {
createLogEntry(categoryEdit, fmt.Sprintf("%s, ID %s had content replaced with %s (ID %s) by %s (user #%d)",
+37 -17
View File
@@ -22,32 +22,33 @@ const (
ApiPermManageUsers
// ApiPermManageLogs is the permission required for managing the log file PERM_MANAGE_LOGS
ApiPermManageLogs
// ApiPermManageFileRequests is the permission required for creating and managing file requests PERM_MANAGE_FILE_REQUESTS
ApiPermManageFileRequests
// ApiPermDownload is the permission required for downloading stored files without increasing the counter PERM_DOWNLOAD
ApiPermDownload
)
// ApiPermNone means no permission granted
const ApiPermNone ApiPermission = 0
// ApiPermAll means all permission granted
const ApiPermAll ApiPermission = 255
// ApiPermDefault means all permission granted, except ApiPermApiMod, ApiPermManageUsers, ApiPermManageLogs and ApiPermReplace
// This is the default for new API keys that are created from the UI
const ApiPermDefault = ApiPermAll - ApiPermApiMod - ApiPermManageUsers - ApiPermReplace - ApiPermManageLogs
// ApiPermDefault the default for new API keys that are created from the UI
const ApiPermDefault = ApiPermView + ApiPermUpload + ApiPermDelete + ApiPermEdit
// ApiKey contains data of a single api key
type ApiKey struct {
Id string `json:"Id" redis:"Id"`
PublicId string `json:"PublicId" redis:"PublicId"`
FriendlyName string `json:"FriendlyName" redis:"FriendlyName"`
LastUsed int64 `json:"LastUsed" redis:"LastUsed"`
Permissions ApiPermission `json:"Permissions" redis:"Permissions"`
Expiry int64 `json:"Expiry" redis:"Expiry"` // Does not expire if 0
IsSystemKey bool `json:"IsSystemKey" redis:"IsSystemKey"`
UserId int `json:"UserId" redis:"UserId"`
Id string `json:"Id" redis:"Id"`
PublicId string `json:"PublicId" redis:"PublicId"`
FriendlyName string `json:"FriendlyName" redis:"FriendlyName"`
LastUsed int64 `json:"LastUsed" redis:"LastUsed"`
Permissions ApiPermission `json:"Permissions" redis:"Permissions"`
Expiry int64 `json:"Expiry" redis:"Expiry"` // Does not expire if 0
IsSystemKey bool `json:"IsSystemKey" redis:"IsSystemKey"`
UserId int `json:"UserId" redis:"UserId"`
UploadRequestId string `json:"UploadRequestId" redis:"UploadRequestId"`
}
// ApiPermission contains zero or more permissions as an uint8 format
type ApiPermission uint8
// ApiPermission contains zero or more permissions as an uint16 format
type ApiPermission uint16
func ApiPermissionFromString(permString string) (ApiPermission, error) {
switch strings.ToUpper(permString) {
@@ -67,6 +68,10 @@ func ApiPermissionFromString(permString string) (ApiPermission, error) {
return ApiPermManageUsers, nil
case "PERM_MANAGE_LOGS":
return ApiPermManageLogs, nil
case "PERM_MANAGE_FILE_REQUESTS":
return ApiPermManageFileRequests, nil
case "PERM_DOWNLOAD":
return ApiPermDownload, nil
default:
return 0, errors.New("invalid permission")
}
@@ -135,9 +140,24 @@ func (key *ApiKey) HasPermissionManageLogs() bool {
return key.HasPermission(ApiPermManageLogs)
}
// ApiKeyOutput is the output that is used after a new key is created
// HasPermissionManageFileRequests returns true if ApiPermManageFileRequests is granted
// HasPermissionManageFileRequests returns true if ApiPermManageFileRequests is granted
// (permission string PERM_MANAGE_FILE_REQUESTS).
func (key *ApiKey) HasPermissionManageFileRequests() bool {
	return key.HasPermission(ApiPermManageFileRequests)
}
// HasPermissionDownload returns true if ApiPermDownload is granted
// HasPermissionDownload returns true if ApiPermDownload is granted
// (permission string PERM_DOWNLOAD).
func (key *ApiKey) HasPermissionDownload() bool {
	return key.HasPermission(ApiPermDownload)
}
// ApiKeyOutput is the output used after a new key is created
// ApiKeyOutput is the output used after a new key is created
type ApiKeyOutput struct {
	Result   string // Status of the operation, e.g. "OK"
	Id       string // The secret ID of the newly created key
	PublicId string // The public (non-secret) ID of the newly created key
}
// IsUploadRequestKey returns true if it is used for file requests
func (key *ApiKey) IsUploadRequestKey() bool {
return key.UploadRequestId != ""
}
-15
View File
@@ -127,21 +127,6 @@ func TestApiPermAllNoApiMod(t *testing.T) {
}
}
func TestApiPermAll(t *testing.T) {
key := &ApiKey{}
key.GrantPermission(ApiPermAll)
if !key.HasPermission(ApiPermView) ||
!key.HasPermission(ApiPermUpload) ||
!key.HasPermission(ApiPermDelete) ||
!key.HasPermission(ApiPermApiMod) ||
!key.HasPermission(ApiPermEdit) ||
!key.HasPermission(ApiPermReplace) ||
!key.HasPermission(ApiPermManageUsers) ||
!key.HasPermission(ApiPermManageLogs) {
t.Errorf("expected all permissions to be set")
}
}
// Helper function to check only one permission is set
func checkOnlyPermissionSet(t *testing.T, key *ApiKey, perm ApiPermission) {
allPermissions := []struct {
-3
View File
@@ -19,9 +19,6 @@ type Configuration struct {
MaxMemory int `json:"MaxMemory"`
ChunkSize int `json:"ChunkSize"`
MaxParallelUploads int `json:"MaxParallelUploads"`
LengthId int `json:"-"`
LengthHotlinkId int `json:"-"`
MinLengthPassword int `json:"-"`
Encryption Encryption `json:"Encryption"`
UseSsl bool `json:"UseSsl"`
PicturesAlwaysLocal bool `json:"PicturesAlwaysLocal"`
+10 -13
View File
@@ -18,19 +18,16 @@ var testConfig = Configuration{
OAuthClientId: "",
OAuthClientSecret: "",
},
Port: ":12345",
ServerUrl: "https://testserver.com/",
RedirectUrl: "https://test.com",
DatabaseUrl: "sqlite://./test/gokapitest.sqlite",
ConfigVersion: 14,
LengthId: 5,
LengthHotlinkId: 10,
DataDir: "test",
MaxMemory: 50,
UseSsl: true,
MaxFileSizeMB: 20,
MinLengthPassword: 8,
PublicName: "public-name",
Port: ":12345",
ServerUrl: "https://testserver.com/",
RedirectUrl: "https://test.com",
DatabaseUrl: "sqlite://./test/gokapitest.sqlite",
ConfigVersion: 14,
DataDir: "test",
MaxMemory: 50,
UseSsl: true,
MaxFileSizeMB: 20,
PublicName: "public-name",
Encryption: Encryption{
Level: 1,
Cipher: []byte{0x00},
+16 -4
View File
@@ -19,12 +19,13 @@ type File struct {
HotlinkId string `json:"HotlinkId" redis:"HotlinkId"` // If file is a picture file and can be hotlinked, this is the ID for the hotlink
ContentType string `json:"ContentType" redis:"ContentType"` // The MIME type for the file
AwsBucket string `json:"AwsBucket" redis:"AwsBucket"` // If the file is stored in the cloud, this is the bucket that is being used
UploadRequestId string `json:"FileRequestId" redis:"FileRequestId"` // If the file belongs to a file request, this is the ID of the file request
ExpireAt int64 `json:"ExpireAt" redis:"ExpireAt"` // UTC timestamp of file expiry
PendingDeletion int64 `json:"PendingDeletion" redis:"PendingDeletion"` // UTC timestamp when the file will be deleted, if pending. Otherwise 0
SizeBytes int64 `json:"SizeBytes" redis:"SizeBytes"` // Filesize in bytes
UploadDate int64 `json:"UploadDate" redis:"UploadDate"` // UTC timestamp of upload time
DownloadsRemaining int `json:"DownloadsRemaining" redis:"DownloadsRemaining"` // The remaining downloads for this file
DownloadCount int `json:"DownloadCount" redis:"DownloadCount"` // The amount of times the file has been downloaded
DownloadCount int `json:"DownloadCount" redis:"DownloadCount"` // The number of times the file has been downloaded
UserId int `json:"UserId" redis:"UserId"` // The user ID of the uploader
Encryption EncryptionInfo `json:"Encryption" redis:"-"` // If the file is encrypted, this stores all info for decrypting
UnlimitedDownloads bool `json:"UnlimitedDownloads" redis:"UnlimitedDownloads"` // True if the uploader did not limit the downloads
@@ -42,6 +43,7 @@ type FileApiOutput struct {
ExpireAtString string `json:"ExpireAtString"` // Time expiry in a human-readable format in UTC
UrlDownload string `json:"UrlDownload"` // The public download URL for the file
UrlHotlink string `json:"UrlHotlink"` // The public hotlink URL for the file
FileRequestId string `json:"FileRequestId"` // The ID of the file request
UploadDate int64 `json:"UploadDate"` // UTC timestamp of upload time
ExpireAt int64 `json:"ExpireAt"` // UTC timestamp of file expiry
SizeBytes int64 `json:"SizeBytes"` // Filesize in bytes
@@ -55,6 +57,7 @@ type FileApiOutput struct {
IsPasswordProtected bool `json:"IsPasswordProtected"` // True if a password has to be entered before downloading the file
IsSavedOnLocalStorage bool `json:"IsSavedOnLocalStorage"` // True if the file does not use cloud storage
IsPendingDeletion bool `json:"IsPendingDeletion"` // True if the file is about to be deleted
IsFileRequest bool `json:"IsFileRequest"` // True if the file belongs to a file request
UploaderId int `json:"UploaderId"` // The user ID of the uploader
}
@@ -83,6 +86,7 @@ func (f *File) ToFileApiOutput(serverUrl string, useFilenameInUrl bool) (FileApi
if err != nil {
return FileApiOutput{}, err
}
result.IsFileRequest = f.UploadRequestId != ""
result.IsPasswordProtected = f.PasswordHash != ""
result.IsEncrypted = f.Encryption.IsEncrypted
result.IsSavedOnLocalStorage = f.AwsBucket == ""
@@ -90,10 +94,13 @@ func (f *File) ToFileApiOutput(serverUrl string, useFilenameInUrl bool) (FileApi
result.RequiresClientSideDecryption = true
}
result.IsEndToEndEncrypted = f.Encryption.IsEndToEndEncrypted
result.UrlHotlink = getHotlinkUrl(result, serverUrl, useFilenameInUrl)
result.UrlDownload = getDownloadUrl(result, serverUrl, useFilenameInUrl)
result.UploaderId = f.UserId
if !f.IsFileRequest() {
result.UrlHotlink = getHotlinkUrl(result, serverUrl, useFilenameInUrl)
result.UrlDownload = getDownloadUrl(result, serverUrl, useFilenameInUrl)
result.UploaderId = f.UserId
}
result.IsPendingDeletion = f.IsPendingForDeletion()
result.FileRequestId = f.UploadRequestId
result.ExpireAtString = time.Unix(f.ExpireAt, 0).UTC().Format("2006-01-02 15:04:05")
return result, nil
@@ -145,6 +152,11 @@ func (f *File) RequiresClientDecryption() bool {
}
return !f.IsLocalStorage() || f.Encryption.IsEndToEndEncrypted
}
// IsFileRequest checks if the file is uploaded for an upload request
// IsFileRequest checks if the file is uploaded for an upload request,
// i.e. a file request ID is set on the file.
func (f *File) IsFileRequest() bool {
	return f.UploadRequestId != ""
}
func errorAsJson(err error) string {
fmt.Println(err)
return "{\"Result\":\"error\",\"ErrorMessage\":\"" + err.Error() + "\"}"
+2 -2
View File
@@ -32,8 +32,8 @@ func TestToJsonResult(t *testing.T) {
UnlimitedTime: true,
PendingDeletion: 100,
}
test.IsEqualString(t, file.ToJsonResult("serverurl/", false), `{"Result":"OK","FileInfo":{"Id":"testId","Name":"testName","Size":"10 B","HotlinkId":"hotlinkid","ContentType":"text/html","ExpireAtString":"2025-06-25 11:48:28","UrlDownload":"serverurl/d?id=testId","UrlHotlink":"","UploadDate":1748180908,"ExpireAt":1750852108,"SizeBytes":10,"DownloadsRemaining":1,"DownloadCount":3,"UnlimitedDownloads":true,"UnlimitedTime":true,"RequiresClientSideDecryption":true,"IsEncrypted":true,"IsEndToEndEncrypted":false,"IsPasswordProtected":true,"IsSavedOnLocalStorage":false,"IsPendingDeletion":true,"UploaderId":2},"IncludeFilename":false}`)
test.IsEqualString(t, file.ToJsonResult("serverurl/", true), `{"Result":"OK","FileInfo":{"Id":"testId","Name":"testName","Size":"10 B","HotlinkId":"hotlinkid","ContentType":"text/html","ExpireAtString":"2025-06-25 11:48:28","UrlDownload":"serverurl/d/testId/testName","UrlHotlink":"","UploadDate":1748180908,"ExpireAt":1750852108,"SizeBytes":10,"DownloadsRemaining":1,"DownloadCount":3,"UnlimitedDownloads":true,"UnlimitedTime":true,"RequiresClientSideDecryption":true,"IsEncrypted":true,"IsEndToEndEncrypted":false,"IsPasswordProtected":true,"IsSavedOnLocalStorage":false,"IsPendingDeletion":true,"UploaderId":2},"IncludeFilename":true}`)
test.IsEqualString(t, file.ToJsonResult("serverurl/", false), `{"Result":"OK","FileInfo":{"Id":"testId","Name":"testName","Size":"10 B","HotlinkId":"hotlinkid","ContentType":"text/html","ExpireAtString":"2025-06-25 11:48:28","UrlDownload":"serverurl/d?id=testId","UrlHotlink":"","FileRequestId":"","UploadDate":1748180908,"ExpireAt":1750852108,"SizeBytes":10,"DownloadsRemaining":1,"DownloadCount":3,"UnlimitedDownloads":true,"UnlimitedTime":true,"RequiresClientSideDecryption":true,"IsEncrypted":true,"IsEndToEndEncrypted":false,"IsPasswordProtected":true,"IsSavedOnLocalStorage":false,"IsPendingDeletion":true,"IsFileRequest":false,"UploaderId":2},"IncludeFilename":false}`)
test.IsEqualString(t, file.ToJsonResult("serverurl/", true), `{"Result":"OK","FileInfo":{"Id":"testId","Name":"testName","Size":"10 B","HotlinkId":"hotlinkid","ContentType":"text/html","ExpireAtString":"2025-06-25 11:48:28","UrlDownload":"serverurl/d/testId/testName","UrlHotlink":"","FileRequestId":"","UploadDate":1748180908,"ExpireAt":1750852108,"SizeBytes":10,"DownloadsRemaining":1,"DownloadCount":3,"UnlimitedDownloads":true,"UnlimitedTime":true,"RequiresClientSideDecryption":true,"IsEncrypted":true,"IsEndToEndEncrypted":false,"IsPasswordProtected":true,"IsSavedOnLocalStorage":false,"IsPendingDeletion":true,"IsFileRequest":false,"UploaderId":2},"IncludeFilename":true}`)
}
func TestIsLocalStorage(t *testing.T) {
+94
View File
@@ -0,0 +1,94 @@
package models
import (
"strings"
"time"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/storage/chunking/chunkreservation"
)
// FileRequest represents a request for guests to upload files, including
// its persisted configuration and derived statistics. Fields tagged
// redis:"-" are not stored and must be filled by calling Populate().
type FileRequest struct {
	Id              string   `json:"id" redis:"id"`                   // The internal ID of the file request
	UserId          int      `json:"userid" redis:"userid"`           // The user ID of the owner
	MaxFiles        int      `json:"maxfiles" redis:"maxfiles"`       // The maximum number of files allowed
	MaxSize         int      `json:"maxsize" redis:"maxsize"`         // The maximum file size allowed in MB
	Expiry          int64    `json:"expiry" redis:"expiry"`           // The expiry time of the file request, 0 means unlimited
	CreationDate    int64    `json:"creationdate" redis:"creationdate"` // The timestamp of the file request creation
	Name            string   `json:"name" redis:"name"`               // The given name for the file request
	ApiKey          string   `json:"apikey" redis:"apikey"`           // The API key related to the file request
	Notes           string   `json:"notes" redis:"notes"`             // The custom note that was set for this file request
	UploadedFiles   int      `json:"uploadedfiles" redis:"-"`         // Contains the number of uploaded files for this request. Needs to be calculated with Populate()
	CombinedMaxSize int      `json:"combinedmaxsize" redis:"-"`       // The lesser of MaxSize and the server's max upload size. Needs to be calculated with Populate()
	ReservedUploads int      `json:"reserveduploads" redis:"-"`       // How many uploads are currently reserved but not finalised. Needs to be calculated with Populate()
	LastUpload      int64    `json:"lastupload" redis:"-"`            // Contains the timestamp of the last upload for this request. Needs to be calculated with Populate()
	TotalFileSize   int64    `json:"totalfilesize" redis:"-"`         // Contains the file size of all uploaded files. Needs to be calculated with Populate()
	FileIdList      []string `json:"fileidlist" redis:"-"`            // Contains an array of the IDs of all uploaded files. Needs to be calculated with Populate()
	Files           []File   `json:"-" redis:"-"`                     // Contains all uploaded files for this request. Needs to be calculated with Populate()
}
// Populate inserts the number of uploaded files and the last upload date
func (f *FileRequest) Populate(files map[string]File, maxServerSize int) {
f.FileIdList = make([]string, 0)
f.Files = make([]File, 0)
for _, file := range files {
if file.UploadRequestId == f.Id {
f.TotalFileSize = f.TotalFileSize + file.SizeBytes
f.FileIdList = append(f.FileIdList, file.Id)
f.Files = append(f.Files, file)
if file.UploadDate > f.LastUpload {
f.LastUpload = file.UploadDate
}
}
}
f.CombinedMaxSize = f.MaxSize
if f.MaxSize == 0 || f.MaxSize > maxServerSize {
f.CombinedMaxSize = maxServerSize
}
f.UploadedFiles = len(f.FileIdList)
f.ReservedUploads = chunkreservation.GetCount(f.Id)
}
// GetReadableDateLastUpdate returns the last update date as YYYY-MM-DD HH:MM:SS,
// or "None" if no file has been uploaded yet
func (f *FileRequest) GetReadableDateLastUpdate() string {
	if f.LastUpload != 0 {
		return time.Unix(f.LastUpload, 0).Format("2006-01-02 15:04:05")
	}
	return "None"
}
// GetReadableTotalSize returns the total size of all uploaded files as a
// human-readable SI string. Populate needs to be called first so that
// TotalFileSize is filled.
func (f *FileRequest) GetReadableTotalSize() string {
	return helper.ByteCountSI(f.TotalFileSize)
}
// GetFilesAsString returns the IDs of all uploaded files as a single
// comma-separated string. Populate needs to be called first so that
// FileIdList is filled.
func (f *FileRequest) GetFilesAsString() string {
	return strings.Join(f.FileIdList, ",")
}
// IsUnlimitedSize returns true if no maximum size is set for this request
func (f *FileRequest) IsUnlimitedSize() bool {
	return f.MaxSize == 0
}

// IsUnlimitedFiles returns true if no maximum number of files is set for this request
func (f *FileRequest) IsUnlimitedFiles() bool {
	return f.MaxFiles == 0
}

// IsUnlimitedTime returns true if no expiry is set for this request
func (f *FileRequest) IsUnlimitedTime() bool {
	return f.Expiry == 0
}

// IsExpired returns true if an expiry is set and it lies in the past
func (f *FileRequest) IsExpired() bool {
	return !f.IsUnlimitedTime() && time.Now().Unix() > f.Expiry
}

// HasRestrictions returns true if at least one of size, file count or expiry is limited
func (f *FileRequest) HasRestrictions() bool {
	return !(f.IsUnlimitedSize() && f.IsUnlimitedFiles() && f.IsUnlimitedTime())
}
// FilesRemaining returns how many more files may still be uploaded for this
// request, taking currently reserved uploads into account. Never negative.
// NOTE(review): returns 0 when MaxFiles is 0 (unlimited) — callers presumably
// check IsUnlimitedFiles first; confirm.
func (f *FileRequest) FilesRemaining() int {
	remaining := f.MaxFiles - f.UploadedFiles - f.ReservedUploads
	if remaining > 0 {
		return remaining
	}
	return 0
}
+3 -2
View File
@@ -1,7 +1,7 @@
package models
// UploadRequest is used to set an upload request
type UploadRequest struct {
// UploadParameters is used to set parameters for a new upload
type UploadParameters struct {
UserId int
AllowedDownloads int
Expiry int
@@ -13,4 +13,5 @@ type UploadRequest struct {
IsEndToEndEncrypted bool
Password string
ExternalUrl string
FileRequestId string
}
+8
View File
@@ -0,0 +1,8 @@
package models
// Presign holds the data of a presigned download. It allows downloading the
// referenced files without regular authentication until it expires.
type Presign struct {
	Id       string   // identifier / key of the presign entry
	FileIds  []string // IDs of the files covered by this presign
	Expiry   int64    // unix timestamp after which the presign is invalid (see ErrorInvalidPresign)
	Filename string   // filename used when serving the download — TODO confirm against caller
}
+23 -11
View File
@@ -34,7 +34,7 @@ func (u *User) GetReadableUserLevel() string {
}
}
// ToJson returns the user as a JSon object
// ToJson returns the user as a JSON object
func (u *User) ToJson() string {
result, err := json.Marshal(u)
helper.Check(err)
@@ -50,7 +50,7 @@ const UserLevelAdmin UserRank = 1
// UserLevelUser indicates that this user has only basic permissions by default
const UserLevelUser UserRank = 2
// UserRank indicates the rank that is assigned to the user
// UserRank indicates the rank assigned to the user
type UserRank uint8
// IsSuperAdmin returns true if the user has the Rank UserLevelSuperAdmin
@@ -58,35 +58,42 @@ func (u *User) IsSuperAdmin() bool {
return u.UserLevel == UserLevelSuperAdmin
}
// IsAdmin returns true if the user's rank is UserLevelAdmin or UserLevelSuperAdmin
func (u *User) IsAdmin() bool {
	switch u.UserLevel {
	case UserLevelAdmin, UserLevelSuperAdmin:
		return true
	default:
		return false
	}
}
// IsSameUser returns true if the given ID matches the user's own ID
func (u *User) IsSameUser(userId int) bool {
	return userId == u.Id
}
const (
// UserPermReplaceUploads allows to replace uploads
// UserPermReplaceUploads allows replacing uploads PERM_REPLACE
UserPermReplaceUploads UserPermission = 1 << iota
// UserPermListOtherUploads allows to also list uploads by other users
// UserPermListOtherUploads allows also listing uploads by other users PERM_LIST
UserPermListOtherUploads
// UserPermEditOtherUploads allows editing of uploads by other users
// UserPermEditOtherUploads allows editing of uploads by other users PERM_EDIT
UserPermEditOtherUploads
// UserPermReplaceOtherUploads allows replacing of uploads by other users
// UserPermReplaceOtherUploads allows replacing of uploads by other users PERM_REPLACE_OTHER
UserPermReplaceOtherUploads
// UserPermDeleteOtherUploads allows deleting uploads by other users
// UserPermDeleteOtherUploads allows deleting uploads by other users PERM_DELETE
UserPermDeleteOtherUploads
// UserPermManageLogs allows viewing and deleting logs
// UserPermManageLogs allows viewing and deleting logs PERM_LOGS
UserPermManageLogs
// UserPermManageApiKeys allows editing and deleting of API keys by other users
// UserPermManageApiKeys allows editing and deleting of API keys by other users PERM_API
UserPermManageApiKeys
// UserPermManageUsers allows creating and editing of users, including granting and revoking permissions
// UserPermManageUsers allows creating and editing of users, including granting and revoking permissions PERM_USERS
UserPermManageUsers
// UserPermGuestUploads allows creating file requests PERM_GUEST_UPLOAD
UserPermGuestUploads
)
// UserPermissionNone means that the user has no permissions
const UserPermissionNone UserPermission = 0
// UserPermissionAll means that the user has all permissions
const UserPermissionAll UserPermission = 255
const UserPermissionAll UserPermission = 511
// GrantPermission grants one or more permissions
func (u *User) GrantPermission(permission UserPermission) {
@@ -145,3 +152,8 @@ func (u *User) HasPermissionManageApi() bool {
func (u *User) HasPermissionManageUsers() bool {
return u.HasPermission(UserPermManageUsers)
}
// HasPermissionCreateFileRequests returns true if the user has the permission
// UserPermGuestUploads, i.e. is allowed to create file requests for guest uploads
func (u *User) HasPermissionCreateFileRequests() bool {
	return u.HasPermission(UserPermGuestUploads)
}
+1 -1
View File
@@ -249,5 +249,5 @@ func TestUser_ToJson(t *testing.T) {
Password: "1234",
ResetPassword: true,
}
test.IsEqualString(t, user.ToJson(), `{"id":4,"name":"Test User","permissions":255,"userLevel":1,"lastOnline":1337,"resetPassword":true}`)
test.IsEqualString(t, user.ToJson(), `{"id":4,"name":"Test User","permissions":511,"userLevel":1,"lastOnline":1337,"resetPassword":true}`)
}
+177 -26
View File
@@ -5,6 +5,7 @@ Serving and processing uploaded files
*/
import (
"archive/zip"
"bytes"
"crypto/sha1"
"encoding/hex"
@@ -16,7 +17,6 @@ import (
"net/http"
"os"
"path/filepath"
"strconv"
"strings"
"time"
@@ -37,19 +37,25 @@ import (
"github.com/jinzhu/copier"
)
// ErrorFileTooLarge is an error that is called when a file larger than the set maximum is uploaded
// ErrorFileTooLarge is an error which is raised when a file larger than the set maximum is uploaded
var ErrorFileTooLarge = errors.New("upload limit exceeded")
// ErrorChunkTooSmall is an error which is raised when a chunk is smaller than 5MB
var ErrorChunkTooSmall = errors.New("chunk is too small")
// ErrorReplaceE2EFile is caused when an end-to-end encrypted file is replaced
var ErrorReplaceE2EFile = errors.New("end-to-end encrypted files cannot be replaced")
// ErrorFileNotFound is raised when an invalid ID is passed or the file has expired
var ErrorFileNotFound = errors.New("file not found")
// ErrorInvalidPresign is raised when an invalid presign key has been passed or it has expired
var ErrorInvalidPresign = errors.New("invalid presign")
// NewFile creates a new file in the system. Called after an upload from the API has been completed. If a file with the same sha1 hash
// already exists, it is deduplicated. This function gathers information about the file, creates an ID and saves
// it into the global configuration. It is now only used by the API, the web UI uses NewFileFromChunk
func NewFile(fileContent io.Reader, fileHeader *multipart.FileHeader, userId int, uploadRequest models.UploadRequest) (models.File, error) {
func NewFile(fileContent io.Reader, fileHeader *multipart.FileHeader, userId int, uploadRequest models.UploadParameters) (models.File, error) {
if !isAllowedFileSize(fileHeader.Size) {
return models.File{}, ErrorFileTooLarge
}
@@ -150,7 +156,7 @@ func GetUploadCounts() map[int]int {
// NewFileFromChunk creates a new file in the system after a chunk upload has fully completed. If a file with the same sha1 hash
// already exists, it is deduplicated. This function gathers information about the file, creates an ID and saves
// it into the global configuration.
func NewFileFromChunk(chunkId string, fileHeader chunking.FileHeader, userId int, uploadRequest models.UploadRequest) (models.File, error) {
func NewFileFromChunk(chunkId string, fileHeader chunking.FileHeader, userId int, uploadRequest models.UploadParameters) (models.File, error) {
file, err := chunking.GetFileByChunkId(chunkId)
if err != nil {
return models.File{}, err
@@ -287,7 +293,7 @@ func encryptChunkFile(file *os.File, metadata *models.File) (*os.File, error) {
return tempFileEnc, nil
}
func createNewMetaData(hash string, fileHeader chunking.FileHeader, userId int, uploadRequest models.UploadRequest) models.File {
func createNewMetaData(hash string, fileHeader chunking.FileHeader, userId int, params models.UploadParameters) models.File {
file := models.File{
Id: createNewId(),
Name: fileHeader.Filename,
@@ -295,17 +301,18 @@ func createNewMetaData(hash string, fileHeader chunking.FileHeader, userId int,
Size: helper.ByteCountSI(fileHeader.Size),
SizeBytes: fileHeader.Size,
ContentType: fileHeader.ContentType,
ExpireAt: uploadRequest.ExpiryTimestamp,
ExpireAt: params.ExpiryTimestamp,
UploadDate: time.Now().Unix(),
DownloadsRemaining: uploadRequest.AllowedDownloads,
UnlimitedTime: uploadRequest.UnlimitedTime,
UnlimitedDownloads: uploadRequest.UnlimitedDownload,
PasswordHash: configuration.HashPassword(uploadRequest.Password, true),
DownloadsRemaining: params.AllowedDownloads,
UnlimitedTime: params.UnlimitedTime,
UnlimitedDownloads: params.UnlimitedDownload,
PasswordHash: configuration.HashPassword(params.Password, true),
UserId: userId,
UploadRequestId: params.FileRequestId,
}
if uploadRequest.IsEndToEndEncrypted {
if params.IsEndToEndEncrypted {
file.Encryption = models.EncryptionInfo{IsEndToEndEncrypted: true, IsEncrypted: true}
file.Size = helper.ByteCountSI(uploadRequest.RealSize)
file.Size = helper.ByteCountSI(params.RealSize)
}
if isEncryptionRequested() {
file.Encryption.IsEncrypted = true
@@ -321,7 +328,7 @@ func createNewMetaData(hash string, fileHeader chunking.FileHeader, userId int,
// createNewId returns a random ID
func createNewId() string {
return helper.GenerateRandomString(configuration.Get().LengthId)
return helper.GenerateRandomString(configuration.GetEnvironment().LengthId)
}
func getEncInfoFromExistingFile(hash string) (models.EncryptionInfo, bool) {
@@ -393,7 +400,7 @@ func isChangeRequested(parametersToChange, parameter int) bool {
}
// DuplicateFile creates a copy of an existing file with new parameters
func DuplicateFile(file models.File, parametersToChange int, newFileName string, fileParameters models.UploadRequest) (models.File, error) {
func DuplicateFile(file models.File, parametersToChange int, newFileName string, fileParameters models.UploadParameters) (models.File, error) {
// apiDuplicateFile expects fileParameters.IsEndToEndEncrypted and fileParameters.RealSize not to be used,
// change in apiDuplicateFile if using in this function!
@@ -516,7 +523,7 @@ func AddHotlink(file *models.File) {
if !IsAbleHotlink(*file) {
return
}
link := helper.GenerateRandomString(configuration.Get().LengthHotlinkId) + getFileExtension(file.Name)
link := helper.GenerateRandomString(configuration.GetEnvironment().LengthHotlinkId) + getFileExtension(file.Name)
file.HotlinkId = link
database.SaveHotlink(*file)
}
@@ -600,18 +607,20 @@ func GetFileByHotlink(id string) (models.File, bool) {
}
// ServeFile subtracts a download allowance and serves the file to the browser
func ServeFile(file models.File, w http.ResponseWriter, r *http.Request, forceDownload bool) {
file.DownloadsRemaining = file.DownloadsRemaining - 1
file.DownloadCount = file.DownloadCount + 1
database.IncreaseDownloadCount(file.Id, !file.UnlimitedDownloads)
func ServeFile(file models.File, w http.ResponseWriter, r *http.Request, forceDownload, increaseCounter, forceDecryption bool) {
if increaseCounter {
file.DownloadsRemaining = file.DownloadsRemaining - 1
file.DownloadCount = file.DownloadCount + 1
database.IncreaseDownloadCount(file.Id, !file.UnlimitedDownloads)
go sse.PublishDownloadCount(file)
}
logging.LogDownload(file, r, configuration.Get().SaveIp)
go sse.PublishDownloadCount(file)
if !file.IsLocalStorage() {
// If non-blocking, we are not setting a download complete status as there is no reliable way to
// confirm that the file has been completely downloaded. It expires automatically after 24 hours.
statusId := downloadstatus.SetDownload(file)
isBlocking, err := aws.ServeFile(w, r, file, forceDownload)
isBlocking, err := aws.ServeFile(w, r, file, forceDownload, forceDecryption)
// TODO chances are high that an error is returned here, we should consider proper output
helper.Check(err)
if isBlocking {
@@ -619,29 +628,108 @@ func ServeFile(file models.File, w http.ResponseWriter, r *http.Request, forceDo
}
return
}
fileData, size := getFileHandler(file, configuration.Get().DataDir)
fileData, _ := getFileHandler(file, configuration.Get().DataDir)
if file.Encryption.IsEncrypted && !file.RequiresClientDecryption() {
if !encryption.IsCorrectKey(file.Encryption, fileData) {
w.Write([]byte("Internal error - Error decrypting file, source data might be damaged or an incorrect key has been used"))
_, _ = w.Write([]byte("Internal error - Error decrypting file, source data might be damaged or an incorrect key has been used"))
return
}
}
statusId := downloadstatus.SetDownload(file)
headers.Write(file, w, forceDownload)
headers.Write(file, w, forceDownload, false)
if file.Encryption.IsEncrypted && !file.RequiresClientDecryption() {
err := encryption.DecryptReader(file.Encryption, fileData, w)
if err != nil {
w.Write([]byte("Error decrypting file"))
_, _ = w.Write([]byte("Error decrypting file"))
fmt.Println(err)
return
}
} else {
w.Header().Set("Content-Length", strconv.FormatInt(size, 10))
http.ServeContent(w, r, file.Name, time.Now(), fileData)
}
downloadstatus.SetComplete(statusId)
}
// makeFilenameUnique returns filename unchanged if it has not been seen in
// nameMap yet, otherwise a deduplicated name in the format "Name (x).ext".
// The chosen name is recorded in nameMap.
func makeFilenameUnique(filename string, nameMap *map[string]bool) string {
	extension := filepath.Ext(filename)
	stem := strings.TrimSuffix(filename, extension)
	taken := *nameMap
	if !taken[filename] {
		taken[filename] = true
		return filename
	}
	// Name collision: probe "stem (2).ext", "stem (3).ext", ... until free
	for counter := 2; ; counter++ {
		candidate := fmt.Sprintf("%s (%d)%s", stem, counter, extension)
		if !taken[candidate] {
			taken[candidate] = true
			return candidate
		}
	}
}
// ServeFilesAsZip streams the given files to the client as a single Zip
// archive named "<filename>.zip" ("Gokapi.zip" if filename is empty).
// Entries are stored uncompressed; each download is logged and tracked via
// downloadstatus while it is written. Encrypted local files are decrypted
// on the fly; cloud-stored files are streamed through aws.Stream.
func ServeFilesAsZip(files []models.File, filename string, w http.ResponseWriter, r *http.Request) {
	if filename == "" {
		filename = "Gokapi"
	}
	w.Header().Set("Content-Type", "application/zip")
	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s.zip\"", filename))
	w.WriteHeader(http.StatusOK)
	saveIp := configuration.Get().SaveIp
	zipWriter := zip.NewWriter(w)
	defer zipWriter.Close()
	filenames := make(map[string]bool)
	for _, file := range files {
		// Avoid duplicate entry names inside the archive
		file.Name = makeFilenameUnique(file.Name, &filenames)
		header := &zip.FileHeader{
			Name:     file.Name,
			Method:   zip.Store, // no compression, content is streamed as-is
			Modified: time.Unix(file.UploadDate, 0),
		}
		entryWriter, err := zipWriter.CreateHeader(header)
		helper.Check(err)
		logging.LogDownload(file, r, saveIp)
		if !file.IsLocalStorage() {
			// Cloud storage: stream the object directly into the archive entry
			statusId := downloadstatus.SetDownload(file)
			err = aws.Stream(entryWriter, file)
			helper.Check(err)
			downloadstatus.SetComplete(statusId)
			// Flush so the client receives the finished entry promptly
			_ = zipWriter.Flush()
			flushingWriter, ok := w.(http.Flusher)
			if ok {
				flushingWriter.Flush()
			}
			continue
		}
		fileData, _ := getFileHandler(file, configuration.Get().DataDir)
		statusId := downloadstatus.SetDownload(file)
		if file.Encryption.IsEncrypted {
			// Verify the key before writing anything, as decryption errors
			// mid-stream cannot be reported cleanly inside a Zip entry
			if !encryption.IsCorrectKey(file.Encryption, fileData) {
				_, _ = w.Write([]byte("Internal error - Error decrypting file, source data might be damaged or an incorrect key has been used"))
				return
			}
			err = encryption.DecryptReader(file.Encryption, fileData, entryWriter)
			if err != nil {
				_, _ = w.Write([]byte("Error decrypting file"))
				fmt.Println(err)
				return
			}
		} else {
			_, err = io.Copy(entryWriter, fileData)
			helper.Check(err)
		}
		downloadstatus.SetComplete(statusId)
		// Flush so the client receives the finished entry promptly
		_ = zipWriter.Flush()
		flushingWriter, ok := w.(http.Flusher)
		if ok {
			flushingWriter.Flush()
		}
	}
}
func getFileHandler(file models.File, dataDir string) (*os.File, int64) {
storageData, err := os.OpenFile(dataDir+"/"+file.SHA1, os.O_RDONLY, 0644)
helper.Check(err)
@@ -703,6 +791,8 @@ func CleanUp(periodic bool) {
}
cleanOldTempFiles()
cleanHotlinks()
cleanInvalidApiKeys()
cleanInvalidFileRequests()
database.RunGarbageCollection()
if periodic {
@@ -715,6 +805,55 @@ func CleanUp(periodic bool) {
}
}
// getUserMap returns all users from the database, indexed by their ID
func getUserMap() map[int]models.User {
	allUsers := database.GetAllUsers()
	userMap := make(map[int]models.User, len(allUsers))
	for _, u := range allUsers {
		userMap[u.Id] = u
	}
	return userMap
}
// cleanInvalidApiKeys removes all API keys that are not associated with a user anymore.
// Normally this should not be a problem, but if a user was manually deleted from the database,
// this could cause issues otherwise.
func cleanInvalidApiKeys() {
	users := getUserMap()
	for _, apiKey := range database.GetAllApiKeys() {
		_, exists := users[apiKey.UserId]
		if !exists {
			// Owner no longer exists, remove the key
			database.DeleteApiKey(apiKey.Id)
			continue
		}
		// A key belonging to an upload request is also invalid once the
		// associated file request has been deleted
		if apiKey.IsUploadRequestKey() {
			_, exists = database.GetFileRequest(apiKey.UploadRequestId)
			if !exists {
				database.DeleteApiKey(apiKey.Id)
			}
		}
	}
}
// cleanInvalidFileRequests removes file requests and the associated files from
// the database if their associated owner is not a valid user.
// Normally this should not be a problem, but if a user was manually deleted
// from the database, this could cause issues otherwise.
func cleanInvalidFileRequests() {
	users := getUserMap()
	for _, fileRequest := range database.GetAllFileRequests() {
		_, exists := users[fileRequest.UserId]
		if !exists {
			files := database.GetAllMetadata()
			for _, file := range files {
				// Bugfix: DeleteFile must only run for files belonging to the
				// orphaned request; previously it sat outside this condition
				// and deleted every stored file
				if file.UploadRequestId == fileRequest.Id {
					DeleteFile(file.Id, true)
				}
			}
			database.DeleteFileRequest(fileRequest)
		}
	}
}
// cleanHotlinks removes hotlinks from the database where the file has expired
func cleanHotlinks() {
hotlinks := database.GetAllHotlinks()
@@ -817,6 +956,18 @@ func DeleteFile(fileId string, deleteSource bool) bool {
return true
}
// DeleteFiles deletes multiple files at once. This avoids race conditions when
// CleanUp is called multiple times. If deleteSource is true, a single clean-up
// pass is triggered afterwards, which deletes each source file if it is not
// used by a different file.
func DeleteFiles(files []models.File, deleteSource bool) {
	for _, file := range files {
		// Pass false here: sources are removed by the single CleanUp call
		// below instead of once per file
		DeleteFile(file.Id, false)
	}
	if deleteSource {
		go CleanUp(false)
	}
}
// DeleteFileSchedule schedules a file for deletion after a specified delay and optionally deletes its source.
// Returns true if scheduling is successful, false otherwise.
func DeleteFileSchedule(fileId string, delayMs int, deleteSource bool) bool {
+13 -13
View File
@@ -153,13 +153,13 @@ func TestAddHotlink(t *testing.T) {
type testFile struct {
File models.File
Request models.UploadRequest
Request models.UploadParameters
Header multipart.FileHeader
UserId int
Content []byte
}
func createRawTestFile(content []byte) (multipart.FileHeader, models.UploadRequest) {
func createRawTestFile(content []byte) (multipart.FileHeader, models.UploadParameters) {
os.Setenv("TZ", "UTC")
mimeHeader := make(textproto.MIMEHeader)
mimeHeader.Set("Content-Disposition", "form-data; name=\"file\"; filename=\"test.dat\"")
@@ -169,7 +169,7 @@ func createRawTestFile(content []byte) (multipart.FileHeader, models.UploadReque
Header: mimeHeader,
Size: int64(len(content)),
}
request := models.UploadRequest{
request := models.UploadParameters{
AllowedDownloads: 1,
Expiry: 999,
ExpiryTimestamp: 2147483600,
@@ -191,7 +191,7 @@ func createTestFile() (testFile, error) {
}, err
}
func createTestChunk() (string, chunking.FileHeader, models.UploadRequest, error) {
func createTestChunk() (string, chunking.FileHeader, models.UploadParameters, error) {
content := []byte("This is a file for chunk testing purposes")
header, request := createRawTestFile(content)
chunkId := helper.GenerateRandomString(15)
@@ -202,7 +202,7 @@ func createTestChunk() (string, chunking.FileHeader, models.UploadRequest, error
}
err := os.WriteFile("test/data/chunk-"+chunkId, content, 0600)
if err != nil {
return "", chunking.FileHeader{}, models.UploadRequest{}, err
return "", chunking.FileHeader{}, models.UploadParameters{}, err
}
return chunkId, fileheader, request, nil
}
@@ -260,7 +260,7 @@ func TestNewFile(t *testing.T) {
Header: mimeHeader,
Size: int64(20) * 1024 * 1024,
}
request = models.UploadRequest{
request = models.UploadParameters{
AllowedDownloads: 1,
Expiry: 999,
ExpiryTimestamp: 2147483600,
@@ -293,7 +293,7 @@ func TestNewFile(t *testing.T) {
Header: mimeHeader,
Size: int64(50) * 1024 * 1024,
}
request = models.UploadRequest{
request = models.UploadParameters{
AllowedDownloads: 1,
Expiry: 999,
ExpiryTimestamp: 2147483600,
@@ -351,7 +351,7 @@ func TestNewFile(t *testing.T) {
Header: mimeHeader,
Size: int64(20) * 1024 * 1024,
}
request = models.UploadRequest{
request = models.UploadParameters{
AllowedDownloads: 1,
Expiry: 999,
ExpiryTimestamp: 2147483600,
@@ -464,7 +464,7 @@ func TestDuplicateFile(t *testing.T) {
retrievedFile.DownloadCount = 5
database.SaveMetaData(retrievedFile)
newFile, err := DuplicateFile(retrievedFile, 0, "123", models.UploadRequest{})
newFile, err := DuplicateFile(retrievedFile, 0, "123", models.UploadParameters{})
test.IsNil(t, err)
test.IsEqualInt(t, newFile.DownloadCount, 0)
test.IsEqualInt(t, newFile.DownloadsRemaining, 1)
@@ -474,7 +474,7 @@ func TestDuplicateFile(t *testing.T) {
test.IsEqualBool(t, newFile.UnlimitedTime, false)
test.IsEqualString(t, newFile.Name, "test.dat")
uploadRequest := models.UploadRequest{
uploadRequest := models.UploadParameters{
AllowedDownloads: 5,
Expiry: 5,
ExpiryTimestamp: 200000,
@@ -573,7 +573,7 @@ func TestServeFile(t *testing.T) {
test.IsEqualBool(t, result, true)
r := httptest.NewRequest("GET", "/", nil)
w := httptest.NewRecorder()
ServeFile(file, w, r, true)
ServeFile(file, w, r, true, true, false)
_, result = GetFile(idNewFile)
test.IsEqualBool(t, result, false)
@@ -594,7 +594,7 @@ func TestServeFile(t *testing.T) {
w = httptest.NewRecorder()
file, result = GetFile("awsTest1234567890123")
test.IsEqualBool(t, result, true)
ServeFile(file, w, r, false)
ServeFile(file, w, r, false, true, false)
if aws.IsMockApi {
test.ResponseBodyContains(t, w, "https://redirect.url")
} else {
@@ -619,7 +619,7 @@ func TestServeFile(t *testing.T) {
file.Encryption.Nonce = nonce
r = httptest.NewRequest("GET", "/", nil)
w = httptest.NewRecorder()
ServeFile(file, w, r, true)
ServeFile(file, w, r, true, true, false)
test.ResponseBodyContains(t, w, "Error decrypting file")
}
+8
View File
@@ -186,6 +186,14 @@ func GetFileByChunkId(id string) (*os.File, error) {
return file, nil
}
// DeleteChunk removes the stored chunk file for the given chunk ID.
// An error is returned if the ID is empty or the file cannot be removed.
func DeleteChunk(id string) error {
	if len(id) == 0 {
		return errors.New("empty chunk id provided")
	}
	chunkPath := getChunkFilePath(sanitiseUuid(id))
	return os.Remove(chunkPath)
}
// FileExists returns true if a file exists for the given chunk ID
func FileExists(id string) bool {
exists, err := helper.FileExists(getChunkFilePath(id))
@@ -0,0 +1,92 @@
package chunkreservation
import (
"sync"
"time"
"github.com/forceu/gokapi/internal/helper"
)
// reservedChunks maps a file request ID to its active reservations, keyed by reservation uuid
var reservedChunks = make(map[string]map[string]reservation)

// reservationMutex guards access to reservedChunks (and gcIsRunning, which is
// only touched while the write lock is held in New)
var reservationMutex sync.RWMutex

// gcIsRunning is set once the periodic cleanUp goroutine has been started
var gcIsRunning = false

// timeReservationWithoutUpload is the reservation lifetime in seconds before an upload starts (4 minutes)
const timeReservationWithoutUpload = 4 * 60

// timeReservationWithUpload is the reservation lifetime in seconds once uploading has begun (23 hours)
const timeReservationWithUpload = 23 * 60 * 60

// reservation represents a single reserved upload slot
type reservation struct {
	Uuid   string
	Expiry int64 // unix timestamp after which this reservation is discarded
}
// GetCount returns the number of active reservations for the given file request id
func GetCount(id string) int {
	reservationMutex.RLock()
	defer reservationMutex.RUnlock()
	return len(reservedChunks[id])
}
// New creates a reservation for the given file request id and returns its uuid.
// The reservation expires after timeReservationWithoutUpload seconds unless
// SetUploading extends it. The first call also starts the periodic cleanup goroutine.
func New(id string) string {
	reservationMutex.Lock()
	defer reservationMutex.Unlock()
	uuid := helper.GenerateRandomString(32)
	if reservedChunks[id] == nil {
		reservedChunks[id] = make(map[string]reservation)
	}
	reservedChunks[id][uuid] = reservation{
		Uuid:   uuid,
		Expiry: time.Now().Unix() + timeReservationWithoutUpload,
	}
	// Lazily start the garbage collector; gcIsRunning is protected by the lock held above
	if !gcIsRunning {
		gcIsRunning = true
		go cleanUp(true)
	}
	return uuid
}
// SetComplete removes the reservation identified by uuid for the given file request id
func SetComplete(id, uuid string) {
	reservationMutex.Lock()
	defer reservationMutex.Unlock()
	delete(reservedChunks[id], uuid)
}
// SetUploading marks the reservation as actively uploading, extending its
// expiry to timeReservationWithUpload seconds from now. Returns false if the
// reservation does not exist or has already expired.
func SetUploading(id string, uuid string) bool {
	reservationMutex.Lock()
	defer reservationMutex.Unlock()
	if reservedChunks[id] == nil {
		return false
	}
	chunk, ok := reservedChunks[id][uuid]
	if !ok {
		return false
	}
	if chunk.Expiry < time.Now().Unix() {
		return false
	}
	chunk.Expiry = time.Now().Unix() + timeReservationWithUpload
	reservedChunks[id][uuid] = chunk
	return true
}
// cleanUp removes all expired reservations and drops per-request maps that
// have become empty. If isPeriodic is true, it re-schedules itself to run
// again after five minutes.
func cleanUp(isPeriodic bool) {
	// Hoisted out of the loop: one timestamp for the whole sweep
	now := time.Now().Unix()
	reservationMutex.Lock()
	for id, chunks := range reservedChunks {
		for uuid, reservedChunk := range chunks {
			if reservedChunk.Expiry < now {
				delete(chunks, uuid)
			}
		}
		// Remove emptied inner maps so reservedChunks does not grow without
		// bound over the lifetime of the process (deleting during range is safe)
		if len(chunks) == 0 {
			delete(reservedChunks, id)
		}
	}
	reservationMutex.Unlock()
	if isPeriodic {
		go func() {
			time.Sleep(time.Minute * 5)
			cleanUp(true)
		}()
	}
}
@@ -0,0 +1,64 @@
package filerequest
import (
"time"
"github.com/forceu/gokapi/internal/configuration"
"github.com/forceu/gokapi/internal/configuration/database"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
"github.com/forceu/gokapi/internal/storage"
)
// New creates a new file request object for the given user. It is not stored
// yet, and an API key has to be generated manually.
func New(user models.User) models.FileRequest {
	request := models.FileRequest{
		Id:           helper.GenerateRandomString(15),
		UserId:       user.Id,
		CreationDate: time.Now().Unix(),
		Name:         "Unnamed file request",
	}
	return request
}
// Get retrieves the file request with the given id, with its calculated
// fields populated. The second return value is false if no such request exists.
func Get(id string) (models.FileRequest, bool) {
	request, found := database.GetFileRequest(id)
	if !found {
		return models.FileRequest{}, false
	}
	request.Populate(database.GetAllMetadata(), configuration.Get().MaxFileSizeMB)
	return request, true
}
// GetAll returns all stored file requests with their calculated fields populated
func GetAll() []models.FileRequest {
	requests := database.GetAllFileRequests()
	if len(requests) == 0 {
		return requests
	}
	// Fetch metadata and server limit once for all requests
	allFiles := database.GetAllMetadata()
	maxServerSize := configuration.Get().MaxFileSizeMB
	for i := range requests {
		requests[i].Populate(allFiles, maxServerSize)
	}
	return requests
}
// Delete removes all files associated with a file request and then the request itself
func Delete(request models.FileRequest) {
	files := GetAllFiles(request)
	// true triggers a clean-up pass so unreferenced source files are removed as well
	storage.DeleteFiles(files, true)
	database.DeleteFileRequest(request)
}
// GetAllFiles returns a list of all files associated with the given file request
func GetAllFiles(request models.FileRequest) []models.File {
	var associated []models.File
	for _, file := range database.GetAllMetadata() {
		if file.UploadRequestId != request.Id {
			continue
		}
		associated = append(associated, file)
	}
	return associated
}
@@ -0,0 +1,71 @@
package ratelimiter
import (
"sync"
"time"
"golang.org/x/time/rate"
)
// uuidLimiter rate-limits requests for new uuids per key (see IsAllowedNewUuid)
var uuidLimiter = newLimiter()

// Currently unused
var byteLimiter = newLimiter()

// limiterEntry couples a rate limiter with the time it was last used,
// so idle entries can be evicted by the cleanup goroutine
type limiterEntry struct {
	limiter  *rate.Limiter
	lastSeen time.Time
}

// Store is a mutex-guarded collection of per-key rate limiters
type Store struct {
	mu             sync.Mutex
	limiters       map[string]*limiterEntry
	cleanupStarted bool // set once the background eviction goroutine was started
}
// newLimiter constructs an empty limiter store
func newLimiter() *Store {
	store := Store{limiters: map[string]*limiterEntry{}}
	return &store
}
// IsAllowedNewUuid reports whether the given key may currently request a new
// uuid, limited to 1 event per second with a burst of 4
func IsAllowedNewUuid(key string) bool {
	return uuidLimiter.Get(key, 1, 4).Allow()
}
// Get returns the rate limiter stored for key, creating one with the given
// rate and burst if none exists yet. It refreshes the entry's lastSeen
// timestamp and ensures the idle-entry cleanup goroutine is running.
func (s *Store) Get(key string, r rate.Limit, burst int) *rate.Limiter {
	s.mu.Lock()
	defer s.mu.Unlock()
	entry, found := s.limiters[key]
	if !found {
		entry = &limiterEntry{limiter: rate.NewLimiter(r, burst)}
	}
	entry.lastSeen = time.Now()
	s.limiters[key] = entry
	s.StartCleanup(12 * time.Hour)
	return entry.limiter
}
// StartCleanup launches a background goroutine that every 30 minutes evicts
// limiter entries not used for longer than maxIdle. Subsequent calls are no-ops.
// NOTE(review): cleanupStarted is read and written without holding s.mu here;
// this is currently only called from Get, which holds the lock — confirm
// before calling from elsewhere.
func (s *Store) StartCleanup(maxIdle time.Duration) {
	if s.cleanupStarted {
		return
	}
	s.cleanupStarted = true
	go func() {
		ticker := time.NewTicker(30 * time.Minute)
		for range ticker.C {
			now := time.Now()
			s.mu.Lock()
			for k, v := range s.limiters {
				if now.Sub(v.lastSeen) > maxIdle {
					delete(s.limiters, k)
				}
			}
			s.mu.Unlock()
		}
	}()
}
@@ -6,6 +6,12 @@ import (
"context"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"strings"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/credentials"
@@ -13,13 +19,9 @@ import (
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/forceu/gokapi/internal/encryption"
"github.com/forceu/gokapi/internal/models"
"github.com/forceu/gokapi/internal/webserver/headers"
"io"
"net/http"
"net/url"
"strings"
"time"
)
var awsConfig models.AwsConfig
@@ -106,11 +108,10 @@ func Upload(input io.Reader, file models.File) (string, error) {
return result.Location, nil
}
// Download downloads a file from AWS, used for encrypted files and testing
func Download(writer io.WriterAt, file models.File) (int64, error) {
// download downloads a file from AWS, used for testing
func download(writer io.WriterAt, file models.File) (int64, error) {
sess := createSession()
downloader := s3manager.NewDownloader(sess)
size, err := downloader.Download(writer, &s3.GetObjectInput{
Bucket: aws.String(file.AwsBucket),
Key: aws.String(file.SHA1),
@@ -121,13 +122,39 @@ func Download(writer io.WriterAt, file models.File) (int64, error) {
return size, nil
}
// ServeFile either redirects the user to a pre-signed download url (default) or downloads the file and serves it as a proxy (depending
// on configuration). Returns true if blocking operation (in order to set download status) or false if non-blocking.
func ServeFile(w http.ResponseWriter, r *http.Request, file models.File, forceDownload bool) (bool, error) {
if !awsConfig.ProxyDownload {
return false, redirectToDownload(w, r, file, forceDownload)
// Stream downloads a file from AWS sequentially, used for saving to a Zip file
func Stream(writer io.Writer, file models.File) error {
sess := createSession()
s3svc := s3.New(sess)
obj, err := s3svc.GetObject(&s3.GetObjectInput{
Bucket: aws.String(file.AwsBucket),
Key: aws.String(file.SHA1),
})
if err != nil {
return err
}
return true, proxyDownload(w, file, forceDownload)
defer obj.Body.Close()
var reader io.Reader = obj.Body
if file.Encryption.IsEncrypted {
return encryption.DecryptReader(file.Encryption, obj.Body, writer)
}
_, err = io.Copy(writer, reader)
return err
}
// ServeFile either redirects the user to a pre-signed download url (default) or
// downloads the file and serves it as a proxy (depending on configuration).
// If forceDecryption is set, the file is always fetched and decrypted server-side.
// Returns true if blocking operation (to set download status) or false if non-blocking.
func ServeFile(w http.ResponseWriter, r *http.Request, file models.File, forceDownload, forceDecryption bool) (bool, error) {
	switch {
	case forceDecryption:
		return true, serveDecryptedFile(w, file)
	case awsConfig.ProxyDownload:
		return true, proxyDownload(w, file, forceDownload)
	default:
		return false, redirectToDownload(w, r, file, forceDownload)
	}
}
func getPresignedUrl(file models.File, forceDownload bool) (string, error) {
@@ -176,11 +203,29 @@ func proxyDownload(w http.ResponseWriter, file models.File, forceDownload bool)
return err
}
defer resp.Body.Close()
headers.Write(file, w, forceDownload)
headers.Write(file, w, forceDownload, false)
_, _ = io.Copy(w, resp.Body)
return nil
}
// serveDecryptedFile fetches the object from S3 and streams it to the client,
// decrypting it server-side. Assumes file.Encryption holds a valid key —
// TODO confirm callers guarantee this.
func serveDecryptedFile(w http.ResponseWriter, file models.File) error {
	sess := createSession()
	s3svc := s3.New(sess)

	// Fetch the (encrypted) object from S3
	obj, err := s3svc.GetObject(&s3.GetObjectInput{
		Bucket: aws.String(file.AwsBucket),
		Key:    aws.String(file.SHA1),
	})
	if err != nil {
		return err
	}
	defer obj.Body.Close()

	// headers.Write with forceDownload=true; second flag presumably indicates
	// server-side decrypted serving — confirm against headers.Write
	headers.Write(file, w, true, true)
	return encryption.DecryptReader(file.Encryption, obj.Body, w)
}
func getTimeoutContext() (context.Context, context.CancelFunc) {
ctx := context.Background()
rContext, rCancel := context.WithTimeout(ctx, 5*time.Second)
@@ -5,12 +5,13 @@ package aws
import (
"bytes"
"errors"
"github.com/forceu/gokapi/internal/models"
"io"
"net/http"
"os"
"strconv"
"strings"
"github.com/forceu/gokapi/internal/models"
)
var uploadedFiles []models.File
@@ -107,7 +108,7 @@ func Upload(input io.Reader, file models.File) (string, error) {
}
// Download downloads a file from AWS
func Download(writer io.WriterAt, file models.File) (int64, error) {
func download(writer io.WriterAt, file models.File) (int64, error) {
if !isValidCredentials() {
return 0, errors.New("invalid credentials / invalid bucket / invalid region")
}
@@ -129,7 +130,7 @@ func isUploaded(file models.File) bool {
// ServeFile either redirects the user to a pre-signed download url (default) or downloads the file and serves it as a proxy (depending
// on configuration). Returns true if blocking operation (in order to set download status) or false if non-blocking.
func ServeFile(w http.ResponseWriter, r *http.Request, file models.File, forceDownload bool) (bool, error) {
func ServeFile(w http.ResponseWriter, r *http.Request, file models.File, forceDownload bool, forceDecryption bool) (bool, error) {
// TODO implement proxy as well
return false, RedirectToDownload(w, r, file, forceDownload)
}
@@ -199,3 +200,21 @@ func IsCorsCorrectlySet(bucket, gokapiUrl string) (bool, error) {
func GetDefaultBucketName() string {
return bucketName
}
// Stream downloads a file from AWS sequentially, used for saving to a Zip file.
// NOTE(review): this implementation reads from the local "data/" directory after
// checking isUploaded — presumably a local-backed variant; confirm against the real AWS build.
func Stream(writer io.Writer, file models.File) error {
	if !isValidCredentials() {
		return errors.New("invalid credentials / invalid bucket / invalid region")
	}
	if !isUploaded(file) {
		return errors.New("file not found")
	}
	// Content is addressed by the file's SHA1 hash
	data, err := os.Open("data/" + file.SHA1)
	if err != nil {
		return err
	}
	defer data.Close()
	_, err = io.Copy(writer, data)
	return err
}
@@ -4,9 +4,10 @@ package aws
import (
"errors"
"github.com/forceu/gokapi/internal/models"
"io"
"net/http"
"github.com/forceu/gokapi/internal/models"
)
const errorString = "AWS not supported in this build"
@@ -43,7 +44,7 @@ func Upload(input io.Reader, file models.File) (string, error) {
}
// Download downloads a file from AWS
func Download(writer io.WriterAt, file models.File) (int64, error) {
func download(writer io.WriterAt, file models.File) (int64, error) {
return 0, errors.New(errorString)
}
@@ -59,7 +60,7 @@ func RedirectToDownload(w http.ResponseWriter, r *http.Request, file models.File
// ServeFile either redirects the user to a pre-signed download url (default) or downloads the file and serves it as a proxy (depending
// on configuration). Returns true if blocking operation (in order to set download status) or false if non-blocking.
func ServeFile(w http.ResponseWriter, r *http.Request, file models.File, forceDownload bool) (bool, error) {
func ServeFile(w http.ResponseWriter, r *http.Request, file models.File, forceDownload bool, forceDecryption bool) (bool, error) {
return false, errors.New(errorString)
}
@@ -82,3 +83,8 @@ func IsCorsCorrectlySet(bucket, gokapiUrl string) (bool, error) {
func GetDefaultBucketName() string {
return ""
}
// Stream downloads a file from AWS sequentially, used for saving to a Zip file.
// This is the stub for builds compiled without AWS support; it always returns an error.
func Stream(writer io.Writer, file models.File) error {
	return errors.New(errorString)
}
@@ -3,17 +3,18 @@
package aws
import (
"github.com/forceu/gokapi/internal/configuration/cloudconfig"
"github.com/forceu/gokapi/internal/models"
"github.com/forceu/gokapi/internal/test"
"github.com/johannesboyne/gofakes3"
"github.com/johannesboyne/gofakes3/backend/s3mem"
"io"
"net/http"
"net/http/httptest"
"os"
"strings"
"testing"
"github.com/forceu/gokapi/internal/configuration/cloudconfig"
"github.com/forceu/gokapi/internal/models"
"github.com/forceu/gokapi/internal/test"
"github.com/johannesboyne/gofakes3"
"github.com/johannesboyne/gofakes3/backend/s3mem"
)
var testFile, invalidFile, invalidBucket, invalidAll models.File
@@ -86,7 +87,7 @@ func TestUploadToAws(t *testing.T) {
func TestDownloadFromAws(t *testing.T) {
test.FileDoesNotExist(t, "test")
file, _ := os.Create("test")
size, err := Download(file, testFile)
size, err := download(file, testFile)
test.IsNil(t, err)
test.IsEqualBool(t, size == 16, true)
test.FileExists(t, "test")
@@ -110,8 +111,8 @@ func testServing(t *testing.T, expectRedirect, forceDownload bool) {
w := httptest.NewRecorder()
r := httptest.NewRequest("GET", "/download", nil)
isBlockng, err := ServeFile(w, r, testFile, forceDownload)
test.IsEqualBool(t, isBlockng, !expectRedirect)
isBlocking, err := ServeFile(w, r, testFile, forceDownload, false)
test.IsEqualBool(t, isBlocking, !expectRedirect)
test.IsNil(t, err)
response, err := io.ReadAll(w.Result().Body)
@@ -1,9 +1,10 @@
package pstatusdb
import (
"github.com/forceu/gokapi/internal/models"
"sync"
"time"
"github.com/forceu/gokapi/internal/models"
)
var statusMap = make(map[string]models.UploadStatus)
+26 -1
View File
@@ -37,7 +37,7 @@ func IsEqualString(t MockT, got, want string) {
}
}
// ResponseBodyContains fails test if http response does contain string
// ResponseBodyContains fails test if http response does not contain the string
func ResponseBodyContains(t MockT, got *httptest.ResponseRecorder, want string) {
t.Helper()
result, err := io.ReadAll(got.Result().Body)
@@ -47,6 +47,31 @@ func ResponseBodyContains(t MockT, got *httptest.ResponseRecorder, want string)
}
}
// ResponseBodyIs fails the test if the http response body is not exactly the wanted string
func ResponseBodyIs(t MockT, got *httptest.ResponseRecorder, want string) {
	t.Helper()
	body, err := io.ReadAll(got.Result().Body)
	IsNil(t, err)
	IsEqualString(t, string(body), want)
}
// ResponseBodyIsWithAlternate fails the test if the http response body does not
// exactly match one of the supplied strings.
func ResponseBodyIsWithAlternate(t MockT, got *httptest.ResponseRecorder, want []string) {
	t.Helper()
	result, err := io.ReadAll(got.Result().Body)
	IsNil(t, err)
	body := string(result)
	for _, wantedString := range want {
		if body == wantedString {
			return
		}
	}
	// Print the actual response body, not the recorder struct, so failures are readable
	t.Errorf("Assertion failed, got: %v \n want: %s.\n\n", body, want)
}
// IsNotEqualString fails test if got and want are not identical
func IsNotEqualString(t MockT, got, want string) {
t.Helper()
@@ -262,14 +262,14 @@ func writeApiKeys() {
database.SaveApiKey(models.ApiKey{
Id: "validkey",
FriendlyName: "First Key",
Permissions: models.ApiPermAll, // TODO
Permissions: models.ApiPermView,
UserId: 5,
PublicId: "taiyeo6uLie6nu6eip0ieweiM5mahv",
})
database.SaveApiKey(models.ApiKey{
Id: "validkeyid7",
FriendlyName: "Key for uid 7",
Permissions: models.ApiPermAll, // TODO
Permissions: models.ApiPermUpload,
UserId: 7,
PublicId: "vu0eemi8eehaisuth3pahDai2eo6ze",
})
@@ -277,21 +277,21 @@ func writeApiKeys() {
Id: "GAh1IhXDvYnqfYLazWBqMB9HSFmNPO",
FriendlyName: "Second Key",
LastUsed: 1620671580,
Permissions: models.ApiPermAll, // TODO
Permissions: models.ApiPermNone,
UserId: 5,
PublicId: "yaeVohng1ohNohsh1vailizeil5ka5",
})
database.SaveApiKey(models.ApiKey{
Id: "jiREglQJW0bOqJakfjdVfe8T1EM8n8",
FriendlyName: "Unnamed Key",
Permissions: models.ApiPermAll, // TODO
Permissions: models.ApiPermNone,
UserId: 5,
PublicId: "ahYie4ophoo5OoGhahCe1neic6thah",
})
database.SaveApiKey(models.ApiKey{
Id: "okeCMWqhVMZSpt5c1qpCWhKvJJPifb",
FriendlyName: "Unnamed Key",
Permissions: models.ApiPermAll, // TODO
Permissions: models.ApiPermNone,
UserId: 5,
PublicId: "ugoo0roowoanahthei7ohSail5OChu",
})
+176 -32
View File
@@ -7,6 +7,7 @@ Handling of webserver and requests / uploads
import (
"bytes"
"context"
"crypto/subtle"
"embed"
"encoding/base64"
"errors"
@@ -30,6 +31,7 @@ import (
"github.com/forceu/gokapi/internal/logging"
"github.com/forceu/gokapi/internal/models"
"github.com/forceu/gokapi/internal/storage"
"github.com/forceu/gokapi/internal/storage/filerequest"
"github.com/forceu/gokapi/internal/webserver/api"
"github.com/forceu/gokapi/internal/webserver/authentication"
"github.com/forceu/gokapi/internal/webserver/authentication/oauth"
@@ -100,11 +102,13 @@ func Start() {
mux.HandleFunc("/changePassword", requireLogin(changePassword, true, true))
mux.HandleFunc("/d", showDownload)
mux.HandleFunc("/downloadFile", downloadFile)
mux.HandleFunc("/downloadPresigned", requireLogin(downloadPresigned, false, false))
mux.HandleFunc("/e2eSetup", requireLogin(showE2ESetup, true, false))
mux.HandleFunc("/error", showError)
mux.HandleFunc("/error-auth", showErrorAuth)
mux.HandleFunc("/error-header", showErrorHeader)
mux.HandleFunc("/error-oauth", showErrorIntOAuth)
mux.HandleFunc("/filerequests", requireLogin(showUploadRequest, true, false))
mux.HandleFunc("/forgotpw", forgotPassword)
mux.HandleFunc("/h/", showHotlink)
mux.HandleFunc("/hotlink/", showHotlink) // backward compatibility
@@ -112,6 +116,7 @@ func Start() {
mux.HandleFunc("/login", showLogin)
mux.HandleFunc("/logs", requireLogin(showLogs, true, false))
mux.HandleFunc("/logout", doLogout)
mux.HandleFunc("/publicUpload", showPublicUpload)
mux.HandleFunc("/uploadChunk", requireLogin(uploadChunk, false, false))
mux.HandleFunc("/uploadStatus", requireLogin(sse.GetStatusSSE, false, false))
mux.HandleFunc("/users", requireLogin(showUserAdmin, true, false))
@@ -228,7 +233,7 @@ type redirectValues struct {
PasswordRequired bool
}
// Handling of /id/?/? - used when filename shall be displayed, will redirect to regular download URL
// Handling of /id/?/? - used when filename shall be displayed, will redirect to the regular download URL
func redirectFromFilename(w http.ResponseWriter, r *http.Request) {
addNoCacheHeader(w)
id := r.PathValue("id")
@@ -330,9 +335,10 @@ func changePassword(w http.ResponseWriter, r *http.Request) {
return
}
}
config := configuration.Get()
err = templateFolder.ExecuteTemplate(w, "changepw",
genericView{PublicName: configuration.Get().PublicName,
MinPasswordLength: configuration.Environment.MinLengthPassword,
genericView{PublicName: config.PublicName,
MinPasswordLength: configuration.GetEnvironment().MinLengthPassword,
ErrorMessage: errMessage,
CustomContent: customStaticInfo})
helper.CheckIgnoreTimeout(err)
@@ -342,7 +348,7 @@ func validateNewPassword(newPassword string, user models.User) (string, string,
if len(newPassword) == 0 {
return "", user.Password, false
}
if len(newPassword) < configuration.Environment.MinLengthPassword {
if len(newPassword) < configuration.GetEnvironment().MinLengthPassword {
return "Password is too short", user.Password, false
}
newPasswordHash := configuration.HashPassword(newPassword, false)
@@ -354,21 +360,32 @@ func validateNewPassword(newPassword string, user models.User) (string, string,
// Handling of /error
func showError(w http.ResponseWriter, r *http.Request) {
const invalidFile = 0
const noCipherSupplied = 1
const wrongCipher = 2
const (
invalidFile = iota
noCipherSupplied
wrongCipher
invalidFileRequest
)
errorReason := invalidFile
cardWidth := 18
if r.URL.Query().Has("e2e") {
errorReason = noCipherSupplied
cardWidth = 25
}
if r.URL.Query().Has("key") {
errorReason = wrongCipher
cardWidth = 25
}
if r.URL.Query().Has("fr") {
errorReason = invalidFileRequest
cardWidth = 30
}
err := templateFolder.ExecuteTemplate(w, "error", genericView{
ErrorId: errorReason,
PublicName: configuration.Get().PublicName,
CustomContent: customStaticInfo})
ErrorId: errorReason,
ErrorCardWidth: cardWidth,
PublicName: configuration.Get().PublicName,
CustomContent: customStaticInfo})
helper.CheckIgnoreTimeout(err)
}
@@ -408,8 +425,19 @@ func forgotPassword(w http.ResponseWriter, r *http.Request) {
helper.CheckIgnoreTimeout(err)
}
// Handling of /filerequests
// Displays the file request (guest upload) management page for the logged-in user
func showUploadRequest(w http.ResponseWriter, r *http.Request) {
	user, err := authentication.GetUserFromRequest(r)
	if err != nil {
		panic(err)
	}
	adminView := (&AdminView{}).convertGlobalConfig(ViewFileRequests, user)
	helper.CheckIgnoreTimeout(templateFolder.ExecuteTemplate(w, "uploadreq", adminView))
}
// Handling of /api
// If user is authenticated, this menu lists all uploads and enables uploading new files
// If the user is authenticated, this menu lists all uploads and enables uploading new files
func showApiAdmin(w http.ResponseWriter, r *http.Request) {
userId, err := authentication.GetUserFromRequest(r)
if err != nil {
@@ -509,9 +537,9 @@ type LoginView struct {
// If it exists, a download form is shown, or a password needs to be entered.
func showDownload(w http.ResponseWriter, r *http.Request) {
addNoCacheHeader(w)
keyId := queryUrl(w, r, "error")
keyId := queryUrl(w, r, "id", "error")
file, ok := storage.GetFile(keyId)
if !ok {
if !ok || file.IsFileRequest() {
redirect(w, "error")
return
}
@@ -573,19 +601,19 @@ func showHotlink(w http.ResponseWriter, r *http.Request) {
hotlinkId = strings.Replace(hotlinkId, "/h/", "", 1)
addNoCacheHeader(w)
file, ok := storage.GetFileByHotlink(hotlinkId)
if !ok {
if !ok || file.IsFileRequest() {
w.Header().Set("Content-Type", "image/svg+xml")
_, _ = w.Write(imageExpiredPicture)
return
}
storage.ServeFile(file, w, r, false)
storage.ServeFile(file, w, r, false, true, false)
}
// Checks if a file is associated with the GET parameter from the current URL
// Stops for 500ms to limit brute forcing if invalid key and redirects to redirectUrl
func queryUrl(w http.ResponseWriter, r *http.Request, redirectUrl string) string {
keys, ok := r.URL.Query()["id"]
if !ok || len(keys[0]) < configuration.Get().LengthId {
func queryUrl(w http.ResponseWriter, r *http.Request, keyword string, redirectUrl string) string {
keys, ok := r.URL.Query()[keyword]
if !ok || len(keys[0]) < configuration.GetEnvironment().LengthId {
select {
case <-time.After(500 * time.Millisecond):
}
@@ -613,8 +641,8 @@ func showAdminMenu(w http.ResponseWriter, r *http.Request) {
}
view := (&AdminView{}).convertGlobalConfig(ViewMain, user)
if len(configuration.Environment.ActiveDeprecations) > 0 {
if user.UserLevel == models.UserLevelSuperAdmin {
if len(configuration.GetEnvironment().ActiveDeprecations) > 0 {
if user.IsSuperAdmin() {
view.ShowDeprecationNotice = true
}
}
@@ -683,11 +711,12 @@ type e2ESetupView struct {
CustomContent customStatic
}
// AdminView contains parameters for all admin related pages
// AdminView contains parameters for all admin-related pages
type AdminView struct {
Items []models.FileApiOutput
ApiKeys []models.ApiKey
Users []userInfo
FileRequests []models.FileRequest
ActiveUser models.User
UserMap map[int]*models.User
ServerUrl string
@@ -707,11 +736,13 @@ type AdminView struct {
ChunkSize int
MaxParallelUploads int
MinLengthPassword int
FileRequestMaxFiles int
FileRequestMaxSize int
TimeNow int64
CustomContent customStatic
}
// getUserMap needs to return the map with pointers, otherwise template cannot call
// getUserMap needs to return the map with pointers; otherwise template cannot call
// functions associated with it
func getUserMap() map[int]*models.User {
result := make(map[int]*models.User)
@@ -731,6 +762,8 @@ const (
ViewAPI
// ViewUsers is the identifier for the user management menu
ViewUsers
// ViewFileRequests is the identifier for the file request menu
ViewFileRequests
)
// Converts the globalConfig variable to an AdminView struct to pass the infos to
@@ -754,17 +787,17 @@ func (u *AdminView) convertGlobalConfig(view int, user models.User) *AdminView {
helper.Check(err)
metaDataList = append(metaDataList, fileInfo)
}
metaDataList = sortMetaData(metaDataList)
metaDataList = sortMetaDataApi(metaDataList)
case ViewAPI:
for _, apiKey := range database.GetAllApiKeys() {
// Double-checking if user of API key exists
// Double-checking if the owner of the API key exists
// If the user was manually deleted from the database, this could lead to a crash
// in the API view
_, ok := u.UserMap[apiKey.UserId]
if !ok {
continue
}
if !apiKey.IsSystemKey {
if !apiKey.IsSystemKey && !apiKey.IsUploadRequestKey() {
if apiKey.UserId == user.Id || user.HasPermissionManageApi() {
apiKeyList = append(apiKeyList, apiKey)
}
@@ -787,6 +820,25 @@ func (u *AdminView) convertGlobalConfig(view int, user models.User) *AdminView {
}
u.Users = append(u.Users, userWithUploads)
}
case ViewFileRequests:
for _, fileRequest := range filerequest.GetAll() {
// Double-checking if the owner of the file request exists
// If the user was manually deleted from the database, this could lead to a crash
// in the file request view
_, ok := u.UserMap[fileRequest.UserId]
if !ok {
continue
}
if fileRequest.UserId != user.Id && !user.HasPermissionListOtherUploads() {
continue
}
fileRequest.Files = sortMetaData(fileRequest.Files)
u.FileRequests = append(u.FileRequests, fileRequest)
if !user.IsAdmin() {
u.FileRequestMaxFiles = configuration.GetEnvironment().MaxFilesGuestUpload
u.FileRequestMaxSize = configuration.GetEnvironment().MaxSizeGuestUploadMb
}
}
}
u.ServerUrl = config.ServerUrl
@@ -801,15 +853,14 @@ func (u *AdminView) convertGlobalConfig(view int, user models.User) *AdminView {
u.IsUserTabAvailable = config.Authentication.Method != models.AuthenticationDisabled
u.EndToEndEncryption = config.Encryption.Level == encryption.EndToEndEncryption
u.MaxParallelUploads = config.MaxParallelUploads
u.MinLengthPassword = config.MinLengthPassword
u.ChunkSize = config.ChunkSize
u.IncludeFilename = config.IncludeFilename
return u
}
// sortMetaData arranges the provided array so that Fies are sorted by most recent upload first and if that is equal
// sortMetaDataApi arranges the provided array so that Fies are sorted by the most recent upload first and if that is equal,
// then by most time remaining first. If that is equal, then sort by ID.
func sortMetaData(input []models.FileApiOutput) []models.FileApiOutput {
func sortMetaDataApi(input []models.FileApiOutput) []models.FileApiOutput {
sort.Slice(input[:], func(i, j int) bool {
if input[i].UploadDate != input[j].UploadDate {
return input[i].UploadDate > input[j].UploadDate
@@ -822,6 +873,18 @@ func sortMetaData(input []models.FileApiOutput) []models.FileApiOutput {
return input
}
// sortMetaData arranges the provided array so that files are sorted by the most recent upload first,
// and by ID when upload dates are equal.
// Currently only used for the files of File Requests, all others use sortMetaDataApi
func sortMetaData(input []models.File) []models.File {
	sort.Slice(input, func(i, j int) bool {
		if input[i].UploadDate == input[j].UploadDate {
			return input[i].Id > input[j].Id
		}
		return input[i].UploadDate > input[j].UploadDate
	})
	return input
}
// sortApiKeys arranges the provided array so that API keys are sorted by most recent usage first and if that is equal
// then by ID
func sortApiKeys(input []models.ApiKey) []models.ApiKey {
@@ -839,6 +902,43 @@ type userInfo struct {
User models.User
}
// Handling of /publicUpload
// Shows the guest upload page for a file request, after validating the request ID,
// its expiry, its file limit and the supplied API key.
func showPublicUpload(w http.ResponseWriter, r *http.Request) {
	addNoCacheHeader(w)
	requestId := queryUrl(w, r, "id", "error?fr")
	fileRequest, ok := filerequest.Get(requestId)
	if !ok {
		redirect(w, "error?fr")
		return
	}
	// Reject if the request has expired
	if !fileRequest.IsUnlimitedTime() && fileRequest.Expiry < time.Now().Unix() {
		redirect(w, "error?fr")
		return
	}
	// Reject if the maximum number of uploads has been reached
	if !fileRequest.IsUnlimitedFiles() && fileRequest.UploadedFiles >= fileRequest.MaxFiles {
		redirect(w, "error?fr")
		return
	}
	providedKey := queryUrl(w, r, "key", "error?fr")
	// Constant-time comparison to avoid leaking the key through timing differences
	if subtle.ConstantTimeCompare([]byte(fileRequest.ApiKey), []byte(providedKey)) != 1 {
		redirect(w, "error?fr")
		return
	}
	config := configuration.Get()
	view := publicUploadView{
		PublicName:    config.PublicName,
		ChunkSize:     config.ChunkSize,
		MaxServerSize: config.MaxFileSizeMB,
		FileRequest:   &fileRequest,
		CustomContent: customStaticInfo,
	}
	helper.CheckIgnoreTimeout(templateFolder.ExecuteTemplate(w, "publicUpload", view))
}
// Handling of /uploadChunk
// If the user is authenticated, this parses the uploaded chunk and stores it
func uploadChunk(w http.ResponseWriter, r *http.Request) {
@@ -848,7 +948,7 @@ func uploadChunk(w http.ResponseWriter, r *http.Request) {
responseError(w, storage.ErrorFileTooLarge)
}
r.Body = http.MaxBytesReader(w, r.Body, maxUpload)
err := fileupload.ProcessNewChunk(w, r, false)
err, _ := fileupload.ProcessNewChunk(w, r, false, "")
responseError(w, err)
}
@@ -873,14 +973,46 @@ func downloadFileWithNameInUrl(w http.ResponseWriter, r *http.Request) {
// Handling of /downloadFile
// Outputs the file to the user and reduces the download remaining count for the file
func downloadFile(w http.ResponseWriter, r *http.Request) {
id := queryUrl(w, r, "error")
id := queryUrl(w, r, "id", "error")
serveFile(id, true, w, r)
}
// Handling of /downloadPresigned
// Outputs the file(s) referenced by a presigned URL to the user. The presigned
// entry is deleted before serving, so each URL can only be used once.
func downloadPresigned(w http.ResponseWriter, r *http.Request) {
	addNoCacheHeader(w)
	keys, ok := r.URL.Query()["key"]
	if !ok {
		responseError(w, storage.ErrorInvalidPresign)
		return
	}
	presign, ok := database.GetPresignedUrl(keys[0])
	if !ok || presign.Expiry < time.Now().Unix() {
		responseError(w, storage.ErrorInvalidPresign)
		return
	}
	// Resolve all referenced file IDs; abort if any of them no longer exists
	var requestedFiles []models.File
	for _, fileId := range presign.FileIds {
		storedFile, found := storage.GetFile(fileId)
		if !found {
			responseError(w, storage.ErrorFileNotFound)
			return
		}
		requestedFiles = append(requestedFiles, storedFile)
	}
	database.DeletePresignedUrl(presign.Id)
	// A single file is served directly, multiple files are bundled into a zip
	if len(requestedFiles) == 1 {
		storage.ServeFile(requestedFiles[0], w, r, true, false, true)
		return
	}
	storage.ServeFilesAsZip(requestedFiles, presign.Filename, w, r)
}
func serveFile(id string, isRootUrl bool, w http.ResponseWriter, r *http.Request) {
addNoCacheHeader(w)
savedFile, ok := storage.GetFile(id)
if !ok {
if !ok || savedFile.IsFileRequest() {
if isRootUrl {
redirect(w, "error")
} else {
@@ -898,7 +1030,7 @@ func serveFile(id string, isRootUrl bool, w http.ResponseWriter, r *http.Request
return
}
}
storage.ServeFile(savedFile, w, r, true)
storage.ServeFile(savedFile, w, r, true, true, false)
}
func requireLogin(next http.HandlerFunc, isUiCall, isPwChangeView bool) http.HandlerFunc {
@@ -975,6 +1107,7 @@ type genericView struct {
RedirectUrl string
ErrorMessage string
ErrorId int
ErrorCardWidth int
MinPasswordLength int
CustomContent customStatic
}
@@ -990,3 +1123,14 @@ type oauthErrorView struct {
ErrorProvidedMessage string
CustomContent customStatic
}
// publicUploadView contains the template parameters for the public upload (guest upload) page
type publicUploadView struct {
	IsAdminView bool // not set in showPublicUpload; presumably read by shared templates — TODO confirm
	IsDownloadView bool // not set in showPublicUpload; presumably read by shared templates — TODO confirm
	PublicName string // public display name of this Gokapi instance
	ChunkSize int // chunk size for chunked uploads, from configuration
	MaxServerSize int // maximum allowed file size in MB, from configuration
	CustomContent customStatic
	FileRequest *models.FileRequest // the file request this upload page belongs to
}
+18 -12
View File
@@ -6,6 +6,13 @@ import (
"bufio"
"encoding/json"
"errors"
"html/template"
"net/http"
"os"
"strings"
"testing"
"time"
"github.com/forceu/gokapi/internal/configuration"
"github.com/forceu/gokapi/internal/configuration/database"
"github.com/forceu/gokapi/internal/models"
@@ -13,12 +20,6 @@ import (
"github.com/forceu/gokapi/internal/test"
"github.com/forceu/gokapi/internal/test/testconfiguration"
"github.com/forceu/gokapi/internal/webserver/authentication"
"html/template"
"net/http"
"os"
"strings"
"testing"
"time"
)
func TestMain(m *testing.M) {
@@ -236,17 +237,22 @@ func TestError(t *testing.T) {
t.Parallel()
test.HttpPageResult(t, test.HttpTestConfig{
Url: "http://localhost:53843/error",
RequiredContent: []string{"Sorry, this file cannot be found"},
RequiredContent: []string{"The link may have expired or the file has been downloaded too many times"},
IsHtml: true,
})
test.HttpPageResult(t, test.HttpTestConfig{
Url: "http://localhost:53843/error?e2e",
RequiredContent: []string{"This file is encrypted and no key has been passed"},
RequiredContent: []string{"This file is encrypted, but no key was provided"},
IsHtml: true,
})
test.HttpPageResult(t, test.HttpTestConfig{
Url: "http://localhost:53843/error?key",
RequiredContent: []string{"This file is encrypted and an incorrect key has been passed"},
RequiredContent: []string{"This file is encrypted, but the provided key is incorrect"},
IsHtml: true,
})
test.HttpPageResult(t, test.HttpTestConfig{
Url: "http://localhost:53843/error?fr",
RequiredContent: []string{"The file limit for this upload request has been reached"},
IsHtml: true,
})
}
@@ -586,7 +592,7 @@ func TestProcessApi(t *testing.T) {
// Not authorised
test.HttpPageResult(t, test.HttpTestConfig{
Url: "http://127.0.0.1:53843/api/files/list",
RequiredContent: []string{"{\"Result\":\"error\",\"ErrorMessage\":\"Unauthorized\"}"},
RequiredContent: []string{`{"Result":"error","ErrorMessage":"Unauthorized","ErrorCode":2}`},
ExcludedContent: []string{"smallfile2"},
ResultCode: 401,
Cookies: []test.Cookie{{
@@ -596,7 +602,7 @@ func TestProcessApi(t *testing.T) {
})
test.HttpPageResult(t, test.HttpTestConfig{
Url: "http://127.0.0.1:53843/api/files/list",
RequiredContent: []string{"{\"Result\":\"error\",\"ErrorMessage\":\"Unauthorized\"}"},
RequiredContent: []string{`{"Result":"error","ErrorMessage":"Unauthorized","ErrorCode":2}`},
ExcludedContent: []string{"smallfile2"},
ResultCode: 401,
Headers: []test.Header{{"apikey", "invalid"}},
@@ -605,7 +611,7 @@ func TestProcessApi(t *testing.T) {
// Valid session does not grant API access
test.HttpPageResult(t, test.HttpTestConfig{
Url: "http://127.0.0.1:53843/api/files/list",
RequiredContent: []string{"{\"Result\":\"error\",\"ErrorMessage\":\"Unauthorized\"}"},
RequiredContent: []string{`{"Result":"error","ErrorMessage":"Unauthorized","ErrorCode":2}`},
ExcludedContent: []string{"smallfile2"},
ResultCode: 401,
Cookies: []test.Cookie{{
+476 -113
View File
@@ -15,12 +15,17 @@ import (
"github.com/forceu/gokapi/internal/logging"
"github.com/forceu/gokapi/internal/models"
"github.com/forceu/gokapi/internal/storage"
"github.com/forceu/gokapi/internal/storage/chunking"
"github.com/forceu/gokapi/internal/storage/chunking/chunkreservation"
"github.com/forceu/gokapi/internal/storage/filerequest"
"github.com/forceu/gokapi/internal/storage/filerequest/ratelimiter"
"github.com/forceu/gokapi/internal/webserver/api/errorcodes"
"github.com/forceu/gokapi/internal/webserver/authentication/users"
"github.com/forceu/gokapi/internal/webserver/fileupload"
)
const LengthPublicId = 35
const LengthApiKey = 30
const minLengthUser = 2
// Process parses the request and executes the API call or returns an error message to the sender
func Process(w http.ResponseWriter, r *http.Request) {
@@ -30,17 +35,17 @@ func Process(w http.ResponseWriter, r *http.Request) {
routing, ok := getRouting(requestUrl)
if !ok {
sendError(w, http.StatusBadRequest, "Invalid request")
sendError(w, http.StatusBadRequest, errorcodes.InvalidUrl, "Invalid request")
return
}
var user models.User
user, ok = isAuthorisedForApi(r, routing)
if !ok {
sendError(w, http.StatusUnauthorized, "Unauthorized")
sendError(w, http.StatusUnauthorized, errorcodes.InvalidApiKey, "Unauthorized")
return
}
if routing.AdminOnly && (user.UserLevel != models.UserLevelAdmin && user.UserLevel != models.UserLevelSuperAdmin) {
sendError(w, http.StatusUnauthorized, "Unauthorized")
if routing.AdminOnly && !user.IsAdmin() {
sendError(w, http.StatusUnauthorized, errorcodes.AdminOnly, "Unauthorized")
return
}
if routing.RequestParser == nil {
@@ -50,7 +55,7 @@ func Process(w http.ResponseWriter, r *http.Request) {
parser := routing.RequestParser.New()
err := parser.ParseRequest(r)
if err != nil {
sendError(w, http.StatusBadRequest, err.Error())
sendError(w, http.StatusBadRequest, errorcodes.CannotParse, err.Error())
return
}
routing.Continue(w, parser, user)
@@ -67,11 +72,11 @@ func apiEditFile(w http.ResponseWriter, r requestParser, user models.User) {
}
file, ok := database.GetMetaDataById(request.Id)
if !ok {
sendError(w, http.StatusNotFound, "Invalid file ID provided.")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "Invalid file ID provided.")
return
}
if file.UserId != user.Id && !user.HasPermission(models.UserPermEditOtherUploads) {
sendError(w, http.StatusUnauthorized, "No permission to edit file.")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to edit file.")
return
}
if request.UnlimitedDownloads {
@@ -108,17 +113,18 @@ func apiEditFile(w http.ResponseWriter, r requestParser, user models.User) {
}
// generateNewKey generates and saves a new API key
func generateNewKey(defaultPermissions bool, userId int, friendlyName string) models.ApiKey {
func generateNewKey(defaultPermissions bool, userId int, friendlyName, filerequstId string) models.ApiKey {
if friendlyName == "" {
friendlyName = "Unnamed key"
}
newKey := models.ApiKey{
Id: helper.GenerateRandomString(LengthApiKey),
PublicId: helper.GenerateRandomString(LengthPublicId),
FriendlyName: friendlyName,
Permissions: models.ApiPermDefault,
IsSystemKey: false,
UserId: userId,
Id: helper.GenerateRandomString(LengthApiKey),
PublicId: helper.GenerateRandomString(LengthPublicId),
FriendlyName: friendlyName,
Permissions: models.ApiPermDefault,
IsSystemKey: false,
UserId: userId,
UploadRequestId: filerequstId,
}
if !defaultPermissions {
newKey.Permissions = models.ApiPermNone
@@ -134,11 +140,11 @@ func apiDeleteKey(w http.ResponseWriter, r requestParser, user models.User) {
}
apiKeyOwner, apiKey, ok := isValidKeyForEditing(request.KeyId)
if !ok {
sendError(w, http.StatusNotFound, "Invalid key ID provided.")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "Invalid key ID provided.")
return
}
if apiKeyOwner.Id != user.Id && !user.HasPermission(models.UserPermManageApiKeys) {
sendError(w, http.StatusUnauthorized, "No permission to delete this API key")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to delete this API key")
return
}
database.DeleteApiKey(apiKey.Id)
@@ -151,28 +157,33 @@ func apiModifyApiKey(w http.ResponseWriter, r requestParser, user models.User) {
}
apiKeyOwner, apiKey, ok := isValidKeyForEditing(request.KeyId)
if !ok {
sendError(w, http.StatusNotFound, "Invalid key ID provided.")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "Invalid key ID provided.")
return
}
if apiKeyOwner.Id != user.Id && !user.HasPermission(models.UserPermManageApiKeys) {
sendError(w, http.StatusUnauthorized, "No permission to delete this API key")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to delete this API key")
return
}
switch request.Permission {
case models.ApiPermReplace:
if !apiKeyOwner.HasPermissionReplace() {
sendError(w, http.StatusUnauthorized, "Insufficient user permission for owner to set this API permission")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "Insufficient user permission for owner to set this API permission")
return
}
case models.ApiPermManageUsers:
if !apiKeyOwner.HasPermissionManageUsers() {
sendError(w, http.StatusUnauthorized, "Insufficient user permission for owner to set this API permission")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "Insufficient user permission for owner to set this API permission")
return
}
case models.ApiPermManageLogs:
if !apiKeyOwner.HasPermissionManageLogs() {
sendError(w, http.StatusUnauthorized, "Insufficient user permission for owner to set this API permission")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "Insufficient user permission for owner to set this API permission")
return
}
case models.ApiPermManageFileRequests:
if !apiKeyOwner.HasPermissionCreateFileRequests() {
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "Insufficient user permission for owner to set this API permission")
return
}
default:
@@ -203,7 +214,7 @@ func isValidKeyForEditing(apiKey string) (models.User, models.ApiKey, bool) {
func isValidUserForEditing(w http.ResponseWriter, userId int) (models.User, bool) {
user, ok := database.GetUser(userId)
if !ok {
sendError(w, http.StatusNotFound, "Invalid user id provided.")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "Invalid user id provided.")
return models.User{}, false
}
return user, true
@@ -214,7 +225,7 @@ func apiCreateApiKey(w http.ResponseWriter, r requestParser, user models.User) {
if !ok {
panic("invalid parameter passed")
}
key := generateNewKey(request.BasicPermissions, user.Id, request.FriendlyName)
key := generateNewKey(request.BasicPermissions, user.Id, request.FriendlyName, "")
output := models.ApiKeyOutput{
Result: "OK",
Id: key.Id,
@@ -230,23 +241,16 @@ func apiCreateUser(w http.ResponseWriter, r requestParser, user models.User) {
if !ok {
panic("invalid parameter passed")
}
if len(request.Username) < minLengthUser {
sendError(w, http.StatusBadRequest, "Invalid username provided.")
return
}
_, ok = database.GetUserByName(request.Username)
if ok {
sendError(w, http.StatusConflict, "User already exists.")
return
}
newUser := models.User{
Name: request.Username,
UserLevel: models.UserLevelUser,
}
database.SaveUser(newUser, true)
newUser, ok = database.GetUserByName(request.Username)
if !ok {
sendError(w, http.StatusInternalServerError, "Could not save user")
newUser, err := users.Create(request.Username)
if err != nil {
switch {
case errors.Is(err, users.ErrorNameToShort):
sendError(w, http.StatusBadRequest, errorcodes.NoPermission, "Invalid username provided.")
case errors.Is(err, users.ErrorUserExists):
sendError(w, http.StatusConflict, errorcodes.AlreadyExists, "User already exists.")
default:
sendError(w, http.StatusInternalServerError, errorcodes.InternalServer, err.Error())
}
return
}
logging.LogUserCreation(newUser, user)
@@ -260,16 +264,16 @@ func apiChangeFriendlyName(w http.ResponseWriter, r requestParser, user models.U
}
ownerApiKey, apiKey, ok := isValidKeyForEditing(request.KeyId)
if !ok {
sendError(w, http.StatusNotFound, "Invalid key ID provided.")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "Invalid key ID provided.")
return
}
if ownerApiKey.Id != user.Id && !user.HasPermission(models.UserPermManageApiKeys) {
sendError(w, http.StatusUnauthorized, "No permission to edit this key")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to edit this API key")
return
}
err := renameApiKeyFriendlyName(apiKey.Id, request.FriendlyName)
if err != nil {
sendError(w, http.StatusInternalServerError, err.Error())
sendError(w, http.StatusInternalServerError, errorcodes.InternalServer, err.Error())
return
}
}
@@ -296,11 +300,11 @@ func apiDeleteFile(w http.ResponseWriter, r requestParser, user models.User) {
}
file, ok := database.GetMetaDataById(request.Id)
if !ok {
sendError(w, http.StatusNotFound, "Invalid file ID provided.")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "Invalid file ID provided.")
return
}
if file.UserId != user.Id && !user.HasPermission(models.UserPermDeleteOtherUploads) {
sendError(w, http.StatusUnauthorized, "No permission to delete this file")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to delete this file")
return
}
logging.LogDelete(file, user)
@@ -318,16 +322,16 @@ func apiRestoreFile(w http.ResponseWriter, r requestParser, user models.User) {
}
file, ok := database.GetMetaDataById(request.Id)
if !ok {
sendError(w, http.StatusNotFound, "Invalid file ID provided or file has already been deleted.")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "Invalid file ID provided or file has already been deleted.")
return
}
if file.UserId != user.Id && !user.HasPermission(models.UserPermDeleteOtherUploads) {
sendError(w, http.StatusUnauthorized, "No permission to restore this file")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to restore this file")
return
}
file, ok = storage.CancelPendingFileDeletion(file.Id)
if !ok {
sendError(w, http.StatusNotFound, "Invalid file ID provided or file has already been deleted.")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "Invalid file ID provided or file has already been deleted.")
return
}
logging.LogRestore(file, user)
@@ -339,18 +343,109 @@ func apiChunkAdd(w http.ResponseWriter, r requestParser, _ models.User) {
if !ok {
panic("invalid parameter passed")
}
maxUpload := int64(configuration.Get().MaxFileSizeMB) * 1024 * 1024
if request.Request.ContentLength > maxUpload {
sendError(w, http.StatusBadRequest, storage.ErrorFileTooLarge.Error())
return
statusCode, errCode, errString := processNewChunk(w, request, configuration.Get().MaxFileSizeMB, "")
if statusCode != http.StatusOK {
sendError(w, statusCode, errCode, errString)
}
}
request.Request.Body = http.MaxBytesReader(w, request.Request.Body, maxUpload)
err := fileupload.ProcessNewChunk(w, request.Request, true)
if err != nil {
sendError(w, http.StatusBadRequest, err.Error())
// apiChunkReserve reserves a new upload uuid for a guest (file request)
// chunked upload. It rejects the reservation when the file request is full
// or — for unlimited requests — when the rate limiter denies a new uuid.
// On success it responds with {"Result":"OK","Uuid":...}.
func apiChunkReserve(w http.ResponseWriter, r requestParser, _ models.User) {
	params, isExpectedType := r.(*paramChunkReserve)
	if !isExpectedType {
		panic("invalid parameter passed")
	}
	fileRequest, valid, status, errCode, errMsg := checkFileRequestAndApiKey(params.Id, params.ApiKey)
	if !valid {
		sendError(w, status, errCode, errMsg)
		return
	}
	unlimitedFiles := fileRequest.IsUnlimitedFiles()
	if fileRequest.FilesRemaining() <= 0 && !unlimitedFiles {
		sendError(w, http.StatusBadRequest, errorcodes.CannotUploadMoreFiles, "No more files can be uploaded for this file request")
		return
	}
	// Unlimited requests have no file quota, so reservations are rate-limited instead
	if unlimitedFiles && !ratelimiter.IsAllowedNewUuid(fileRequest.Id) {
		sendError(w, http.StatusTooManyRequests, errorcodes.RateLimited, "Too many reservations for this file request. Please wait a few seconds before reserving a new uuid.")
		return
	}
	type reserveResponse struct {
		Result string `json:"Result"`
		Uuid   string `json:"Uuid"`
	}
	response := reserveResponse{Result: "OK", Uuid: chunkreservation.New(fileRequest.Id)}
	serialised, err := json.Marshal(response)
	helper.Check(err)
	_, _ = w.Write(serialised)
}
// apiChunkUnreserve releases a previously reserved upload uuid of a file
// request and removes any chunk data already uploaded under that uuid.
func apiChunkUnreserve(w http.ResponseWriter, r requestParser, _ models.User) {
	params, isExpectedType := r.(*paramChunkUnreserve)
	if !isExpectedType {
		panic("invalid parameter passed")
	}
	fileRequest, valid, status, errCode, errMsg := checkFileRequestAndApiKey(params.Id, params.ApiKey)
	if !valid {
		sendError(w, status, errCode, errMsg)
		return
	}
	chunkreservation.SetComplete(fileRequest.Id, params.Uuid)
	// Best effort: a chunk may not exist yet for this uuid, which is fine here
	_ = chunking.DeleteChunk(params.Uuid)
	_, _ = w.Write([]byte(`{"Result":"OK"}`))
}
// apiChunkUploadRequestAdd accepts a new chunk for a guest upload made
// through a file request. The effective size limit is the smallest of the
// server-wide maximum, the guest-upload maximum (non-admins only, 0 meaning
// "no guest limit") and the limit configured on the file request itself.
func apiChunkUploadRequestAdd(w http.ResponseWriter, r requestParser, user models.User) {
	params, isExpectedType := r.(*paramChunkUploadRequestAdd)
	if !isExpectedType {
		panic("invalid parameter passed")
	}
	fileRequest, valid, status, errCode, errMsg := checkFileRequestAndApiKey(params.FileRequestId, params.ApiKey)
	if !valid {
		sendError(w, status, errCode, errMsg)
		return
	}
	sizeLimitMb := configuration.Get().MaxFileSizeMB
	if !user.IsAdmin() && configuration.GetEnvironment().MaxSizeGuestUploadMb != 0 {
		sizeLimitMb = min(sizeLimitMb, configuration.GetEnvironment().MaxSizeGuestUploadMb)
	}
	if !fileRequest.IsUnlimitedSize() {
		sizeLimitMb = min(sizeLimitMb, fileRequest.MaxSize)
	}
	httpStatus, errorCode, errorText := processNewChunk(w, params, sizeLimitMb, fileRequest.Id)
	if httpStatus != http.StatusOK {
		sendError(w, httpStatus, errorCode, errorText)
	}
}
// checkFileRequestAndApiKey validates that a file request with the given ID
// exists, that the supplied API key matches it, and that the request has
// neither expired nor reached its file quota. On success it returns the file
// request and true; on failure it returns false together with an HTTP status
// code, error code and message suitable for sendError.
func checkFileRequestAndApiKey(fileRequestId, apiKey string) (models.FileRequest, bool, int, int, string) {
	fileRequest, found := filerequest.Get(fileRequestId)
	switch {
	case !found:
		return models.FileRequest{}, false, http.StatusNotFound, errorcodes.NotFound, "FileRequest does not exist with the given ID"
	case fileRequest.ApiKey != apiKey:
		return models.FileRequest{}, false, http.StatusUnauthorized, errorcodes.InvalidApiKey, "Invalid API key"
	case !fileRequest.IsUnlimitedTime() && fileRequest.Expiry < time.Now().Unix():
		return models.FileRequest{}, false, http.StatusUnauthorized, errorcodes.RequestExpired, "Filerequest has expired"
	case !fileRequest.IsUnlimitedFiles() && fileRequest.UploadedFiles >= fileRequest.MaxFiles:
		return models.FileRequest{}, false, http.StatusUnauthorized, errorcodes.CannotUploadMoreFiles, "Max file count has already been reached for this file request"
	default:
		return fileRequest, true, 0, 0, ""
	}
}
// chunkParams abstracts the parameter structs that carry the underlying
// *http.Request of a chunk upload, so processNewChunk can be shared between
// the authenticated chunk endpoint and the file-request (guest) endpoint.
type chunkParams interface {
	GetRequest() *http.Request
}
// processNewChunk enforces the per-upload size limit on an incoming chunk
// and hands the request to the fileupload package. It returns
// http.StatusOK on success, otherwise an HTTP status code, error code and
// message suitable for sendError. filerequestId is empty for regular,
// authenticated uploads.
func processNewChunk(w http.ResponseWriter, request chunkParams, maxFileSizeMb int, filerequestId string) (int, int, string) {
	httpRequest := request.GetRequest()
	sizeLimitBytes := int64(maxFileSizeMb) * 1024 * 1024
	if httpRequest.ContentLength > sizeLimitBytes {
		return http.StatusBadRequest, errorcodes.FileTooLarge, storage.ErrorFileTooLarge.Error()
	}
	// Hard cap in case the declared Content-Length understates the body size
	httpRequest.Body = http.MaxBytesReader(w, httpRequest.Body, sizeLimitBytes)
	err, errCode := fileupload.ProcessNewChunk(w, httpRequest, true, filerequestId)
	if err != nil {
		return http.StatusBadRequest, errCode, err.Error()
	}
	return http.StatusOK, 0, ""
}
func apiChunkComplete(w http.ResponseWriter, r requestParser, user models.User) {
@@ -358,31 +453,57 @@ func apiChunkComplete(w http.ResponseWriter, r requestParser, user models.User)
if !ok {
panic("invalid parameter passed")
}
if request.IsNonBlocking {
go doBlockingPartCompleteChunk(nil, request, user)
_, _ = io.WriteString(w, "{\"result\":\"OK\"}")
return
}
doBlockingPartCompleteChunk(w, request, user)
}
func doBlockingPartCompleteChunk(w http.ResponseWriter, request *paramChunkComplete, user models.User) {
uploadRequest := fileupload.CreateUploadConfig(request.AllowedDownloads,
uploadParams := fileupload.CreateUploadConfig(request.AllowedDownloads,
request.ExpiryDays,
request.Password,
request.UnlimitedTime,
request.UnlimitedDownloads,
request.IsE2E,
request.FileSize)
file, err := fileupload.CompleteChunk(request.Uuid, request.FileHeader, user.Id, uploadRequest)
if err != nil {
sendError(w, http.StatusBadRequest, err.Error())
request.FileSize,
"")
if request.IsNonBlocking {
go doBlockingPartCompleteChunk(nil, request.Uuid, request.FileHeader, user, uploadParams)
_, _ = io.WriteString(w, "{\"result\":\"OK\"}")
return
}
logging.LogUpload(file, user)
doBlockingPartCompleteChunk(w, request.Uuid, request.FileHeader, user, uploadParams)
}
// doBlockingPartCompleteChunk assembles the uploaded chunks identified by
// uuid into a stored file owned by user and writes the resulting file
// metadata as JSON to w.
// w may be nil when called asynchronously from a non-blocking completion;
// sendError visibly tolerates a nil writer — presumably outputFileJson does
// too (TODO confirm).
func doBlockingPartCompleteChunk(w http.ResponseWriter, uuid string, fileHeader chunking.FileHeader, user models.User, uploadParameters models.UploadParameters) {
	file, err := fileupload.CompleteChunk(uuid, fileHeader, user.Id, uploadParameters)
	if err != nil {
		sendError(w, http.StatusBadRequest, errorcodes.UnspecifiedError, err.Error())
		return
	}
	// For guest uploads, mark the reserved uuid of the file request as used
	if uploadParameters.FileRequestId != "" {
		chunkreservation.SetComplete(uploadParameters.FileRequestId, uuid)
	}
	// Lookup result is deliberately unchecked: for regular uploads
	// FileRequestId is empty, so fr stays the zero value, which
	// logging.LogUpload has to accept.
	fr, _ := filerequest.Get(uploadParameters.FileRequestId)
	logging.LogUpload(file, user, fr)
	outputFileJson(w, file)
}
// apiChunkUploadRequestComplete finalises a chunked guest upload made through
// a file request. With IsNonBlocking set, assembly runs in a background
// goroutine and the endpoint replies with OK immediately; otherwise the
// response contains the metadata of the finished file.
func apiChunkUploadRequestComplete(w http.ResponseWriter, r requestParser, user models.User) {
	request, ok := r.(*paramChunkUploadRequestComplete)
	if !ok {
		panic("invalid parameter passed")
	}
	fileRequest, ok, status, errorCode, errorMsg := checkFileRequestAndApiKey(request.FileRequestId, request.ApiKey)
	if !ok {
		sendError(w, status, errorCode, errorMsg)
		return
	}
	// Guest uploads use fixed settings: unlimited time and unlimited
	// downloads, no password and no end-to-end encryption; only the file
	// size and the owning file request are taken from the request.
	uploadParams := fileupload.CreateUploadConfig(0,
		0, "", true, true,
		false, request.FileSize, fileRequest.Id)
	if request.IsNonBlocking {
		// Fire-and-forget: nil writer suppresses the response of the worker
		go doBlockingPartCompleteChunk(nil, request.Uuid, request.FileHeader, user, uploadParams)
		_, _ = io.WriteString(w, "{\"result\":\"OK\"}")
		return
	}
	doBlockingPartCompleteChunk(w, request.Uuid, request.FileHeader, user, uploadParams)
}
func apiVersionInfo(w http.ResponseWriter, _ requestParser, _ models.User) {
type versionInfo struct {
Version string
@@ -408,18 +529,25 @@ func apiConfigInfo(w http.ResponseWriter, _ requestParser, _ models.User) {
_, _ = w.Write(result)
}
func apiList(w http.ResponseWriter, _ requestParser, user models.User) {
validFiles := getFilesForUser(user)
func apiList(w http.ResponseWriter, r requestParser, user models.User) {
request, ok := r.(*paramFilesListAll)
if !ok {
panic("invalid parameter passed")
}
validFiles := getFilesForUser(user, request.ShowFileRequests)
result, err := json.Marshal(validFiles)
helper.Check(err)
_, _ = w.Write(result)
}
func getFilesForUser(user models.User) []models.FileApiOutput {
func getFilesForUser(user models.User, includeUploadRequests bool) []models.FileApiOutput {
var validFiles []models.FileApiOutput
timeNow := time.Now().Unix()
config := configuration.Get()
for _, element := range database.GetAllMetadata() {
if !includeUploadRequests && element.IsFileRequest() {
continue
}
if element.UserId == user.Id || user.HasPermission(models.UserPermListOtherUploads) {
if !storage.IsExpiredFile(element, timeNow) {
file, err := element.ToFileApiOutput(config.ServerUrl, config.IncludeFilename)
@@ -436,14 +564,13 @@ func apiListSingle(w http.ResponseWriter, r requestParser, user models.User) {
if !ok {
panic("invalid parameter passed")
}
id := strings.TrimPrefix(request.RequestUrl, "/files/list/")
file, ok := storage.GetFile(id)
file, ok := storage.GetFile(request.Id)
if !ok {
sendError(w, http.StatusNotFound, "File not found")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "File not found")
return
}
if file.UserId != user.Id && !user.HasPermission(models.UserPermListOtherUploads) {
sendError(w, http.StatusUnauthorized, "No permission to view file")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to view file")
return
}
config := configuration.Get()
@@ -454,6 +581,77 @@ func apiListSingle(w http.ResponseWriter, r requestParser, user models.User) {
_, _ = w.Write(result)
}
// apiDownloadSingle serves a single file to the caller, or — when PresignUrl
// is set — creates a short-lived presigned download URL for it instead.
func apiDownloadSingle(w http.ResponseWriter, r requestParser, user models.User) {
	params, isExpectedType := r.(*paramFilesDownloadSingle)
	if !isExpectedType {
		panic("invalid parameter passed")
	}
	file, status, errCode, errMsg := checkDownloadAllowed(params.Id, user)
	if status != 0 {
		sendError(w, status, errCode, errMsg)
		return
	}
	if params.PresignUrl {
		createAndOutputPresignedUrl([]string{file.Id}, w, "")
		return
	}
	storage.ServeFile(file, w, params.WebRequest, true, params.IncreaseCounter, true)
}
// apiDownloadZip serves multiple files as a single zip archive, or — when
// PresignUrl is set — creates a short-lived presigned download URL for them
// instead. Every requested file must pass the same ownership / permission /
// encryption checks as a single download; the first failing file aborts the
// whole request.
func apiDownloadZip(w http.ResponseWriter, r requestParser, user models.User) {
	request, ok := r.(*paramFilesDownloadZip)
	if !ok {
		panic("invalid parameter passed")
	}
	// Pre-size both slices — the number of requested files is known up-front
	requestedFiles := make([]models.File, 0, len(request.Ids))
	requestedFileIds := make([]string, 0, len(request.Ids))
	for _, fileId := range request.Ids {
		file, statusCode, errCode, errMessage := checkDownloadAllowed(fileId, user)
		if statusCode != 0 {
			sendError(w, statusCode, errCode, errMessage)
			return
		}
		requestedFiles = append(requestedFiles, file)
		requestedFileIds = append(requestedFileIds, file.Id)
	}
	if !request.PresignUrl {
		storage.ServeFilesAsZip(requestedFiles, request.Filename, w, request.WebRequest)
		return
	}
	createAndOutputPresignedUrl(requestedFileIds, w, request.Filename)
}
// checkDownloadAllowed verifies that fileId refers to an existing file the
// given user may download. On success it returns the file and a zero status
// code; on failure it returns an HTTP status code, error code and message
// suitable for sendError. End-to-end encrypted files cannot be served here,
// as the server cannot decrypt them.
func checkDownloadAllowed(fileId string, user models.User) (models.File, int, int, string) {
	file, found := storage.GetFile(fileId)
	switch {
	case !found:
		return models.File{}, http.StatusNotFound, errorcodes.NotFound, "file not found"
	case file.UserId != user.Id && !user.HasPermission(models.UserPermListOtherUploads):
		return models.File{}, http.StatusUnauthorized, errorcodes.NoPermission, "no permission to download file"
	case file.Encryption.IsEndToEndEncrypted:
		return models.File{}, http.StatusBadRequest, errorcodes.EndToEndNotSupported, "End-to-end encrypted files cannot be downloaded"
	default:
		return file, 0, 0, ""
	}
}
// createAndOutputPresignedUrl stores a short-lived presigned download entry
// for the given file ids and writes the resulting download URL as JSON to w.
// filename is only used for multi-file (zip) downloads and may be empty.
func createAndOutputPresignedUrl(ids []string, w http.ResponseWriter, filename string) {
	// Presigned links are single-purpose and expire quickly
	const presignValidity = 30 * time.Second
	entry := models.Presign{
		Id:       helper.GenerateRandomString(60),
		FileIds:  ids,
		Expiry:   time.Now().Add(presignValidity).Unix(),
		Filename: filename,
	}
	database.SavePresignedUrl(entry)
	type presignResponse struct {
		Result      string `json:"Result"`
		DownloadUrl string `json:"downloadUrl"`
	}
	response := presignResponse{
		Result:      "OK",
		DownloadUrl: configuration.Get().ServerUrl + "downloadPresigned?key=" + entry.Id,
	}
	serialised, err := json.Marshal(response)
	helper.Check(err)
	_, _ = w.Write(serialised)
}
func apiUploadFile(w http.ResponseWriter, r requestParser, user models.User) {
request, ok := r.(*paramFilesAdd)
if !ok {
@@ -461,14 +659,14 @@ func apiUploadFile(w http.ResponseWriter, r requestParser, user models.User) {
}
maxUpload := int64(configuration.Get().MaxFileSizeMB) * 1024 * 1024
if request.Request.ContentLength > maxUpload {
sendError(w, http.StatusBadRequest, storage.ErrorFileTooLarge.Error())
sendError(w, http.StatusBadRequest, errorcodes.FileTooLarge, storage.ErrorFileTooLarge.Error())
return
}
request.Request.Body = http.MaxBytesReader(w, request.Request.Body, maxUpload)
err := fileupload.ProcessCompleteFile(w, request.Request, user.Id, configuration.Get().MaxMemory)
if err != nil {
sendError(w, http.StatusBadRequest, err.Error())
sendError(w, http.StatusBadRequest, errorcodes.UnspecifiedError, err.Error())
return
}
}
@@ -480,11 +678,11 @@ func apiDuplicateFile(w http.ResponseWriter, r requestParser, user models.User)
}
file, ok := storage.GetFile(request.Id)
if !ok {
sendError(w, http.StatusNotFound, "Invalid id provided.")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "Invalid id provided.")
return
}
if file.UserId != user.Id && !user.HasPermission(models.UserPermListOtherUploads) {
sendError(w, http.StatusUnauthorized, "No permission to duplicate this file")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to duplicate this file")
return
}
uploadConfig := fileupload.CreateUploadConfig(request.AllowedDownloads,
@@ -493,11 +691,12 @@ func apiDuplicateFile(w http.ResponseWriter, r requestParser, user models.User)
request.UnlimitedTime,
request.UnlimitedDownloads,
false, // is not being used by storage.DuplicateFile
0) // is not being used by storage.DuplicateFile
0, // is not being used by storage.DuplicateFile
"")
uploadConfig.UserId = user.Id
newFile, err := storage.DuplicateFile(file, request.RequestedChanges, request.FileName, uploadConfig)
if err != nil {
sendError(w, http.StatusInternalServerError, err.Error())
sendError(w, http.StatusInternalServerError, errorcodes.InternalServer, err.Error())
return
}
outputFileApiInfo(w, newFile)
@@ -510,16 +709,16 @@ func apiChangeFileOwner(w http.ResponseWriter, r requestParser, user models.User
}
file, ok := storage.GetFile(request.Id)
if !ok {
sendError(w, http.StatusNotFound, "Invalid id provided.")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "Invalid id provided.")
return
}
if !user.HasPermission(models.UserPermEditOtherUploads) {
sendError(w, http.StatusUnauthorized, "No permission to edit this file")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to edit this file")
return
}
_, exists := database.GetUser(request.NewOwner)
if !exists {
sendError(w, http.StatusBadRequest, "User does not exist")
sendError(w, http.StatusBadRequest, errorcodes.NotFound, "User does not exist")
return
}
file.UserId = request.NewOwner
@@ -534,21 +733,25 @@ func apiReplaceFile(w http.ResponseWriter, r requestParser, user models.User) {
}
fileOriginal, ok := storage.GetFile(request.Id)
if !ok {
sendError(w, http.StatusNotFound, "Invalid id provided.")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "Invalid id provided.")
return
}
if fileOriginal.UserId != user.Id && !user.HasPermission(models.UserPermReplaceOtherUploads) {
sendError(w, http.StatusUnauthorized, "No permission to replace this file")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to replace this file")
return
}
if fileOriginal.IsFileRequest() {
sendError(w, http.StatusBadRequest, errorcodes.UnsupportedFile, "Cannot replace a file request upload")
return
}
fileNewContent, ok := storage.GetFile(request.IdNewContent)
if !ok {
sendError(w, http.StatusNotFound, "Invalid id provided.")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "Invalid id provided.")
return
}
if fileNewContent.UserId != user.Id && !user.HasPermission(models.UserPermListOtherUploads) {
sendError(w, http.StatusUnauthorized, "No permission to duplicate this file")
sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to duplicate this file")
return
}
@@ -556,11 +759,11 @@ func apiReplaceFile(w http.ResponseWriter, r requestParser, user models.User) {
if err != nil {
switch {
case errors.Is(err, storage.ErrorReplaceE2EFile):
sendError(w, http.StatusBadRequest, "End-to-End encrypted files cannot be replaced")
sendError(w, http.StatusBadRequest, errorcodes.EndToEndNotSupported, "End-to-End encrypted files cannot be replaced")
case errors.Is(err, storage.ErrorFileNotFound):
sendError(w, http.StatusNotFound, "A file with such an ID could not be found")
sendError(w, http.StatusNotFound, errorcodes.NotFound, "A file with such an ID could not be found")
default:
sendError(w, http.StatusBadRequest, err.Error())
sendError(w, http.StatusBadRequest, errorcodes.InvalidUserInput, err.Error())
}
return
}
@@ -595,11 +798,11 @@ func apiModifyUser(w http.ResponseWriter, r requestParser, user models.User) {
return
}
if userEdit.IsSuperAdmin() {
sendError(w, http.StatusBadRequest, "Cannot modify super admin")
sendError(w, http.StatusBadRequest, errorcodes.ResourceCanNotBeEdited, "Cannot modify super admin")
return
}
if userEdit.IsSameUser(user.Id) {
sendError(w, http.StatusBadRequest, "Cannot modify yourself")
sendError(w, http.StatusBadRequest, errorcodes.ResourceCanNotBeEdited, "Cannot modify yourself")
return
}
logging.LogUserEdit(userEdit, user)
@@ -628,11 +831,11 @@ func apiChangeUserRank(w http.ResponseWriter, r requestParser, user models.User)
return
}
if userEdit.IsSameUser(user.Id) {
sendError(w, http.StatusBadRequest, "Cannot modify yourself")
sendError(w, http.StatusBadRequest, errorcodes.ResourceCanNotBeEdited, "Cannot modify yourself")
return
}
if userEdit.IsSuperAdmin() {
sendError(w, http.StatusBadRequest, "Cannot modify super admin")
sendError(w, http.StatusBadRequest, errorcodes.ResourceCanNotBeEdited, "Cannot modify super admin")
return
}
userEdit.UserLevel = request.NewRank
@@ -646,7 +849,7 @@ func apiChangeUserRank(w http.ResponseWriter, r requestParser, user models.User)
updateApiKeyPermsOnUserPermChange(userEdit.Id, models.UserPermReplaceUploads, false)
updateApiKeyPermsOnUserPermChange(userEdit.Id, models.UserPermManageUsers, false)
default:
sendError(w, http.StatusBadRequest, "invalid rank sent")
sendError(w, http.StatusBadRequest, errorcodes.InvalidUserInput, "invalid rank sent")
return
}
logging.LogUserEdit(userEdit, user)
@@ -662,6 +865,8 @@ func updateApiKeyPermsOnUserPermChange(userId int, userPerm models.UserPermissio
affectedPermission = models.ApiPermReplace
case models.UserPermManageLogs:
affectedPermission = models.ApiPermManageLogs
case models.UserPermGuestUploads:
affectedPermission = models.ApiPermManageFileRequests
default:
return
}
@@ -691,17 +896,17 @@ func apiResetPassword(w http.ResponseWriter, r requestParser, user models.User)
return
}
if userToEdit.IsSuperAdmin() {
sendError(w, http.StatusBadRequest, "Cannot reset pw of super admin")
sendError(w, http.StatusBadRequest, errorcodes.ResourceCanNotBeEdited, "Cannot reset password of super admin")
return
}
if userToEdit.IsSameUser(user.Id) {
sendError(w, http.StatusBadRequest, "Cannot reset password of yourself")
sendError(w, http.StatusBadRequest, errorcodes.ResourceCanNotBeEdited, "Cannot reset password of yourself")
return
}
userToEdit.ResetPassword = true
password := ""
if request.NewPassword {
password = helper.GenerateRandomString(configuration.Environment.MinLengthPassword + 2)
password = helper.GenerateRandomString(configuration.GetEnvironment().MinLengthPassword + 2)
userToEdit.Password = configuration.HashPassword(password, false)
}
database.DeleteAllSessionsByUser(userToEdit.Id)
@@ -719,15 +924,27 @@ func apiDeleteUser(w http.ResponseWriter, r requestParser, user models.User) {
return
}
if userToDelete.IsSuperAdmin() {
sendError(w, http.StatusBadRequest, "Cannot delete super admin")
sendError(w, http.StatusBadRequest, errorcodes.ResourceCanNotBeEdited, "Cannot delete super admin")
return
}
if userToDelete.IsSameUser(user.Id) {
sendError(w, http.StatusBadRequest, "Cannot delete yourself")
sendError(w, http.StatusBadRequest, errorcodes.ResourceCanNotBeEdited, "Cannot delete yourself")
return
}
logging.LogUserDeletion(userToDelete, user)
database.DeleteUser(userToDelete.Id)
for _, fRequest := range database.GetAllFileRequests() {
if fRequest.UserId == userToDelete.Id {
if request.DeleteFiles {
filerequest.Delete(fRequest)
} else {
fRequest.UserId = user.Id
database.SaveFileRequest(fRequest)
}
}
}
for _, file := range database.GetAllMetadata() {
if file.UserId == userToDelete.Id {
if request.DeleteFiles {
@@ -757,7 +974,8 @@ func apiLogsDelete(_ http.ResponseWriter, r requestParser, user models.User) {
func apiE2eGet(w http.ResponseWriter, _ requestParser, user models.User) {
info := database.GetEnd2EndInfo(user.Id)
files := getFilesForUser(user)
// If e2e is supported for upload requests at some point, this needs to be changed
files := getFilesForUser(user, false)
ids := make([]string, len(files))
for i, file := range files {
ids[i] = file.Id
@@ -774,25 +992,170 @@ func apiE2eSet(w http.ResponseWriter, r requestParser, user models.User) {
panic("invalid parameter passed")
}
database.SaveEnd2EndInfo(request.EncryptedInfo, user.Id)
_, _ = w.Write([]byte("\"result\":\"OK\""))
_, _ = w.Write([]byte("{\"result\":\"OK\"}"))
}
// apiURequestDelete deletes a file request (guest upload link). Callers may
// delete their own requests; deleting another user's request requires the
// permission to delete other uploads.
func apiURequestDelete(w http.ResponseWriter, r requestParser, user models.User) {
	params, isExpectedType := r.(*paramURequestDelete)
	if !isExpectedType {
		panic("invalid parameter passed")
	}
	fileRequest, found := database.GetFileRequest(params.Id)
	if !found {
		sendError(w, http.StatusNotFound, errorcodes.NotFound, "FileRequest does not exist with the given ID")
		return
	}
	if fileRequest.UserId != user.Id && !user.HasPermission(models.UserPermDeleteOtherUploads) {
		sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to delete this upload request")
		return
	}
	filerequest.Delete(fileRequest)
	logging.LogDeleteFileRequest(fileRequest, user)
	_, _ = w.Write([]byte(`{"result":"OK"}`))
}
// isUserAllowedUnlimited reports whether the user may save the file request
// with the size / file-count values contained in request. Admins always may.
// For non-admins the server-wide guest upload limits apply (a value of 0 in
// the environment config means "no server limit"): a field is rejected when
// it takes effect (it is explicitly set, or the request is new so all fields
// apply) and it is either unlimited (0) or above the server limit.
func isUserAllowedUnlimited(request *paramURequestSave, isNewRequest bool, user models.User) bool {
	if user.IsAdmin() {
		return true
	}
	// 0 means the server does not restrict guest uploads at all
	isServerLimitMaxSize := configuration.GetEnvironment().MaxSizeGuestUploadMb != 0
	isServerLimitMaxFiles := configuration.GetEnvironment().MaxFilesGuestUpload != 0
	if isServerLimitMaxSize {
		// MaxSizeMb == 0 would mean unlimited size, reserved for admins
		if (request.IsMaxSizeSet || isNewRequest) &&
			(request.MaxSizeMb == 0 || request.MaxSizeMb > configuration.GetEnvironment().MaxSizeGuestUploadMb) {
			return false
		}
	}
	if isServerLimitMaxFiles {
		// MaxFiles == 0 would mean unlimited file count, reserved for admins
		if (request.IsMaxFilesSet || isNewRequest) &&
			(request.MaxFiles == 0 || request.MaxFiles > configuration.GetEnvironment().MaxFilesGuestUpload) {
			return false
		}
	}
	return true
}
// apiURequestSave creates a new file request (guest upload link) or updates
// an existing one; a request is considered new when no Id was supplied.
// Creating a request also generates a dedicated public API key for it.
// Fields are only applied when their corresponding Is...Set flag is true, so
// an update leaves untouched fields as they were. The saved request is
// returned as JSON.
func apiURequestSave(w http.ResponseWriter, r requestParser, user models.User) {
	request, ok := r.(*paramURequestSave)
	if !ok {
		panic("invalid parameter passed")
	}
	uploadRequest := models.FileRequest{}
	isNewRequest := request.Id == ""
	// Non-admins may not exceed the server-wide guest upload limits
	if !isUserAllowedUnlimited(request, isNewRequest, user) {
		sendError(w, http.StatusBadRequest, errorcodes.AdminOnly, "Only admin users can create requests with unlimited size / file count"+
			" or values larger than the server's max size / file count")
		return
	}
	if !isNewRequest {
		uploadRequest, ok = database.GetFileRequest(request.Id)
		if !ok {
			sendError(w, http.StatusNotFound, errorcodes.NotFound, "FileRequest does not exist with the given ID")
			return
		}
		if uploadRequest.UserId != user.Id && !user.HasPermission(models.UserPermEditOtherUploads) {
			sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to edit this upload request")
			return
		}
	} else {
		// New requests get their own non-basic API key for public access
		uploadRequest = filerequest.New(user)
		apiKey := generateNewKey(false, user.Id, "File Request Public Access", uploadRequest.Id)
		uploadRequest.ApiKey = apiKey.Id
	}
	// A name is mandatory: an explicitly-cleared or still-empty name is
	// replaced with a default placeholder
	if request.Name == "" {
		if request.IsNameSet || uploadRequest.Name == "" {
			uploadRequest.Name = "Unnamed Request"
		}
	} else {
		uploadRequest.Name = request.Name
	}
	if request.IsExpirySet {
		uploadRequest.Expiry = request.Expiry
	}
	if request.IsMaxFilesSet {
		uploadRequest.MaxFiles = request.MaxFiles
	}
	if request.IsMaxSizeSet {
		uploadRequest.MaxSize = request.MaxSizeMb
	}
	if request.IsNotesSet {
		uploadRequest.Notes = request.Notes
	}
	database.SaveFileRequest(uploadRequest)
	// Re-read so the response reflects any values normalised on save.
	// NOTE(review): ok is not checked here; if the reload failed, a
	// zero-value request would be logged and returned — confirm Get cannot
	// fail directly after SaveFileRequest.
	uploadRequest, ok = filerequest.Get(uploadRequest.Id)
	if isNewRequest {
		logging.LogCreateFileRequest(uploadRequest, user)
	} else {
		logging.LogEditFileRequest(uploadRequest, user)
	}
	result, err := json.Marshal(uploadRequest)
	helper.Check(err)
	_, _ = w.Write(result)
}
// apiUploadRequestList outputs all file requests visible to the caller as a
// JSON array. A request is visible when the caller owns it or holds the
// permission to list other users' uploads.
func apiUploadRequestList(w http.ResponseWriter, _ requestParser, user models.User) {
	// Non-nil slice, so an empty result serialises as [] instead of null
	visibleRequests := make([]models.FileRequest, 0)
	for _, fileRequest := range filerequest.GetAll() {
		if fileRequest.UserId == user.Id || user.HasPermission(models.UserPermListOtherUploads) {
			visibleRequests = append(visibleRequests, fileRequest)
		}
	}
	serialised, err := json.Marshal(visibleRequests)
	helper.Check(err)
	_, _ = w.Write(serialised)
}
// apiUploadRequestListSingle outputs a single file request as JSON. Callers
// may view their own requests; viewing another user's request requires the
// permission to list other uploads, matching apiUploadRequestList.
func apiUploadRequestListSingle(w http.ResponseWriter, r requestParser, user models.User) {
	request, ok := r.(*paramURequestListSingle)
	if !ok {
		panic("invalid parameter passed")
	}
	uploadRequest, ok := filerequest.Get(request.Id)
	if !ok {
		sendError(w, http.StatusNotFound, errorcodes.NotFound, "FileRequest does not exist with the given ID")
		return
	}
	// Fix: this read-only endpoint previously demanded the *delete*
	// permission and reported "No permission to delete ..." — a copy-paste
	// from apiURequestDelete. Use the list permission, consistent with
	// apiUploadRequestList and apiListSingle.
	if uploadRequest.UserId != user.Id && !user.HasPermission(models.UserPermListOtherUploads) {
		sendError(w, http.StatusUnauthorized, errorcodes.NoPermission, "No permission to view this upload request")
		return
	}
	result, err := json.Marshal(uploadRequest)
	helper.Check(err)
	_, _ = w.Write(result)
}
// isAuthorisedForApi checks the "apikey" request header against the
// permission required by the route and returns the authenticated user.
// It additionally rejects mismatched key types: a public file-request key
// may only call file-request endpoints, and a regular key may not.
func isAuthorisedForApi(r *http.Request, routing apiRoute) (models.User, bool) {
	// Fix: removed stale pre-refactor lines that read the header a second
	// time and redeclared the key variable, which cannot compile.
	keyId := r.Header.Get("apikey")
	user, apiKey, ok := isValidApiKey(keyId, true, routing.ApiPerm)
	if !ok {
		return models.User{}, false
	}
	// Returns false if a public upload key is used for non-public api call or vice versa
	if routing.IsFileRequestApi != apiKey.IsUploadRequestKey() {
		return models.User{}, false
	}
	return user, true
}
// Probably from new API permission system
func sendError(w http.ResponseWriter, errorInt int, errorMessage string) {
func sendError(w http.ResponseWriter, statusCode, errorCode int, errorMessage string) {
if w == nil {
return
}
w.WriteHeader(errorInt)
_, _ = w.Write([]byte("{\"Result\":\"error\",\"ErrorMessage\":\"" + errorMessage + "\"}"))
w.WriteHeader(statusCode)
output := struct {
Result string `json:"Result"`
Message string `json:"ErrorMessage"`
Code int `json:"ErrorCode"`
}{Result: "error", Message: errorMessage, Code: errorCode}
outputBytes, err := json.Marshal(output)
helper.Check(err)
_, _ = w.Write(outputBytes)
}
// publicKeyToApiKey tries to convert a (possible) public key to a private key
+139 -105
View File
@@ -73,14 +73,14 @@ func generateTestData() {
Id: idApiKeyAdmin,
PublicId: idApiKeyAdmin,
FriendlyName: "Admin",
Permissions: models.ApiPermAll,
Permissions: models.ApiPermNone,
UserId: idAdmin,
})
database.SaveApiKey(models.ApiKey{
Id: idApiKeySuperAdmin,
PublicId: idPublicApiKeySuperAdmin,
FriendlyName: "SuperAdmin",
Permissions: models.ApiPermAll,
Permissions: models.ApiPermNone,
UserId: idSuperAdmin,
})
database.SaveMetaData(models.File{
@@ -120,24 +120,23 @@ func getRecorderWithBody(url, apikey, method string, headers []test.Header, body
}
func testAuthorisation(t *testing.T, url string, requiredPermission models.ApiPermission) models.ApiKey {
t.Helper()
w, r := getRecorder(url, "", []test.Header{{}})
Process(w, r)
test.IsEqualBool(t, w.Code != 200, true)
test.ResponseBodyContains(t, w, `{"Result":"error","ErrorMessage":"Unauthorized"}`)
test.ResponseBodyIs(t, w, `{"Result":"error","ErrorMessage":"Unauthorized","ErrorCode":2}`)
w, r = getRecorder(url, "invalid", []test.Header{{}})
Process(w, r)
test.IsEqualBool(t, w.Code != 200, true)
test.ResponseBodyContains(t, w, `{"Result":"error","ErrorMessage":"Unauthorized"}`)
test.ResponseBodyIs(t, w, `{"Result":"error","ErrorMessage":"Unauthorized","ErrorCode":2}`)
newApiKeyUser := generateNewKey(false, idUser, "")
newApiKeyUser := generateNewKey(false, idUser, "", "")
w, r = getRecorder(url, newApiKeyUser.Id, []test.Header{{}})
Process(w, r)
test.IsEqualBool(t, w.Code != 200, true)
test.ResponseBodyContains(t, w, `{"Result":"error","ErrorMessage":"Unauthorized"}`)
test.ResponseBodyIs(t, w, `{"Result":"error","ErrorMessage":"Unauthorized","ErrorCode":2}`)
for _, permission := range getAvailableApiPermissions(t) {
for _, permission := range getAvailableApiPermissions() {
if permission == requiredPermission {
continue
}
@@ -145,16 +144,16 @@ func testAuthorisation(t *testing.T, url string, requiredPermission models.ApiPe
w, r = getRecorder(url, newApiKeyUser.Id, []test.Header{{}})
Process(w, r)
test.IsEqualBool(t, w.Code != 200, true)
test.ResponseBodyContains(t, w, `{"Result":"error","ErrorMessage":"Unauthorized"}`)
test.ResponseBodyIs(t, w, `{"Result":"error","ErrorMessage":"Unauthorized","ErrorCode":2}`)
removePermissionApikey(t, newApiKeyUser.Id, permission)
}
newApiKeyUser.Permissions = models.ApiPermAll
newApiKeyUser.Permissions = getPermissionAll()
newApiKeyUser.RemovePermission(requiredPermission)
database.SaveApiKey(newApiKeyUser)
w, r = getRecorder(url, newApiKeyUser.Id, []test.Header{{}})
Process(w, r)
test.IsEqualBool(t, w.Code != 200, true)
test.ResponseBodyContains(t, w, `{"Result":"error","ErrorMessage":"Unauthorized"}`)
test.ResponseBodyIs(t, w, `{"Result":"error","ErrorMessage":"Unauthorized","ErrorCode":2}`)
newApiKeyUser.Permissions = models.ApiPermNone
newApiKeyUser.GrantPermission(requiredPermission)
database.SaveApiKey(newApiKeyUser)
@@ -162,9 +161,10 @@ func testAuthorisation(t *testing.T, url string, requiredPermission models.ApiPe
}
type invalidParameterValue struct {
Value string
ErrorMessage string
StatusCode int
Value string
ErrorMessage string
ErrorMessages []string
StatusCode int
}
func testInvalidParameters(t *testing.T, url, apiKey string, validHeaders []test.Header, headerName string, invalidValues []invalidParameterValue) {
@@ -179,12 +179,16 @@ func testInvalidParameters(t *testing.T, url, apiKey string, validHeaders []test
w, r := getRecorderWithBody(url, apiKey, "GET", headers, nil)
Process(w, r)
test.IsEqualInt(t, w.Code, invalidHeader.StatusCode)
test.ResponseBodyContains(t, w, invalidHeader.ErrorMessage)
if len(invalidHeader.ErrorMessages) > 0 {
test.ResponseBodyIsWithAlternate(t, w, invalidHeader.ErrorMessages)
} else {
test.ResponseBodyIs(t, w, invalidHeader.ErrorMessage)
}
if invalidHeader.Value == "" {
w, r = getRecorder(url, apiKey, validHeaders)
Process(w, r)
test.IsEqualInt(t, w.Code, invalidHeader.StatusCode)
test.ResponseBodyContains(t, w, invalidHeader.ErrorMessage)
test.ResponseBodyIs(t, w, invalidHeader.ErrorMessage)
}
}
}
@@ -196,28 +200,32 @@ func testInvalidUserId(t *testing.T, url, apiKey string, validHeaders []test.Hea
var invalidParameter = []invalidParameterValue{
{
Value: "",
ErrorMessage: `{"Result":"error","ErrorMessage":"header userid is required"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"header userid is required","ErrorCode":4}`,
StatusCode: 400,
},
{
Value: strconv.Itoa(idInvalidUser),
ErrorMessage: `{"Result":"error","ErrorMessage":"Invalid user id provided."}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"Invalid user id provided.","ErrorCode":5}`,
StatusCode: 404,
},
{
Value: "invalid",
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid value in header userid supplied"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid value in header userid supplied","ErrorCode":4}`,
StatusCode: 400,
},
{
Value: strconv.Itoa(idUser),
ErrorMessage: `{"Result":"error","ErrorMessage":"Cannot`,
StatusCode: 400,
Value: strconv.Itoa(idUser),
ErrorMessages: []string{`{"Result":"error","ErrorMessage":"Cannot modify yourself","ErrorCode":19}`,
`{"Result":"error","ErrorMessage":"Cannot delete yourself","ErrorCode":19}`,
`{"Result":"error","ErrorMessage":"Cannot reset password of yourself","ErrorCode":19}`},
StatusCode: 400,
},
{
Value: strconv.Itoa(idSuperAdmin),
ErrorMessage: `{"Result":"error","ErrorMessage":"Cannot`,
StatusCode: 400,
Value: strconv.Itoa(idSuperAdmin),
ErrorMessages: []string{`{"Result":"error","ErrorMessage":"Cannot modify super admin","ErrorCode":19}`,
`{"Result":"error","ErrorMessage":"Cannot delete super admin","ErrorCode":19}`,
`{"Result":"error","ErrorMessage":"Cannot reset password of super admin","ErrorCode":19}`},
StatusCode: 400,
},
}
testInvalidParameters(t, url, apiKey, validHeaders, headerUserId, invalidParameter)
@@ -230,28 +238,31 @@ func testInvalidApiKey(t *testing.T, url, apiKey string, validHeaders []test.Hea
var invalidParameter = []invalidParameterValue{
{
Value: "",
ErrorMessage: `{"Result":"error","ErrorMessage":"header targetKey is required"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"header targetKey is required","ErrorCode":4}`,
StatusCode: 400,
},
{
Value: "invalid",
ErrorMessage: `{"Result":"error","ErrorMessage":"Invalid key ID provided."}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"Invalid key ID provided.","ErrorCode":5}`,
StatusCode: 404,
},
{
Value: idApiKeySuperAdmin,
ErrorMessage: `{"Result":"error","ErrorMessage":"No permission to `,
StatusCode: 401,
Value: idApiKeySuperAdmin,
ErrorMessages: []string{`{"Result":"error","ErrorMessage":"No permission to delete this API key","ErrorCode":6}`,
`{"Result":"error","ErrorMessage":"No permission to edit this API key","ErrorCode":6}`},
StatusCode: 401,
},
{
Value: idPublicApiKeySuperAdmin,
ErrorMessage: `{"Result":"error","ErrorMessage":"No permission to `,
StatusCode: 401,
Value: idPublicApiKeySuperAdmin,
ErrorMessages: []string{`{"Result":"error","ErrorMessage":"No permission to delete this API key","ErrorCode":6}`,
`{"Result":"error","ErrorMessage":"No permission to edit this API key","ErrorCode":6}`},
StatusCode: 401,
},
{
Value: idApiKeyAdmin,
ErrorMessage: `{"Result":"error","ErrorMessage":"No permission to `,
StatusCode: 401,
Value: idApiKeyAdmin,
ErrorMessages: []string{`{"Result":"error","ErrorMessage":"No permission to delete this API key","ErrorCode":6}`,
`{"Result":"error","ErrorMessage":"No permission to edit this API key","ErrorCode":6}`},
StatusCode: 401,
},
}
testInvalidParameters(t, url, apiKey, validHeaders, headerApiKey, invalidParameter)
@@ -270,17 +281,17 @@ func testInvalidFileId(t *testing.T, url, apiKey string, isReplacingCall bool) {
var invalidParameter = []invalidParameterValue{
{
Value: "",
ErrorMessage: `{"Result":"error","ErrorMessage":"header id is required"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"header id is required","ErrorCode":4}`,
StatusCode: 400,
},
{
Value: "invalidFile",
ErrorMessage: `{"Result":"error","ErrorMessage":"Invalid id provided."}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"Invalid id provided.","ErrorCode":5}`,
StatusCode: 404,
},
{
Value: idFileAdmin,
ErrorMessage: `{"Result":"error","ErrorMessage":"No permission to `,
ErrorMessage: `{"Result":"error","ErrorMessage":"No permission to duplicate this file","ErrorCode":6}`,
StatusCode: 401,
},
}
@@ -293,7 +304,7 @@ func TestInvalidRouting(t *testing.T) {
w, r := getRecorder(apiUrl, "invalid", []test.Header{{}})
Process(w, r)
test.IsEqualInt(t, w.Code, 400)
test.ResponseBodyContains(t, w, `{"Result":"error","ErrorMessage":"Invalid request"}`)
test.ResponseBodyIs(t, w, `{"Result":"error","ErrorMessage":"Invalid request","ErrorCode":1}`)
}
// ## /user/##
@@ -309,22 +320,22 @@ func TestUserCreate(t *testing.T) {
Value: "1234",
}})
Process(w, r)
test.ResponseBodyContains(t, w, `{"id":103,"name":"1234","permissions":0,"userLevel":2,"lastOnline":0,"resetPassword":false}`)
test.ResponseBodyIs(t, w, `{"id":103,"name":"1234","permissions":0,"userLevel":2,"lastOnline":0,"resetPassword":false}`)
var invalidParameter = []invalidParameterValue{
{
Value: "",
ErrorMessage: `{"Result":"error","ErrorMessage":"header username is required"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"header username is required","ErrorCode":4}`,
StatusCode: 400,
},
{
Value: "1",
ErrorMessage: `{"Result":"error","ErrorMessage":"Invalid username provided."}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"Invalid username provided.","ErrorCode":6}`,
StatusCode: 400,
},
{
Value: "1234",
ErrorMessage: `{"Result":"error","ErrorMessage":"User already exists."}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"User already exists.","ErrorCode":7}`,
StatusCode: 409,
},
}
@@ -350,12 +361,12 @@ func TestUserChangeRank(t *testing.T) {
invalidParameter := []invalidParameterValue{
{
Value: "",
ErrorMessage: `{"Result":"error","ErrorMessage":"header newRank is required"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"header newRank is required","ErrorCode":4}`,
StatusCode: 400,
},
{
Value: "invalid",
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid rank"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid rank","ErrorCode":4}`,
StatusCode: 400,
},
}
@@ -432,7 +443,7 @@ func testDeleteUserCall(t *testing.T, apiKey string, mode int) {
database.SaveSession("sessionApiDelete", session)
_, ok = database.GetSession("sessionApiDelete")
test.IsEqualBool(t, ok, true)
userApiKey := generateNewKey(false, retrievedUser.Id, "")
userApiKey := generateNewKey(false, retrievedUser.Id, "", "")
_, ok = database.GetApiKey(userApiKey.Id)
test.IsEqualBool(t, ok, true)
testFile := models.File{
@@ -510,17 +521,17 @@ func TestUserModify(t *testing.T) {
invalidParameter := []invalidParameterValue{
{
Value: "",
ErrorMessage: `{"Result":"error","ErrorMessage":"header userpermission is required"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"header userpermission is required","ErrorCode":4}`,
StatusCode: 400,
},
{
Value: "invalid",
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid permission"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid permission","ErrorCode":4}`,
StatusCode: 400,
},
{
Value: "PERM_REPLACEE",
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid permission"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid permission","ErrorCode":4}`,
StatusCode: 400,
},
}
@@ -563,7 +574,7 @@ func TestUserPasswordReset(t *testing.T) {
test.IsEqualBool(t, ok, true)
test.IsEqualBool(t, user.ResetPassword, true)
test.IsEqualString(t, user.Password, "1234")
test.ResponseBodyContains(t, w, `{"Result":"ok","password":""}`)
test.ResponseBodyIs(t, w, `{"Result":"ok","password":""}`)
user.ResetPassword = false
database.SaveUser(user, false)
@@ -699,16 +710,16 @@ func TestIsValidApiKey(t *testing.T) {
test.IsEqualBool(t, ok, true)
test.IsEqualBool(t, key.LastUsed == 0, false)
newApiKey := generateNewKey(false, 5, "")
newApiKey := generateNewKey(false, 5, "", "")
user, _, isValid = isValidApiKey(newApiKey.Id, true, models.ApiPermNone)
test.IsEqualBool(t, isValid, true)
for _, permission := range getAvailableApiPermissions(t) {
for _, permission := range getAvailableApiPermissions() {
_, _, isValid = isValidApiKey(newApiKey.Id, true, permission)
test.IsEqualBool(t, isValid, false)
}
for _, newPermission := range getAvailableApiPermissions(t) {
for _, newPermission := range getAvailableApiPermissions() {
setPermissionApikey(t, newApiKey.Id, newPermission)
for _, permission := range getAvailableApiPermissions(t) {
for _, permission := range getAvailableApiPermissions() {
_, _, isValid = isValidApiKey(newApiKey.Id, true, permission)
test.IsEqualBool(t, isValid, permission == newPermission)
}
@@ -717,7 +728,7 @@ func TestIsValidApiKey(t *testing.T) {
setPermissionApikey(t, newApiKey.Id, models.ApiPermEdit|models.ApiPermDelete)
_, _, isValid = isValidApiKey(newApiKey.Id, true, models.ApiPermEdit)
test.IsEqualBool(t, isValid, true)
_, _, isValid = isValidApiKey(newApiKey.Id, true, models.ApiPermAll)
_, _, isValid = isValidApiKey(newApiKey.Id, true, getPermissionAll())
test.IsEqualBool(t, isValid, false)
_, _, isValid = isValidApiKey(newApiKey.Id, true, models.ApiPermView)
test.IsEqualBool(t, isValid, false)
@@ -736,7 +747,7 @@ func removePermissionApikey(t *testing.T, key string, newPermission models.ApiPe
database.SaveApiKey(apiKey)
}
func getAvailableApiPermissions(t *testing.T) []models.ApiPermission {
func getAvailableApiPermissions() []models.ApiPermission {
result := []models.ApiPermission{
models.ApiPermView,
models.ApiPermUpload,
@@ -745,17 +756,21 @@ func getAvailableApiPermissions(t *testing.T) []models.ApiPermission {
models.ApiPermEdit,
models.ApiPermReplace,
models.ApiPermManageUsers,
models.ApiPermManageLogs}
sum := 0
for _, perm := range result {
sum = sum + int(perm)
}
if sum != int(models.ApiPermAll) {
t.Fatal("List of permissions are incorrect")
models.ApiPermManageLogs,
models.ApiPermManageFileRequests,
models.ApiPermDownload,
}
return result
}
// getPermissionAll returns an ApiPermission value with every permission
// from getAvailableApiPermissions set.
//
// Permissions are bit flags (they are combined elsewhere with "|", e.g.
// models.ApiPermEdit|models.ApiPermDelete), so the flags are merged with
// bitwise OR. Unlike the previous "+=", OR is idempotent and stays correct
// even if a permission were ever listed twice.
func getPermissionAll() models.ApiPermission {
	allPermissions := models.ApiPermNone
	for _, permission := range getAvailableApiPermissions() {
		allPermissions |= permission
	}
	return allPermissions
}
func getApiPermMap(t *testing.T) map[models.ApiPermission]string {
result := make(map[models.ApiPermission]string)
result[models.ApiPermView] = "PERM_VIEW"
@@ -766,12 +781,14 @@ func getApiPermMap(t *testing.T) map[models.ApiPermission]string {
result[models.ApiPermReplace] = "PERM_REPLACE"
result[models.ApiPermManageUsers] = "PERM_MANAGE_USERS"
result[models.ApiPermManageLogs] = "PERM_MANAGE_LOGS"
result[models.ApiPermManageFileRequests] = "PERM_MANAGE_FILE_REQUESTS"
result[models.ApiPermDownload] = "PERM_DOWNLOAD"
sum := 0
for perm := range result {
sum = sum + int(perm)
}
if sum != int(models.ApiPermAll) {
if sum != int(getPermissionAll()) {
t.Fatal("List of permissions are incorrect")
}
@@ -829,12 +846,12 @@ func TestDeleteApiKey(t *testing.T) {
invalidParameter := []invalidParameterValue{
{
Value: "",
ErrorMessage: `{"Result":"error","ErrorMessage":"header targetKey is required"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"header targetKey is required","ErrorCode":4}`,
StatusCode: 400,
},
{
Value: "invalid",
ErrorMessage: `{"Result":"error","ErrorMessage":"Invalid key ID provided."}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"Invalid key ID provided.","ErrorCode":5}`,
StatusCode: 404,
},
}
@@ -926,27 +943,32 @@ func TestApikeyModify(t *testing.T) {
invalidParameter := []invalidParameterValue{
{
Value: "",
ErrorMessage: `{"Result":"error","ErrorMessage":"header permission is required"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"header permission is required","ErrorCode":4}`,
StatusCode: 400,
},
{
Value: "invalid",
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid permission"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid permission","ErrorCode":4}`,
StatusCode: 400,
},
{
Value: "PERM_VIEWW",
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid permission"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid permission","ErrorCode":4}`,
StatusCode: 400,
},
{
Value: "PERM_REPLACE",
ErrorMessage: `{"Result":"error","ErrorMessage":"Insufficient user permission for owner to set this API permission"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"Insufficient user permission for owner to set this API permission","ErrorCode":6}`,
StatusCode: 401,
},
{
Value: "PERM_MANAGE_USERS",
ErrorMessage: `{"Result":"error","ErrorMessage":"Insufficient user permission for owner to set this API permission"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"Insufficient user permission for owner to set this API permission","ErrorCode":6}`,
StatusCode: 401,
},
{
Value: "PERM_MANAGE_FILE_REQUESTS",
ErrorMessage: `{"Result":"error","ErrorMessage":"Insufficient user permission for owner to set this API permission","ErrorCode":6}`,
StatusCode: 401,
},
}
@@ -955,6 +977,7 @@ func TestApikeyModify(t *testing.T) {
grantUserPermission(t, idUser, models.UserPermReplaceUploads)
grantUserPermission(t, idUser, models.UserPermManageUsers)
grantUserPermission(t, idUser, models.UserPermManageLogs)
grantUserPermission(t, idUser, models.UserPermGuestUploads)
for permissionUint, permissionString := range getApiPermMap(t) {
test.IsEqualBool(t, retrievedApiKey.HasPermission(permissionUint), false)
@@ -970,6 +993,7 @@ func TestApikeyModify(t *testing.T) {
removeUserPermission(t, idUser, models.UserPermReplaceUploads)
removeUserPermission(t, idUser, models.UserPermManageUsers)
removeUserPermission(t, idUser, models.UserPermManageLogs)
removeUserPermission(t, idUser, models.UserPermGuestUploads)
}
func testApiModifyCall(t *testing.T, apiKey, targetKey string, permission string, grant bool) {
@@ -1034,12 +1058,12 @@ func TestDeleteFile(t *testing.T) {
test.IsEqualBool(t, ok, true)
apiKey := testAuthorisation(t, "/files/delete", models.ApiPermDelete)
testDeleteFileCall(t, apiKey.Id, "", "", 400, `{"Result":"error","ErrorMessage":"header id is required"}`)
testDeleteFileCall(t, apiKey.Id, "invalid", "", 404, `{"Result":"error","ErrorMessage":"Invalid file ID provided."}`)
testDeleteFileCall(t, apiKey.Id, "smalltestfile1", "invalid", 400, `{"Result":"error","ErrorMessage":"invalid value in header delay supplied"}`)
testDeleteFileCall(t, apiKey.Id, "", "", 400, `{"Result":"error","ErrorMessage":"header id is required","ErrorCode":4}`)
testDeleteFileCall(t, apiKey.Id, "invalid", "", 404, `{"Result":"error","ErrorMessage":"Invalid file ID provided.","ErrorCode":5}`)
testDeleteFileCall(t, apiKey.Id, "smalltestfile1", "invalid", 400, `{"Result":"error","ErrorMessage":"invalid value in header delay supplied","ErrorCode":4}`)
testDeleteFileCall(t, apiKey.Id, "smalltestfile1", "", 200, "")
testDeleteFileCall(t, apiKey.Id, "smalltestfileDelay", "1", 200, "")
testDeleteFileCall(t, apiKey.Id, "smalltestfile2", "", 401, `{"Result":"error","ErrorMessage":"No permission to delete this file"}`)
testDeleteFileCall(t, apiKey.Id, "smalltestfile2", "", 401, `{"Result":"error","ErrorMessage":"No permission to delete this file","ErrorCode":6}`)
_, ok = database.GetMetaDataById("smalltestfile2")
test.IsEqualBool(t, ok, true)
grantUserPermission(t, idUser, models.UserPermDeleteOtherUploads)
@@ -1076,7 +1100,7 @@ func testDeleteFileCall(t *testing.T, apiKey, fileId, delay string, resultCode i
Process(w, r)
test.IsEqualInt(t, w.Code, resultCode)
if expectedResponse != "" {
test.ResponseBodyContains(t, w, expectedResponse)
test.ResponseBodyIs(t, w, expectedResponse)
}
defer test.ExpectPanic(t)
@@ -1109,10 +1133,10 @@ func TestRestoreFile(t *testing.T) {
test.IsEqualBool(t, ok, true)
apiKey := testAuthorisation(t, "/files/restore", models.ApiPermDelete)
testRestoreFileCall(t, apiKey.Id, "", 400, `{"Result":"error","ErrorMessage":"header id is required"}`)
testRestoreFileCall(t, apiKey.Id, "invalid", 404, `{"Result":"error","ErrorMessage":"Invalid file ID provided or file has already been deleted."}`)
testRestoreFileCall(t, apiKey.Id, "", 400, `{"Result":"error","ErrorMessage":"header id is required","ErrorCode":4}`)
testRestoreFileCall(t, apiKey.Id, "invalid", 404, `{"Result":"error","ErrorMessage":"Invalid file ID provided or file has already been deleted.","ErrorCode":5}`)
testRestoreFileCall(t, apiKey.Id, fileUser.Id, 200, fileUser.ToJsonResult(config.ServerUrl, config.IncludeFilename))
testRestoreFileCall(t, apiKey.Id, fileAdmin.Id, 401, `{"Result":"error","ErrorMessage":"No permission to restore this file"}`)
testRestoreFileCall(t, apiKey.Id, fileAdmin.Id, 401, `{"Result":"error","ErrorMessage":"No permission to restore this file","ErrorCode":6}`)
storage.DeleteFileSchedule(fileUser.Id, 500, true)
storage.DeleteFileSchedule(fileAdmin.Id, 500, true)
@@ -1125,7 +1149,7 @@ func TestRestoreFile(t *testing.T) {
test.IsEqualBool(t, file.PendingDeletion != 0, true)
testRestoreFileCall(t, apiKey.Id, fileUser.Id, 200, fileUser.ToJsonResult(config.ServerUrl, config.IncludeFilename))
testRestoreFileCall(t, apiKey.Id, fileAdmin.Id, 401, `{"Result":"error","ErrorMessage":"No permission to restore this file"}`)
testRestoreFileCall(t, apiKey.Id, fileAdmin.Id, 401, `{"Result":"error","ErrorMessage":"No permission to restore this file","ErrorCode":6}`)
file, ok = database.GetMetaDataById(fileUser.Id)
test.IsEqualBool(t, ok, true)
@@ -1181,7 +1205,7 @@ func testRestoreFileCall(t *testing.T, apiKey, fileId string, resultCode int, ex
Process(w, r)
test.IsEqualInt(t, w.Code, resultCode)
if expectedResponse != "" {
test.ResponseBodyContains(t, w, expectedResponse)
test.ResponseBodyIs(t, w, expectedResponse)
}
defer test.ExpectPanic(t)
@@ -1196,7 +1220,7 @@ func TestList(t *testing.T) {
w, r := getRecorder(apiUrl, apiKey.Id, []test.Header{})
Process(w, r)
test.IsEqualInt(t, w.Code, 200)
test.ResponseBodyContains(t, w, "null")
test.ResponseBodyIs(t, w, "null")
generateTestData()
var result []models.FileApiOutput
@@ -1234,11 +1258,11 @@ func TestListSingle(t *testing.T) {
w, r = getRecorder(apiUrl+"e4TjE7CokWK0giiLNxDL", apiKey.Id, []test.Header{})
Process(w, r)
test.IsEqualInt(t, w.Code, 401)
test.ResponseBodyContains(t, w, `{"Result":"error","ErrorMessage":"No permission to view file"}`)
test.ResponseBodyIs(t, w, `{"Result":"error","ErrorMessage":"No permission to view file","ErrorCode":6}`)
w, r = getRecorder(apiUrl+"invalid", apiKey.Id, []test.Header{})
Process(w, r)
test.IsEqualInt(t, w.Code, 404)
test.ResponseBodyContains(t, w, `{"Result":"error","ErrorMessage":"File not found"}`)
test.ResponseBodyIs(t, w, `{"Result":"error","ErrorMessage":"File not found","ErrorCode":5}`)
grantUserPermission(t, idUser, models.UserPermListOtherUploads)
w, r = getRecorder(apiUrl+"e4TjE7CokWK0giiLNxDL", apiKey.Id, []test.Header{})
@@ -1254,6 +1278,9 @@ func TestListSingle(t *testing.T) {
}
func TestUpload(t *testing.T) {
apiKey := generateNewKey(false, idUser, "", "")
apiKey.GrantPermission(models.ApiPermUpload)
database.SaveApiKey(apiKey)
result, body := uploadNewFile(t)
test.IsEqualString(t, result.Result, "OK")
test.IsEqualString(t, result.FileInfo.Size, "3 B")
@@ -1263,10 +1290,10 @@ func TestUpload(t *testing.T) {
// newFileId := result.FileInfo.Id
w, r := test.GetRecorder("POST", "/api/files/add", nil, []test.Header{{
Name: "apikey",
Value: "validkey",
Value: apiKey.Id,
}}, body)
Process(w, r)
test.ResponseBodyContains(t, w, "Content-Type isn't multipart/form-data")
test.ResponseBodyIs(t, w, `{"Result":"error","ErrorMessage":"request Content-Type isn't multipart/form-data","ErrorCode":0}`)
test.IsEqualInt(t, w.Code, 400)
defer test.ExpectPanic(t)
@@ -1291,7 +1318,7 @@ func uploadNewFile(t *testing.T) (models.Result, *bytes.Buffer) {
test.IsNil(t, err)
err = writer.Close()
test.IsNil(t, err)
newApiKeyUser := generateNewKey(true, idUser, "")
newApiKeyUser := generateNewKey(true, idUser, "", "")
w, r := test.GetRecorder("POST", "/api/files/add", nil, []test.Header{{
Name: "apikey",
Value: newApiKeyUser.Id,
@@ -1320,7 +1347,7 @@ func TestDuplicate(t *testing.T) {
invalidParameter := []invalidParameterValue{
{
Value: "invalid",
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid value in header allowedDownloads supplied"}`,
ErrorMessage: `{"Result":"error","ErrorMessage":"invalid value in header allowedDownloads supplied","ErrorCode":4}`,
StatusCode: 400,
},
}
@@ -1407,6 +1434,9 @@ func TestDuplicate(t *testing.T) {
}
func TestChunkUpload(t *testing.T) {
apiKey := generateNewKey(false, idUser, "", "")
apiKey.GrantPermission(models.ApiPermUpload)
database.SaveApiKey(apiKey)
err := os.WriteFile("test/tmpupload", []byte("chunktestfile"), 0600)
test.IsNil(t, err)
body, formcontent := test.FileToMultipartFormBody(t, test.HttpTestConfig{
@@ -1425,12 +1455,12 @@ func TestChunkUpload(t *testing.T) {
})
w, r := test.GetRecorder("POST", "/api/chunk/add", nil, []test.Header{{
Name: "apikey",
Value: "validkey",
Value: apiKey.Id,
}}, body)
r.Header.Add("Content-Type", formcontent)
Process(w, r)
test.IsEqualInt(t, w.Code, 200)
test.ResponseBodyContains(t, w, "OK")
test.ResponseBodyIs(t, w, `{"result":"OK"}`)
body, formcontent = test.FileToMultipartFormBody(t, test.HttpTestConfig{
UploadFileName: "test/tmpupload",
@@ -1448,20 +1478,24 @@ func TestChunkUpload(t *testing.T) {
})
w, r = test.GetRecorder("POST", "/api/chunk/add", nil, []test.Header{{
Name: "apikey",
Value: "validkey",
Value: apiKey.Id,
}}, body)
r.Header.Add("Content-Type", formcontent)
Process(w, r)
test.IsEqualInt(t, w.Code, 400)
test.ResponseBodyContains(t, w, "error")
test.ResponseBodyIs(t, w, `{"Result":"error","ErrorMessage":"strconv.ParseInt: parsing \"\": invalid syntax","ErrorCode":10}`)
defer test.ExpectPanic(t)
apiChunkAdd(w, &paramAuthCreate{}, models.User{Id: 7})
}
func TestChunkComplete(t *testing.T) {
apiKey := generateNewKey(false, idUser, "", "")
apiKey.GrantPermission(models.ApiPermUpload)
database.SaveApiKey(apiKey)
w, r := test.GetRecorder("POST", "/api/chunk/complete", nil, []test.Header{
{Name: "apikey", Value: "validkey"},
{Name: "apikey", Value: apiKey.Id},
{Name: "uuid", Value: "tmpupload123"},
{Name: "filename", Value: "test.upload"},
{Name: "filesize", Value: "13"}},
@@ -1483,13 +1517,13 @@ func TestChunkComplete(t *testing.T) {
// data.Set("filesize", "15")
w, r = test.GetRecorder("POST", "/api/chunk/complete", nil, []test.Header{
{Name: "apikey", Value: "validkey"},
{Name: "apikey", Value: apiKey.Id},
{Name: "uuid", Value: "tmpupload123"},
{Name: "filename", Value: "test.upload"},
{Name: "filesize", Value: "15"}}, nil)
Process(w, r)
test.IsEqualInt(t, w.Code, 400)
test.ResponseBodyContains(t, w, "error")
test.ResponseBodyIs(t, w, `{"Result":"error","ErrorMessage":"chunk file does not exist","ErrorCode":0}`)
defer test.ExpectPanic(t)
apiChunkComplete(w, &paramAuthCreate{}, models.User{Id: 7})
@@ -1497,7 +1531,7 @@ func TestChunkComplete(t *testing.T) {
func TestMinorFunctions(t *testing.T) {
outputFileJson(nil, models.File{})
sendError(nil, 0, "none")
sendError(nil, 0, 0, "none")
}
func testReplaceFileCall(t *testing.T, apiKey string, fileTarget, fileOrigin string, deleteFile bool, resultCode int, expectedResponse string) {
@@ -1520,7 +1554,7 @@ func testReplaceFileCall(t *testing.T, apiKey string, fileTarget, fileOrigin str
Process(w, r)
test.IsEqualInt(t, w.Code, resultCode)
if expectedResponse != "" {
test.ResponseBodyContains(t, w, expectedResponse)
test.ResponseBodyIs(t, w, expectedResponse)
}
defer test.ExpectPanic(t)
@@ -1612,13 +1646,13 @@ func TestFileReplace(t *testing.T) {
test.IsEqualBool(t, ok, true)
apiKey := testAuthorisation(t, "/files/replace", models.ApiPermReplace)
testReplaceFileCall(t, apiKey.Id, "", "invalid", false, 400, `{"Result":"error","ErrorMessage":"header id is required"}`)
testReplaceFileCall(t, apiKey.Id, "invalid", "", false, 400, `{"Result":"error","ErrorMessage":"header idNewContent is required"}`)
testReplaceFileCall(t, apiKey.Id, "invalid", originalFile.Id, false, 404, `{"Result":"error","ErrorMessage":"Invalid id provided."}`)
testReplaceFileCall(t, apiKey.Id, originalFile.Id, "invalid", false, 404, `{"Result":"error","ErrorMessage":"Invalid id provided."}`)
testReplaceFileCall(t, apiKey.Id, originalFile.Id, adminFile.Id, false, 401, `{"Result":"error","ErrorMessage":"No permission to duplicate this file"}`)
testReplaceFileCall(t, apiKey.Id, adminFile.Id, originalFile.Id, false, 401, `{"Result":"error","ErrorMessage":"No permission to replace this file"}`)
testReplaceFileCall(t, apiKey.Id, e2eFile.Id, originalFile.Id, false, 400, `{"Result":"error","ErrorMessage":"End-to-End encrypted files cannot be replaced"}`)
testReplaceFileCall(t, apiKey.Id, "", "invalid", false, 400, `{"Result":"error","ErrorMessage":"header id is required","ErrorCode":4}`)
testReplaceFileCall(t, apiKey.Id, "invalid", "", false, 400, `{"Result":"error","ErrorMessage":"header idNewContent is required","ErrorCode":4}`)
testReplaceFileCall(t, apiKey.Id, "invalid", originalFile.Id, false, 404, `{"Result":"error","ErrorMessage":"Invalid id provided.","ErrorCode":5}`)
testReplaceFileCall(t, apiKey.Id, originalFile.Id, "invalid", false, 404, `{"Result":"error","ErrorMessage":"Invalid id provided.","ErrorCode":5}`)
testReplaceFileCall(t, apiKey.Id, originalFile.Id, adminFile.Id, false, 401, `{"Result":"error","ErrorMessage":"No permission to duplicate this file","ErrorCode":6}`)
testReplaceFileCall(t, apiKey.Id, adminFile.Id, originalFile.Id, false, 401, `{"Result":"error","ErrorMessage":"No permission to replace this file","ErrorCode":6}`)
testReplaceFileCall(t, apiKey.Id, e2eFile.Id, originalFile.Id, false, 400, `{"Result":"error","ErrorMessage":"End-to-End encrypted files cannot be replaced","ErrorCode":17}`)
testReplaceFileCall(t, apiKey.Id, originalFile.Id, newFile.Id, false, 200, "")
file, ok := database.GetMetaDataById(originalFile.Id)
+2 -2
View File
@@ -1,5 +1,5 @@
// Code generated by updateApiRouting.go - DO NOT EDIT.
package api
const versionReadable = "2.1.0"
const versionInt = 20100
const versionReadable = "2.2.0-dev"
const versionInt = 20200
@@ -0,0 +1,24 @@
// Package errorcodes defines the numeric ErrorCode values that are sent
// alongside API error responses (the "ErrorCode" field in the JSON body).
package errorcodes

// The codes are assigned sequentially with iota, starting at 0.
// NOTE(review): the concrete numeric values are part of the public API
// response format — confirm that new codes are only ever appended and
// existing ones never reordered.
const (
	UnspecifiedError = iota // 0: generic / unclassified error
	InvalidUrl              // 1: request URL does not match any API route
	InvalidApiKey           // 2: missing or unauthorized API key ("Unauthorized")
	AdminOnly               // 3
	CannotParse             // 4: required header missing or value not parsable
	NotFound                // 5: requested resource does not exist
	NoPermission            // 6: authenticated, but not allowed for this resource
	AlreadyExists           // 7: resource with this identifier already exists
	InternalServer          // 8
	FileTooLarge            // 9
	InvalidUserInput        // 10: invalid user-supplied value (e.g. unparsable chunk offset)
	ChunkTooSmall           // 11
	InvalidChunkReservation // 12
	CannotAllocateFile      // 13
	RequestExpired          // 14
	CannotUploadMoreFiles   // 15
	RateLimited             // 16
	EndToEndNotSupported    // 17: operation unsupported for end-to-end encrypted files
	UnsupportedFile         // 18
	ResourceCanNotBeEdited  // 19: resource may not be modified (e.g. own user, super admin)
)
+239 -20
View File
@@ -14,16 +14,15 @@ import (
)
type apiRoute struct {
Url string // The API endpoint
HasWildcard bool // True if the endpoint contains the ID as a sub-URL
AdminOnly bool // True if the endpoint requires admin/superadmin permissions
ApiPerm models.ApiPermission // Required permission to access the endpoint
RequestParser requestParser // Parser for the supplied parameters
execution apiFunc // Execution function for the endpoint
Url string // The API endpoint
HasWildcard bool // True if the endpoint contains the ID as a sub-URL
IsFileRequestApi bool // True if the endpoint is used for public uploads
AdminOnly bool // True if the endpoint requires admin/superadmin permissions
ApiPerm models.ApiPermission // Required permission to access the endpoint
RequestParser requestParser // Parser for the supplied parameters
execution apiFunc // Execution function for the endpoint
}
// base64Prefix marks header values that are transmitted base64-encoded
// (used together with the supportBase64 struct tags on parameter fields).
const base64Prefix = "base64:"
// Continue invokes the route's execution function with the parsed request
// parameters and the authenticated user.
func (r apiRoute) Continue(w http.ResponseWriter, request requestParser, user models.User) {
	r.execution(w, request, user)
}
@@ -43,6 +42,20 @@ var routes = []apiRoute{
execution: apiConfigInfo,
RequestParser: nil,
},
{
Url: "/files/download/",
ApiPerm: models.ApiPermDownload,
execution: apiDownloadSingle,
HasWildcard: true,
RequestParser: &paramFilesDownloadSingle{},
},
{
Url: "/files/downloadzip",
ApiPerm: models.ApiPermDownload,
execution: apiDownloadZip,
HasWildcard: true,
RequestParser: &paramFilesDownloadZip{},
},
{
Url: "/files/changeOwner",
ApiPerm: models.ApiPermEdit,
@@ -54,7 +67,7 @@ var routes = []apiRoute{
Url: "/files/list",
ApiPerm: models.ApiPermView,
execution: apiList,
RequestParser: nil,
RequestParser: &paramFilesListAll{},
},
{
Url: "/files/list/",
@@ -165,6 +178,59 @@ var routes = []apiRoute{
execution: apiResetPassword,
RequestParser: &paramUserResetPw{},
},
{
Url: "/uploadrequest/list",
ApiPerm: models.ApiPermManageFileRequests,
execution: apiUploadRequestList,
RequestParser: nil,
},
{
Url: "/uploadrequest/list/",
ApiPerm: models.ApiPermManageFileRequests,
execution: apiUploadRequestListSingle,
HasWildcard: true,
RequestParser: &paramURequestListSingle{},
},
{
Url: "/uploadrequest/save",
ApiPerm: models.ApiPermManageFileRequests,
execution: apiURequestSave,
RequestParser: &paramURequestSave{},
},
{
Url: "/uploadrequest/delete",
ApiPerm: models.ApiPermManageFileRequests,
execution: apiURequestDelete,
RequestParser: &paramURequestDelete{},
},
{
Url: "/uploadrequest/chunk/add",
ApiPerm: models.ApiPermNone,
execution: apiChunkUploadRequestAdd,
IsFileRequestApi: true,
RequestParser: &paramChunkUploadRequestAdd{},
},
{
Url: "/uploadrequest/chunk/complete",
ApiPerm: models.ApiPermNone,
IsFileRequestApi: true,
execution: apiChunkUploadRequestComplete,
RequestParser: &paramChunkUploadRequestComplete{},
},
{
Url: "/uploadrequest/chunk/reserve",
ApiPerm: models.ApiPermNone,
IsFileRequestApi: true,
execution: apiChunkReserve,
RequestParser: &paramChunkReserve{},
},
{
Url: "/uploadrequest/chunk/unreserve",
ApiPerm: models.ApiPermNone,
IsFileRequestApi: true,
execution: apiChunkUnreserve,
RequestParser: &paramChunkUnreserve{},
},
{
Url: "/logs/delete",
ApiPerm: models.ApiPermManageLogs,
@@ -205,12 +271,53 @@ type requestParser interface {
New() requestParser
}
// paramFilesListAll holds the parsed parameters for the /files/list endpoint.
type paramFilesListAll struct {
	ShowFileRequests bool            `header:"showFileRequests"` // presumably includes file-request uploads in the listing — TODO confirm against handler
	foundHeaders     map[string]bool // tracks which optional headers were actually supplied by the client
}

// ProcessParameter implements requestParser; every value for this endpoint
// comes from headers, so no extra processing of the request is required.
func (p *paramFilesListAll) ProcessParameter(_ *http.Request) error {
	return nil
}
// paramFilesListSingle holds the parsed parameters for the
// /files/list/{id} endpoint.
// NOTE(review): both RequestUrl and Id are declared and both are assigned
// below, and parseRequestUrl is called twice — this looks like an old and
// a new revision merged together; confirm which field the handler consumes.
type paramFilesListSingle struct {
	RequestUrl string
	Id         string
}

// ProcessParameter extracts the file ID from the URL suffix after the
// /files/list/ prefix.
func (p *paramFilesListSingle) ProcessParameter(r *http.Request) error {
	p.RequestUrl = parseRequestUrl(r)
	url := parseRequestUrl(r)
	p.Id = strings.TrimPrefix(url, "/files/list/")
	return nil
}
// paramFilesDownloadSingle holds the parsed parameters for the
// /files/download/{id} endpoint.
type paramFilesDownloadSingle struct {
	Id              string          // file ID, taken from the URL suffix
	WebRequest      *http.Request   // raw request, kept for the download handler
	IncreaseCounter bool            `header:"increaseCounter"` // whether this download increases the file's download counter — TODO confirm default behavior
	PresignUrl      bool            `header:"presignUrl"`      // presumably requests a presigned URL instead of the file content — confirm against handler
	foundHeaders    map[string]bool // tracks which optional headers were actually supplied
}

// ProcessParameter stores the raw request and extracts the file ID from
// the URL after the /files/download/ prefix.
func (p *paramFilesDownloadSingle) ProcessParameter(r *http.Request) error {
	p.WebRequest = r
	url := parseRequestUrl(r)
	p.Id = strings.TrimPrefix(url, "/files/download/")
	return nil
}
// paramFilesDownloadZip holds the parsed parameters for the
// /files/downloadzip endpoint, which serves multiple files at once.
type paramFilesDownloadZip struct {
	Ids             []string      // individual file IDs, split from FileIds in ProcessParameter
	WebRequest      *http.Request // raw request, kept for the download handler
	FileIds         string        `header:"ids" required:"true"`           // comma-separated list of file IDs
	Filename        string        `header:"filename" supportBase64:"true"` // may be sent base64-encoded (see supportBase64 handling)
	IncreaseCounter bool          `header:"increaseCounter"`
	PresignUrl      bool          `header:"presignUrl"`
	foundHeaders    map[string]bool // tracks which optional headers were actually supplied
}

// ProcessParameter splits the comma-separated ids header into Ids and
// stores the raw request for later use.
func (p *paramFilesDownloadZip) ProcessParameter(r *http.Request) error {
	p.Ids = strings.Split(p.FileIds, ",")
	p.WebRequest = r
	return nil
}
@@ -427,6 +534,8 @@ func (p *paramUserModify) ProcessParameter(_ *http.Request) error {
p.Permission = models.UserPermManageApiKeys
case "PERM_USERS":
p.Permission = models.UserPermManageUsers
case "PERM_GUEST_UPLOAD":
p.Permission = models.UserPermGuestUploads
default:
return errors.New("invalid permission")
}
@@ -491,9 +600,28 @@ func (p *paramChunkAdd) ProcessParameter(r *http.Request) error {
return nil
}
func (p *paramChunkAdd) GetRequest() *http.Request {
return p.Request
}
// paramChunkUploadRequestAdd holds the parsed parameters for the /uploadrequest/chunk/add endpoint.
type paramChunkUploadRequestAdd struct {
	// Request keeps the original request so the multipart body can be read later.
	Request *http.Request
	// FileRequestId is the ID of the file request this chunk belongs to.
	FileRequestId string `header:"fileRequestId" required:"true"`
	ApiKey        string `header:"apikey"` // not published in API documentation
	// foundHeaders records for each header name whether it was present in the request.
	foundHeaders map[string]bool
}

// ProcessParameter stores the original request for later body processing.
func (p *paramChunkUploadRequestAdd) ProcessParameter(r *http.Request) error {
	p.Request = r
	return nil
}

// GetRequest returns the original request that was stored by ProcessParameter.
func (p *paramChunkUploadRequestAdd) GetRequest() *http.Request {
	return p.Request
}
type paramChunkComplete struct {
Uuid string `header:"uuid" required:"true"`
FileName string `header:"filename" required:"true"`
FileName string `header:"filename" required:"true" supportBase64:"true"`
FileSize int64 `header:"filesize" required:"true"`
RealSize int64 `header:"realsize"` // not published in API documentation
ContentType string `header:"contenttype"`
@@ -538,14 +666,6 @@ func (p *paramChunkComplete) ProcessParameter(_ *http.Request) error {
}
}
if strings.HasPrefix(p.FileName, base64Prefix) {
decoded, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(p.FileName, base64Prefix))
if err != nil {
return err
}
p.FileName = string(decoded)
}
if p.ContentType == "" {
p.ContentType = "application/octet-stream"
}
@@ -557,6 +677,105 @@ func (p *paramChunkComplete) ProcessParameter(_ *http.Request) error {
return nil
}
// paramChunkReserve holds the parsed parameters for the /uploadrequest/chunk/reserve endpoint.
type paramChunkReserve struct {
	// Id is the ID of the file request for which a chunk UUID shall be reserved.
	Id     string `header:"id" required:"true"`
	ApiKey string `header:"apikey"` // not published in API documentation
	// foundHeaders records for each header name whether it was present in the request.
	foundHeaders map[string]bool
}

// ProcessParameter performs no post-processing; all values are populated by the
// generated ParseRequest implementation.
func (p *paramChunkReserve) ProcessParameter(_ *http.Request) error {
	return nil
}
// paramChunkUnreserve holds the parsed parameters for the /uploadrequest/chunk/unreserve endpoint.
type paramChunkUnreserve struct {
	// Id is the ID of the file request the reservation belongs to.
	Id string `header:"id" required:"true"`
	// Uuid is the previously reserved chunk UUID that shall be released.
	Uuid   string `header:"uuid" required:"true"`
	ApiKey string `header:"apikey"` // not published in API documentation
	// foundHeaders records for each header name whether it was present in the request.
	foundHeaders map[string]bool
}

// ProcessParameter performs no post-processing; all values are populated by the
// generated ParseRequest implementation.
func (p *paramChunkUnreserve) ProcessParameter(_ *http.Request) error {
	return nil
}
// paramChunkUploadRequestComplete holds the parsed parameters for the
// /uploadrequest/chunk/complete endpoint.
type paramChunkUploadRequestComplete struct {
	Uuid          string `header:"uuid" required:"true"`
	FileName      string `header:"filename" required:"true" supportBase64:"true"`
	FileRequestId string `header:"fileRequestId" required:"true"`
	FileSize      int64  `header:"filesize" required:"true"`
	ContentType   string `header:"contenttype"`
	IsNonBlocking bool   `header:"nonblocking"`
	ApiKey        string `header:"apikey"` // not published in API documentation
	FileHeader    chunking.FileHeader
	foundHeaders  map[string]bool
}

// ProcessParameter applies the fallback content type and assembles the
// chunking.FileHeader from the parsed header values.
func (p *paramChunkUploadRequestComplete) ProcessParameter(_ *http.Request) error {
	contentType := p.ContentType
	if contentType == "" {
		// No content type submitted: fall back to the generic binary type.
		contentType = "application/octet-stream"
	}
	p.ContentType = contentType
	p.FileHeader = chunking.FileHeader{
		Filename:    p.FileName,
		ContentType: contentType,
		Size:        p.FileSize,
	}
	return nil
}
// paramURequestDelete holds the parsed parameters for the /uploadrequest/delete endpoint.
type paramURequestDelete struct {
	// Id is the ID of the file request that shall be deleted.
	Id string `header:"id" required:"true"`
	// foundHeaders records for each header name whether it was present in the request.
	foundHeaders map[string]bool
}

// ProcessParameter performs no post-processing; all values are populated by the
// generated ParseRequest implementation.
func (p *paramURequestDelete) ProcessParameter(_ *http.Request) error {
	return nil
}
// paramURequestSave holds the parsed parameters for the /uploadrequest/save endpoint.
// The Is...Set flags record which optional headers were actually submitted, so the
// handler can distinguish "header omitted" from an explicit zero value.
type paramURequestSave struct {
	Id            string `header:"id"`
	Name          string `header:"name" supportBase64:"true"`
	Notes         string `header:"notes" supportBase64:"true"`
	Expiry        int64  `header:"expiry"`
	MaxFiles      int    `header:"maxfiles"`
	MaxSizeMb     int    `header:"maxsize"`
	IsNameSet     bool
	IsExpirySet   bool
	IsMaxFilesSet bool
	IsMaxSizeSet  bool
	IsNotesSet    bool
	// foundHeaders records for each header name whether it was present in the request.
	foundHeaders map[string]bool
}

// ProcessParameter records which of the optional headers were present in the request.
// The flags are assigned directly from the foundHeaders map instead of being set
// conditionally — equivalent behavior (the flags default to false and a missing map
// key yields false), but more idiomatic and shorter.
func (p *paramURequestSave) ProcessParameter(_ *http.Request) error {
	p.IsNameSet = p.foundHeaders["name"]
	p.IsExpirySet = p.foundHeaders["expiry"]
	p.IsMaxFilesSet = p.foundHeaders["maxfiles"]
	p.IsMaxSizeSet = p.foundHeaders["maxsize"]
	p.IsNotesSet = p.foundHeaders["notes"]
	return nil
}
// paramURequestListSingle holds the parsed parameters for the /uploadrequest/list/{id} endpoint.
type paramURequestListSingle struct {
	// Id is the file request ID extracted from the URL path.
	Id string
}

// ProcessParameter extracts the file request ID from the request URL path.
func (p *paramURequestListSingle) ProcessParameter(r *http.Request) error {
	url := parseRequestUrl(r)
	p.Id = strings.TrimPrefix(url, "/uploadrequest/list/")
	return nil
}
func checkHeaderExists(r *http.Request, key string, isRequired, isString bool) (bool, error) {
if r.Header.Get(key) != "" {
return true, nil
+494 -1
View File
@@ -2,13 +2,43 @@
package api
import (
"encoding/base64"
"fmt"
"net/http"
"strings"
)
// Do not modify: This is an automatically generated file created by updateApiRouting.go
// It contains the code that is used to parse the headers submitted in an API request
// ParseRequest reads r and saves the passed header values in the paramFilesListAll struct
// In the end, ProcessParameter() is called
func (p *paramFilesListAll) ParseRequest(r *http.Request) error {
	var err error
	var exists bool
	// Record header presence so ProcessParameter can distinguish "omitted" from a zero value.
	p.foundHeaders = make(map[string]bool)

	// RequestParser header value "showFileRequests", required: false
	exists, err = checkHeaderExists(r, "showFileRequests", false, false)
	if err != nil {
		return err
	}
	p.foundHeaders["showFileRequests"] = exists
	if exists {
		p.ShowFileRequests, err = parseHeaderBool(r, "showFileRequests")
		if err != nil {
			return fmt.Errorf("invalid value in header showFileRequests supplied")
		}
	}
	return p.ProcessParameter(r)
}

// New returns a new instance of paramFilesListAll struct
func (p *paramFilesListAll) New() requestParser {
	return &paramFilesListAll{}
}
// ParseRequest parses the header file. As paramFilesListSingle has no fields with the
// tag header, this method does nothing, except calling ProcessParameter()
func (p *paramFilesListSingle) ParseRequest(r *http.Request) error {
@@ -20,6 +50,115 @@ func (p *paramFilesListSingle) New() requestParser {
return &paramFilesListSingle{}
}
// ParseRequest reads r and saves the passed header values in the paramFilesDownloadSingle struct
// In the end, ProcessParameter() is called
func (p *paramFilesDownloadSingle) ParseRequest(r *http.Request) error {
	var err error
	var exists bool
	// Record header presence so ProcessParameter can distinguish "omitted" from a zero value.
	p.foundHeaders = make(map[string]bool)

	// RequestParser header value "increaseCounter", required: false
	exists, err = checkHeaderExists(r, "increaseCounter", false, false)
	if err != nil {
		return err
	}
	p.foundHeaders["increaseCounter"] = exists
	if exists {
		p.IncreaseCounter, err = parseHeaderBool(r, "increaseCounter")
		if err != nil {
			return fmt.Errorf("invalid value in header increaseCounter supplied")
		}
	}

	// RequestParser header value "presignUrl", required: false
	exists, err = checkHeaderExists(r, "presignUrl", false, false)
	if err != nil {
		return err
	}
	p.foundHeaders["presignUrl"] = exists
	if exists {
		p.PresignUrl, err = parseHeaderBool(r, "presignUrl")
		if err != nil {
			return fmt.Errorf("invalid value in header presignUrl supplied")
		}
	}
	return p.ProcessParameter(r)
}

// New returns a new instance of paramFilesDownloadSingle struct
func (p *paramFilesDownloadSingle) New() requestParser {
	return &paramFilesDownloadSingle{}
}
// ParseRequest reads r and saves the passed header values in the paramFilesDownloadZip struct
// In the end, ProcessParameter() is called
func (p *paramFilesDownloadZip) ParseRequest(r *http.Request) error {
	var err error
	var exists bool
	// Record header presence so ProcessParameter can distinguish "omitted" from a zero value.
	p.foundHeaders = make(map[string]bool)

	// RequestParser header value "ids", required: true
	exists, err = checkHeaderExists(r, "ids", true, true)
	if err != nil {
		return err
	}
	p.foundHeaders["ids"] = exists
	if exists {
		p.FileIds = r.Header.Get("ids")
	}

	// RequestParser header value "filename", required: false, has base64support
	exists, err = checkHeaderExists(r, "filename", false, true)
	if err != nil {
		return err
	}
	p.foundHeaders["filename"] = exists
	if exists {
		p.Filename = r.Header.Get("filename")
		// Values prefixed with "base64:" are transmitted base64-encoded to allow
		// non-ANSI characters in headers; decode them transparently.
		if strings.HasPrefix(p.Filename, "base64:") {
			decoded, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(p.Filename, "base64:"))
			if err != nil {
				return err
			}
			p.Filename = string(decoded)
		}
	}

	// RequestParser header value "increaseCounter", required: false
	exists, err = checkHeaderExists(r, "increaseCounter", false, false)
	if err != nil {
		return err
	}
	p.foundHeaders["increaseCounter"] = exists
	if exists {
		p.IncreaseCounter, err = parseHeaderBool(r, "increaseCounter")
		if err != nil {
			return fmt.Errorf("invalid value in header increaseCounter supplied")
		}
	}

	// RequestParser header value "presignUrl", required: false
	exists, err = checkHeaderExists(r, "presignUrl", false, false)
	if err != nil {
		return err
	}
	p.foundHeaders["presignUrl"] = exists
	if exists {
		p.PresignUrl, err = parseHeaderBool(r, "presignUrl")
		if err != nil {
			return fmt.Errorf("invalid value in header presignUrl supplied")
		}
	}
	return p.ProcessParameter(r)
}

// New returns a new instance of paramFilesDownloadZip struct
func (p *paramFilesDownloadZip) New() requestParser {
	return &paramFilesDownloadZip{}
}
// ParseRequest parses the header file. As paramFilesAdd has no fields with the
// tag header, this method does nothing, except calling ProcessParameter()
func (p *paramFilesAdd) ParseRequest(r *http.Request) error {
@@ -724,6 +863,41 @@ func (p *paramChunkAdd) New() requestParser {
return &paramChunkAdd{}
}
// ParseRequest reads r and saves the passed header values in the paramChunkUploadRequestAdd struct
// In the end, ProcessParameter() is called
func (p *paramChunkUploadRequestAdd) ParseRequest(r *http.Request) error {
	var err error
	var exists bool
	// Record header presence so ProcessParameter can distinguish "omitted" from a zero value.
	p.foundHeaders = make(map[string]bool)

	// RequestParser header value "fileRequestId", required: true
	exists, err = checkHeaderExists(r, "fileRequestId", true, true)
	if err != nil {
		return err
	}
	p.foundHeaders["fileRequestId"] = exists
	if exists {
		p.FileRequestId = r.Header.Get("fileRequestId")
	}

	// RequestParser header value "apikey", required: false
	exists, err = checkHeaderExists(r, "apikey", false, true)
	if err != nil {
		return err
	}
	p.foundHeaders["apikey"] = exists
	if exists {
		p.ApiKey = r.Header.Get("apikey")
	}
	return p.ProcessParameter(r)
}

// New returns a new instance of paramChunkUploadRequestAdd struct
func (p *paramChunkUploadRequestAdd) New() requestParser {
	return &paramChunkUploadRequestAdd{}
}
// ParseRequest reads r and saves the passed header values in the paramChunkComplete struct
// In the end, ProcessParameter() is called
func (p *paramChunkComplete) ParseRequest(r *http.Request) error {
@@ -741,7 +915,7 @@ func (p *paramChunkComplete) ParseRequest(r *http.Request) error {
p.Uuid = r.Header.Get("uuid")
}
// RequestParser header value "filename", required: true
// RequestParser header value "filename", required: true, has base64support
exists, err = checkHeaderExists(r, "filename", true, true)
if err != nil {
return err
@@ -749,6 +923,13 @@ func (p *paramChunkComplete) ParseRequest(r *http.Request) error {
p.foundHeaders["filename"] = exists
if exists {
p.FileName = r.Header.Get("filename")
if strings.HasPrefix(p.FileName, "base64:") {
decoded, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(p.FileName, "base64:"))
if err != nil {
return err
}
p.FileName = string(decoded)
}
}
// RequestParser header value "filesize", required: true
@@ -856,3 +1037,315 @@ func (p *paramChunkComplete) ParseRequest(r *http.Request) error {
// New returns a new instance of paramChunkComplete struct
func (p *paramChunkComplete) New() requestParser {
	return &paramChunkComplete{}
}
// ParseRequest reads r and saves the passed header values in the paramChunkReserve struct
// In the end, ProcessParameter() is called
func (p *paramChunkReserve) ParseRequest(r *http.Request) error {
	var err error
	var exists bool
	// Record header presence so ProcessParameter can distinguish "omitted" from a zero value.
	p.foundHeaders = make(map[string]bool)

	// RequestParser header value "id", required: true
	exists, err = checkHeaderExists(r, "id", true, true)
	if err != nil {
		return err
	}
	p.foundHeaders["id"] = exists
	if exists {
		p.Id = r.Header.Get("id")
	}

	// RequestParser header value "apikey", required: false
	exists, err = checkHeaderExists(r, "apikey", false, true)
	if err != nil {
		return err
	}
	p.foundHeaders["apikey"] = exists
	if exists {
		p.ApiKey = r.Header.Get("apikey")
	}
	return p.ProcessParameter(r)
}

// New returns a new instance of paramChunkReserve struct
func (p *paramChunkReserve) New() requestParser {
	return &paramChunkReserve{}
}
// ParseRequest reads r and saves the passed header values in the paramChunkUnreserve struct
// In the end, ProcessParameter() is called
func (p *paramChunkUnreserve) ParseRequest(r *http.Request) error {
	var err error
	var exists bool
	// Record header presence so ProcessParameter can distinguish "omitted" from a zero value.
	p.foundHeaders = make(map[string]bool)

	// RequestParser header value "id", required: true
	exists, err = checkHeaderExists(r, "id", true, true)
	if err != nil {
		return err
	}
	p.foundHeaders["id"] = exists
	if exists {
		p.Id = r.Header.Get("id")
	}

	// RequestParser header value "uuid", required: true
	exists, err = checkHeaderExists(r, "uuid", true, true)
	if err != nil {
		return err
	}
	p.foundHeaders["uuid"] = exists
	if exists {
		p.Uuid = r.Header.Get("uuid")
	}

	// RequestParser header value "apikey", required: false
	exists, err = checkHeaderExists(r, "apikey", false, true)
	if err != nil {
		return err
	}
	p.foundHeaders["apikey"] = exists
	if exists {
		p.ApiKey = r.Header.Get("apikey")
	}
	return p.ProcessParameter(r)
}

// New returns a new instance of paramChunkUnreserve struct
func (p *paramChunkUnreserve) New() requestParser {
	return &paramChunkUnreserve{}
}
// ParseRequest reads r and saves the passed header values in the paramChunkUploadRequestComplete struct
// In the end, ProcessParameter() is called
func (p *paramChunkUploadRequestComplete) ParseRequest(r *http.Request) error {
	var err error
	var exists bool
	// Record header presence so ProcessParameter can distinguish "omitted" from a zero value.
	p.foundHeaders = make(map[string]bool)

	// RequestParser header value "uuid", required: true
	exists, err = checkHeaderExists(r, "uuid", true, true)
	if err != nil {
		return err
	}
	p.foundHeaders["uuid"] = exists
	if exists {
		p.Uuid = r.Header.Get("uuid")
	}

	// RequestParser header value "filename", required: true, has base64support
	exists, err = checkHeaderExists(r, "filename", true, true)
	if err != nil {
		return err
	}
	p.foundHeaders["filename"] = exists
	if exists {
		p.FileName = r.Header.Get("filename")
		// Values prefixed with "base64:" are transmitted base64-encoded to allow
		// non-ANSI characters in headers; decode them transparently.
		if strings.HasPrefix(p.FileName, "base64:") {
			decoded, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(p.FileName, "base64:"))
			if err != nil {
				return err
			}
			p.FileName = string(decoded)
		}
	}

	// RequestParser header value "fileRequestId", required: true
	exists, err = checkHeaderExists(r, "fileRequestId", true, true)
	if err != nil {
		return err
	}
	p.foundHeaders["fileRequestId"] = exists
	if exists {
		p.FileRequestId = r.Header.Get("fileRequestId")
	}

	// RequestParser header value "filesize", required: true
	exists, err = checkHeaderExists(r, "filesize", true, false)
	if err != nil {
		return err
	}
	p.foundHeaders["filesize"] = exists
	if exists {
		p.FileSize, err = parseHeaderInt64(r, "filesize")
		if err != nil {
			return fmt.Errorf("invalid value in header filesize supplied")
		}
	}

	// RequestParser header value "contenttype", required: false
	exists, err = checkHeaderExists(r, "contenttype", false, true)
	if err != nil {
		return err
	}
	p.foundHeaders["contenttype"] = exists
	if exists {
		p.ContentType = r.Header.Get("contenttype")
	}

	// RequestParser header value "nonblocking", required: false
	exists, err = checkHeaderExists(r, "nonblocking", false, false)
	if err != nil {
		return err
	}
	p.foundHeaders["nonblocking"] = exists
	if exists {
		p.IsNonBlocking, err = parseHeaderBool(r, "nonblocking")
		if err != nil {
			return fmt.Errorf("invalid value in header nonblocking supplied")
		}
	}

	// RequestParser header value "apikey", required: false
	exists, err = checkHeaderExists(r, "apikey", false, true)
	if err != nil {
		return err
	}
	p.foundHeaders["apikey"] = exists
	if exists {
		p.ApiKey = r.Header.Get("apikey")
	}
	return p.ProcessParameter(r)
}

// New returns a new instance of paramChunkUploadRequestComplete struct
func (p *paramChunkUploadRequestComplete) New() requestParser {
	return &paramChunkUploadRequestComplete{}
}
// ParseRequest reads r and saves the passed header values in the paramURequestDelete struct
// In the end, ProcessParameter() is called
func (p *paramURequestDelete) ParseRequest(r *http.Request) error {
	var err error
	var exists bool
	// Record header presence so ProcessParameter can distinguish "omitted" from a zero value.
	p.foundHeaders = make(map[string]bool)

	// RequestParser header value "id", required: true
	exists, err = checkHeaderExists(r, "id", true, true)
	if err != nil {
		return err
	}
	p.foundHeaders["id"] = exists
	if exists {
		p.Id = r.Header.Get("id")
	}
	return p.ProcessParameter(r)
}

// New returns a new instance of paramURequestDelete struct
func (p *paramURequestDelete) New() requestParser {
	return &paramURequestDelete{}
}
// ParseRequest reads r and saves the passed header values in the paramURequestSave struct
// In the end, ProcessParameter() is called
func (p *paramURequestSave) ParseRequest(r *http.Request) error {
	var err error
	var exists bool
	// Record header presence so ProcessParameter can distinguish "omitted" from a zero value.
	p.foundHeaders = make(map[string]bool)

	// RequestParser header value "id", required: false
	exists, err = checkHeaderExists(r, "id", false, true)
	if err != nil {
		return err
	}
	p.foundHeaders["id"] = exists
	if exists {
		p.Id = r.Header.Get("id")
	}

	// RequestParser header value "name", required: false, has base64support
	exists, err = checkHeaderExists(r, "name", false, true)
	if err != nil {
		return err
	}
	p.foundHeaders["name"] = exists
	if exists {
		p.Name = r.Header.Get("name")
		// Values prefixed with "base64:" are transmitted base64-encoded to allow
		// non-ANSI characters in headers; decode them transparently.
		if strings.HasPrefix(p.Name, "base64:") {
			decoded, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(p.Name, "base64:"))
			if err != nil {
				return err
			}
			p.Name = string(decoded)
		}
	}

	// RequestParser header value "notes", required: false, has base64support
	exists, err = checkHeaderExists(r, "notes", false, true)
	if err != nil {
		return err
	}
	p.foundHeaders["notes"] = exists
	if exists {
		p.Notes = r.Header.Get("notes")
		if strings.HasPrefix(p.Notes, "base64:") {
			decoded, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(p.Notes, "base64:"))
			if err != nil {
				return err
			}
			p.Notes = string(decoded)
		}
	}

	// RequestParser header value "expiry", required: false
	exists, err = checkHeaderExists(r, "expiry", false, false)
	if err != nil {
		return err
	}
	p.foundHeaders["expiry"] = exists
	if exists {
		p.Expiry, err = parseHeaderInt64(r, "expiry")
		if err != nil {
			return fmt.Errorf("invalid value in header expiry supplied")
		}
	}

	// RequestParser header value "maxfiles", required: false
	exists, err = checkHeaderExists(r, "maxfiles", false, false)
	if err != nil {
		return err
	}
	p.foundHeaders["maxfiles"] = exists
	if exists {
		p.MaxFiles, err = parseHeaderInt(r, "maxfiles")
		if err != nil {
			return fmt.Errorf("invalid value in header maxfiles supplied")
		}
	}

	// RequestParser header value "maxsize", required: false
	exists, err = checkHeaderExists(r, "maxsize", false, false)
	if err != nil {
		return err
	}
	p.foundHeaders["maxsize"] = exists
	if exists {
		p.MaxSizeMb, err = parseHeaderInt(r, "maxsize")
		if err != nil {
			return fmt.Errorf("invalid value in header maxsize supplied")
		}
	}
	return p.ProcessParameter(r)
}

// New returns a new instance of paramURequestSave struct
func (p *paramURequestSave) New() requestParser {
	return &paramURequestSave{}
}
// ParseRequest parses the header file. As paramURequestListSingle has no fields with the
// tag header, this method does nothing, except calling ProcessParameter()
func (p *paramURequestListSingle) ParseRequest(r *http.Request) error {
	return p.ProcessParameter(r)
}

// New returns a new instance of paramURequestListSingle struct
func (p *paramURequestListSingle) New() requestParser {
	return &paramURequestListSingle{}
}
@@ -0,0 +1,37 @@
package users
import (
"errors"
"github.com/forceu/gokapi/internal/configuration"
"github.com/forceu/gokapi/internal/configuration/database"
"github.com/forceu/gokapi/internal/models"
)
// minLengthUser is the minimum number of bytes a username must contain.
const minLengthUser = 2

// ErrorNameToShort is returned when the submitted username is shorter than minLengthUser.
var ErrorNameToShort = errors.New("username too short")

// ErrorUserExists is returned when a user with the submitted name already exists.
var ErrorUserExists = errors.New("user already exists")

// Create adds a new user with the given name at the regular user level and returns
// the stored user. It fails if the name is too short or already taken.
// NOTE(review): the length check counts bytes, not runes — confirm this is intended
// for multi-byte usernames.
func Create(name string) (models.User, error) {
	if len(name) < minLengthUser {
		return models.User{}, ErrorNameToShort
	}
	if _, exists := database.GetUserByName(name); exists {
		return models.User{}, ErrorUserExists
	}
	user := models.User{
		Name:      name,
		UserLevel: models.UserLevelUser,
	}
	// Grant the guest-upload permission automatically when the environment requests it.
	if configuration.GetEnvironment().PermRequestGrantedByDefault {
		user.GrantPermission(models.UserPermGuestUploads)
	}
	database.SaveUser(user, true)
	// Re-read the user so the caller receives the database-populated record.
	storedUser, found := database.GetUserByName(name)
	if !found {
		return models.User{}, errors.New("user could not be created")
	}
	return storedUser, nil
}
+61 -22
View File
@@ -1,18 +1,25 @@
package fileupload
import (
"errors"
"io"
"net/http"
"strconv"
"time"
"github.com/forceu/gokapi/internal/configuration"
"github.com/forceu/gokapi/internal/configuration/database"
"github.com/forceu/gokapi/internal/logging"
"github.com/forceu/gokapi/internal/models"
"github.com/forceu/gokapi/internal/storage"
"github.com/forceu/gokapi/internal/storage/chunking"
"io"
"net/http"
"strconv"
"time"
"github.com/forceu/gokapi/internal/storage/chunking/chunkreservation"
"github.com/forceu/gokapi/internal/webserver/api/errorcodes"
)
const minChunkSize = 5 * 1024 * 1024
const minChunkSizeLowMaxChunk = 1 * 1024 * 1024
// ProcessCompleteFile processes a file upload request
// This is only used when a complete file is uploaded through the API with /files/add
// Normally a file is created from a chunk
@@ -37,66 +44,92 @@ func ProcessCompleteFile(w http.ResponseWriter, r *http.Request, userId, maxMemo
return err
}
user, _ := database.GetUser(userId)
logging.LogUpload(result, user)
// Returns empty fr if the file is not related to a file request
fr, _ := database.GetFileRequest(config.FileRequestId)
logging.LogUpload(result, user, fr)
_, _ = io.WriteString(w, result.ToJsonResult(config.ExternalUrl, configuration.Get().IncludeFilename))
return nil
}
// isChunkMinChunkSize reports whether the uploaded chunk is large enough to be
// accepted: it must either reach the required minimum chunk size or cover all
// remaining bytes of the file after offset (i.e. it is the final chunk).
func isChunkMinChunkSize(r *http.Request, offset, fileSize int64) bool {
	// Servers configured with a maximum chunk size below 5 use the lower limit.
	required := int64(minChunkSize)
	if configuration.Get().ChunkSize < 5 {
		required = int64(minChunkSizeLowMaxChunk)
	}
	remaining := fileSize - offset
	return r.ContentLength >= required || r.ContentLength >= remaining
}
// ProcessNewChunk processes a file chunk upload request
func ProcessNewChunk(w http.ResponseWriter, r *http.Request, isApiCall bool) error {
func ProcessNewChunk(w http.ResponseWriter, r *http.Request, isApiCall bool, filerequestId string) (error, int) {
err := r.ParseMultipartForm(int64(configuration.Get().MaxMemory) * 1024 * 1024)
if err != nil {
return err
return err, errorcodes.CannotParse
}
defer r.MultipartForm.RemoveAll()
chunkInfo, err := chunking.ParseChunkInfo(r, isApiCall)
if err != nil {
return err
return err, errorcodes.InvalidUserInput
}
file, header, err := r.FormFile("file")
if err != nil {
return err
return err, errorcodes.InvalidUserInput
}
if !isChunkMinChunkSize(r, chunkInfo.Offset, chunkInfo.TotalFilesizeBytes) {
return storage.ErrorChunkTooSmall, errorcodes.ChunkTooSmall
}
if filerequestId != "" {
if !chunkreservation.SetUploading(filerequestId, chunkInfo.UUID) {
return errors.New("chunk reservation has expired or was not requested"), errorcodes.InvalidChunkReservation
}
}
err = chunking.NewChunk(file, header, chunkInfo)
defer file.Close()
if err != nil {
return err
return err, errorcodes.CannotAllocateFile
}
_, _ = io.WriteString(w, "{\"result\":\"OK\"}")
return nil
return nil, 0
}
// ParseFileHeader parses the parameters for CompleteChunk()
// This is done as two operations, as CompleteChunk can be blocking too long
// for an HTTP request, by calling this function first, r can be closed afterwards
func ParseFileHeader(r *http.Request) (string, chunking.FileHeader, models.UploadRequest, error) {
func ParseFileHeader(r *http.Request) (string, chunking.FileHeader, models.UploadParameters, error) {
err := r.ParseForm()
if err != nil {
return "", chunking.FileHeader{}, models.UploadRequest{}, err
return "", chunking.FileHeader{}, models.UploadParameters{}, err
}
chunkId := r.Form.Get("chunkid")
config, err := parseConfig(r.Form)
if err != nil {
return "", chunking.FileHeader{}, models.UploadRequest{}, err
return "", chunking.FileHeader{}, models.UploadParameters{}, err
}
header, err := chunking.ParseFileHeader(r)
if err != nil {
return "", chunking.FileHeader{}, models.UploadRequest{}, err
return "", chunking.FileHeader{}, models.UploadParameters{}, err
}
return chunkId, header, config, nil
}
// CompleteChunk processes a file after all the chunks have been completed
// The parameters can be generated with ParseFileHeader()
func CompleteChunk(chunkId string, header chunking.FileHeader, userId int, config models.UploadRequest) (models.File, error) {
func CompleteChunk(chunkId string, header chunking.FileHeader, userId int, config models.UploadParameters) (models.File, error) {
return storage.NewFileFromChunk(chunkId, header, userId, config)
}
// CreateUploadConfig populates a new models.UploadRequest struct
func CreateUploadConfig(allowedDownloads, expiryDays int, password string, unlimitedTime, unlimitedDownload, isEnd2End bool, realSize int64) models.UploadRequest {
// CreateUploadConfig populates a new models.UploadParameters struct
func CreateUploadConfig(allowedDownloads, expiryDays int, password string, unlimitedTime, unlimitedDownload, isEnd2End bool, realSize int64, fileRequestId string) models.UploadParameters {
settings := configuration.Get()
return models.UploadRequest{
return models.UploadParameters{
AllowedDownloads: allowedDownloads,
Expiry: expiryDays,
ExpiryTimestamp: time.Now().Add(time.Duration(expiryDays) * time.Hour * 24).Unix(),
@@ -107,10 +140,16 @@ func CreateUploadConfig(allowedDownloads, expiryDays int, password string, unlim
UnlimitedDownload: unlimitedDownload,
IsEndToEndEncrypted: isEnd2End,
RealSize: realSize,
FileRequestId: fileRequestId,
}
}
func parseConfig(values formOrHeader) (models.UploadRequest, error) {
func parseConfig(values formOrHeader) (models.UploadParameters, error) {
fileRequestId := values.Get("fileRequestId")
if fileRequestId != "" {
return CreateUploadConfig(0, 0, "",
true, true, false, 0, fileRequestId), nil
}
allowedDownloads := values.Get("allowedDownloads")
expiryDays := values.Get("expiryDays")
password := values.Get("password")
@@ -140,10 +179,10 @@ func parseConfig(values formOrHeader) (models.UploadRequest, error) {
realSizeStr := values.Get("realSize")
realSize, err = strconv.ParseInt(realSizeStr, 10, 64)
if err != nil {
return models.UploadRequest{}, err
return models.UploadParameters{}, err
}
}
return CreateUploadConfig(allowedDownloadsInt, expiryDaysInt, password, unlimitedTime, unlimitedDownload, isEnd2End, realSize), nil
return CreateUploadConfig(allowedDownloadsInt, expiryDaysInt, password, unlimitedTime, unlimitedDownload, isEnd2End, realSize, ""), nil
}
type formOrHeader interface {
@@ -3,10 +3,6 @@ package fileupload
import (
"bytes"
"encoding/json"
"github.com/forceu/gokapi/internal/configuration"
"github.com/forceu/gokapi/internal/models"
"github.com/forceu/gokapi/internal/test"
"github.com/forceu/gokapi/internal/test/testconfiguration"
"io"
"mime/multipart"
"net/http"
@@ -16,6 +12,11 @@ import (
"reflect"
"strings"
"testing"
"github.com/forceu/gokapi/internal/configuration"
"github.com/forceu/gokapi/internal/models"
"github.com/forceu/gokapi/internal/test"
"github.com/forceu/gokapi/internal/test/testconfiguration"
)
func TestMain(m *testing.M) {
@@ -98,17 +99,17 @@ func TestProcess(t *testing.T) {
func TestProcessNewChunk(t *testing.T) {
w, r := test.GetRecorder("POST", "/uploadChunk", nil, nil, strings.NewReader("invalid§$%&%§"))
err := ProcessNewChunk(w, r, false)
err, _ := ProcessNewChunk(w, r, false, "")
test.IsNotNil(t, err)
w = httptest.NewRecorder()
r = getFileUploadRecorder(false)
err = ProcessNewChunk(w, r, false)
err, _ = ProcessNewChunk(w, r, false, "")
test.IsNotNil(t, err)
w = httptest.NewRecorder()
r = getFileUploadRecorder(true)
err = ProcessNewChunk(w, r, false)
err, _ = ProcessNewChunk(w, r, false, "")
test.IsNil(t, err)
response, err := io.ReadAll(w.Result().Body)
test.IsNil(t, err)
+10 -3
View File
@@ -1,20 +1,27 @@
package headers
import (
"github.com/forceu/gokapi/internal/models"
"net/http"
"strconv"
"time"
"github.com/forceu/gokapi/internal/models"
)
// Write sets headers to either display the file inline or to force download, the content type
// and if the file is encrypted, the creation timestamp to now
func Write(file models.File, w http.ResponseWriter, forceDownload bool) {
func Write(file models.File, w http.ResponseWriter, forceDownload, serveDecrypted bool) {
if forceDownload {
w.Header().Set("Content-Disposition", "attachment; filename=\""+file.Name+"\"")
} else {
w.Header().Set("Content-Disposition", "inline; filename=\""+file.Name+"\"")
}
w.Header().Set("Content-Type", file.ContentType)
if !file.RequiresClientDecryption() || serveDecrypted {
w.Header().Set("Content-Type", file.ContentType)
w.Header().Set("Content-Length", strconv.FormatInt(file.SizeBytes, 10))
} else {
w.Header().Set("Content-Type", "application/octet-stream")
}
if file.Encryption.IsEncrypted {
w.Header().Set("Accept-Ranges", "bytes")
+5 -4
View File
@@ -1,22 +1,23 @@
package headers
import (
"testing"
"github.com/forceu/gokapi/internal/models"
"github.com/forceu/gokapi/internal/test"
"testing"
)
func TestWriteDownloadHeaders(t *testing.T) {
file := models.File{Name: "testname", ContentType: "testtype"}
w, _ := test.GetRecorder("GET", "/test", nil, nil, nil)
Write(file, w, true)
Write(file, w, true, false)
test.IsEqualString(t, w.Result().Header.Get("Content-Disposition"), "attachment; filename=\"testname\"")
w, _ = test.GetRecorder("GET", "/test", nil, nil, nil)
Write(file, w, false)
Write(file, w, false, false)
test.IsEqualString(t, w.Result().Header.Get("Content-Disposition"), "inline; filename=\"testname\"")
test.IsEqualString(t, w.Result().Header.Get("Content-Type"), "testtype")
file.Encryption.IsEncrypted = true
w, _ = test.GetRecorder("GET", "/test", nil, nil, nil)
Write(file, w, false)
Write(file, w, false, false)
test.IsEqualString(t, w.Result().Header.Get("Accept-Ranges"), "bytes")
}
@@ -31,6 +31,9 @@
{
"name": "auth"
},
{
"name": "uploadrequest"
},
{
"name": "user"
},
@@ -105,6 +108,183 @@
}
}
},
"/files/downloadzip": {
"get": {
"tags": [
"files"
],
"summary": "Downloads files as ZIP file with optionally increasing the download counter",
"description": "This API call downloads multiple file that are not expired and increasing their download counter is disabled by default. Can be set up to return a pre-signed URL instead of the zip file itself, which is valid for 30 seconds and can be accessed by any registered user. End-to-end encrypted files and encrypted files stored on cloud servers cannot be downloaded. Returns 404 if an invalid/expired ID was passed. Requires API permission DOWNLOAD. To download files that were not uploaded by the user, the user needs to have the user permission LIST",
"operationId": "downloadzip",
"parameters": [
{
"name": "ids",
"in": "header",
"required": true,
"schema": {
"type": "string"
},
"description": "IDs of files to be downloaded seperated by comma"
},
{
"name": "filename",
"in": "header",
"required": false,
"schema": {
"type": "string"
},
"description": "The filename for the new Zip file. If the filename includes non-ANSI characters, you can encode them with base64, by adding 'base64:' at the beginning, e.g. 'base64:ZmlsZW5hbWU='"
},
{
"name": "increaseCounter",
"in": "header",
"required": false,
"schema": {
"type": "boolean"
},
"description": "Increase counter if set to true"
},
{
"name": "presignUrl",
"in": "header",
"required": false,
"schema": {
"type": "boolean"
},
"description": "Return a pre-signed URL instead of the actual file. Valid for one download within 30 seconds and can only be used by logged in users. When this option is set, download counter cannot be increased."
}
],
"security": [
{
"apikey": [
"DOWNLOAD"
]
}
],
"responses": {
"200": {
"description": "Operation successful",
"content": {
"application/octet-stream": {
"schema": {
"type": "object",
"format": "binary"
}
},
"application/json": {
"schema": {
"type": "object",
"properties": {
"Result": {
"type": "string",
"example": "OK"
},
"downloadUrl": {
"type": "string",
"format": "uri",
"example": "http://gokapi.local:53842/downloadPresigned?key=xieph5ae1leph6Heel0Hoo9uth1eiY9xei8IiboPoothie0ahm6tutufoo2s"
}
}
}
}
}
},
"400": {
"description": "Invalid input or trying to download an end-to-end encrypted file"
},
"401": {
"description": "Invalid API key provided for authentication or API key does not have the required permission"
},
"404": {
"description": "Invalid ID provided or file has expired"
}
}
}
},
"/files/download/{id}": {
"get": {
"tags": [
"files"
],
"summary": "Downloads file with optionally increasing the download counter",
"description": "This API call downloads a file that is not expired and increasing its download counter is disabled by default. Can be set up to return a pre-signed URL instead of the file itself, which is valid for 30 seconds and can be accessed by any registered user. End-to-end encrypted files and encrypted files stored on cloud servers cannot be downloaded. Returns 404 if an invalid/expired ID was passed. Requires API permission DOWNLOAD. To download files that were not uploaded by the user, the user needs to have the user permission LIST",
"operationId": "downloadsingle",
"parameters": [
{
"name": "id",
"in": "path",
"required": true,
"schema": {
"type": "string"
},
"description": "ID of file to be downloaded"
},
{
"name": "increaseCounter",
"in": "header",
"required": false,
"schema": {
"type": "boolean"
},
"description": "Increase counter if set to true"
},
{
"name": "presignUrl",
"in": "header",
"required": false,
"schema": {
"type": "boolean"
},
"description": "Return a pre-signed URL instead of the actual file. Valid for one download within 30 seconds and can only be used by logged in users. When this option is set, download counter cannot be increased."
}
],
"security": [
{
"apikey": [
"DOWNLOAD"
]
}
],
"responses": {
"200": {
"description": "Operation successful",
"content": {
"application/octet-stream": {
"schema": {
"type": "object",
"format": "binary"
}
},
"application/json": {
"schema": {
"type": "object",
"properties": {
"Result": {
"type": "string",
"example": "OK"
},
"downloadUrl": {
"type": "string",
"format": "uri",
"example": "http://gokapi.local:53842/downloadPresigned?key=xieph5ae1leph6Heel0Hoo9uth1eiY9xei8IiboPoothie0ahm6tutufoo2s"
}
}
}
}
}
},
"400": {
"description": "Invalid input or trying to download an end-to-end encrypted file"
},
"401": {
"description": "Invalid API key provided for authentication or API key does not have the required permission"
},
"404": {
"description": "Invalid ID provided or file has expired"
}
}
}
},
"/files/list": {
"get": {
"tags": [
@@ -120,6 +300,17 @@
]
}
],
"parameters": [
{
"name": "showFileRequests",
"in": "header",
"required": false,
"schema": {
"type": "boolean"
},
"description": "Set to true, to include files uploaded through file requests"
}
],
"responses": {
"200": {
"description": "Operation successful",
@@ -199,7 +390,7 @@
"chunk"
],
"summary": "Uploads a new chunk",
"description": "Uploads a file in chunks, in case a reverse proxy does not support upload of larger files. Parallel uploading is supported. Must call /chunk/complete after all chunks have been uploaded. WARNING: Does not support end-to-end encryption! If server is setup to utilise end-to-end encryption, file will be stored in plain-text! To upload an end-to-end encrypted file, use gokapi-cli. Requires API permission UPLOAD",
"description": "Uploads a file in chunks, in case a reverse proxy does not support upload of larger files. Parallel uploading is supported. Must call /chunk/complete after all chunks have been uploaded. WARNING: Does not support end-to-end encryption! If server is setup to utilise end-to-end encryption, file will be stored in plain-text! To upload an end-to-end encrypted file, use gokapi-cli. Chunks must be at least 5MB in size, unless last chunk of file. Requires API permission UPLOAD",
"operationId": "chunkadd",
"security": [
{
@@ -316,6 +507,264 @@
"schema": {
"type": "string"
}
},
{
"name": "nonblocking",
"in": "header",
"description": "If set to true, the call returns without waiting for the file processing to finish.",
"required": false,
"schema": {
"type": "boolean"
}
}
],
"responses": {
"200": {
"description": "Operation successful",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/UploadResult"
}
}
}
},
"400": {
"description": "Invalid input"
},
"401": {
"description": "Invalid API key provided for authentication or API key does not have the required permission"
}
}
}
},
"/uploadrequest/chunk/reserve": {
"post": {
"tags": [
"uploadrequest"
],
"summary": "Requests a UUID for uploading a new file for a file request",
"description": "Requests an UUID that can be used for uplading a new file. The chunks for the new file have to use this UUID. The first chunk needs to be uploaded latest 4 minutes after requesting the UUID. Requires API key associated with the file request",
"operationId": "chunkreserve",
"security": [
{
"apikey": [
"FileRequest"
]
}
],
"parameters": [
{
"name": "id",
"in": "header",
"description": "The file request ID",
"required": true,
"schema": {
"type": "string"
}
}],
"responses": {
"200": {
"description": "Operation successful",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/chunkReserveResult"
}
}
}
},
"400": {
"description": "Invalid ID or the file request does not accept any more files"
},
"401": {
"description": "Invalid API key provided for authentication or API key does not have the required permission"
},
"429": {
"description": "If too many chunks are currently requested, the caller has to wait a couple of seconds and try again. The rate limit is only for file requests that are not limited in file count"
}
}
}
},
"/uploadrequest/chunk/unreserve": {
"post": {
"tags": [
"uploadrequest"
],
"summary": "Frees a reserved UUID if upload was cancelled",
"description": "This call frees a reserved UUID, so that it does not count towards the quota anymore. Used if an upload was cancelled or failed. Requires API key associated with the file request",
"operationId": "chunkunreserve",
"security": [
{
"apikey": [
"FileRequest"
]
}
],
"parameters": [
{
"name": "id",
"in": "header",
"description": "The file request ID",
"required": true,
"schema": {
"type": "string"
}
},{
"name": "uuid",
"in": "header",
"description": "The reserved UUID",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Operation successful",
"content": {
"application/json": {
}
}
},
"400": {
"description": "Invalid ID or the file request does not accept any more files"
},
"401": {
"description": "Invalid API key provided for authentication or API key does not have the required permission"
},
"429": {
"description": "If too many chunks are currently requested, the caller has to wait a couple of seconds and try again. The rate limit is only for file requests that are not limited in file count"
}
}
}
},
"/uploadrequest/chunk/add": {
"post": {
"tags": [
"uploadrequest"
],
"summary": "Uploads a new chunk for a file request",
"description": "Uploads a file in chunks. Parallel uploading is supported. Must call /uploadrequest/chunk/reserve to request an UUID first and must call /uploadrequest/chunk/complete after all chunks have been uploaded. WARNING: Does not support end-to-end encryption! If server is setup to utilise end-to-end encryption, file will be stored in plain-text! Chunks must be at least 5MB in size, unless last chunk of file. Requires API key associated with the file request",
"operationId": "chunkaddur",
"security": [
{
"apikey": [
"FileRequest"
]
}
],
"parameters": [
{
"name": "fileRequestId",
"in": "header",
"description": "The ID of the upload request",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "nonblocking",
"in": "header",
"description": "If set to true, the call returns without waiting for the file processing to finish.",
"required": false,
"schema": {
"type": "boolean"
}
}
],
"requestBody": {
"content": {
"multipart/form-data": {
"schema": {
"$ref": "#/components/schemas/chunking"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Operation successful",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/chunkUploadResult"
}
}
}
},
"400": {
"description": "Invalid input"
},
"401": {
"description": "Invalid API key provided for authentication or API key does not have the required permission"
}
}
}
},
"/uploadrequest/chunk/complete": {
"post": {
"tags": [
"uploadrequest"
],
"summary": "Finalises uploaded chunks",
"description": "Needs to be called after all chunks have been uploaded. Adds the uploaded file to Gokapi. Requires API permission UPLOAD",
"operationId": "chunkurcomplete",
"security": [
{
"apikey": [
"FileRequest"
]
}
],
"parameters": [
{
"name": "uuid",
"in": "header",
"description": "The unique ID that was used for the uploaded chunks",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "fileRequestId",
"in": "header",
"description": "The file request ID that was used for the uploaded chunks",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "filename",
"in": "header",
"description": "The filename of the uploaded file. If the filename includes non-ANSI characters, you can encode them with base64, by adding 'base64:' at the beginning, e.g. 'base64:ZmlsZW5hbWU='",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "filesize",
"in": "header",
"description": "The total filesize of the uploaded file in bytes",
"required": true,
"schema": {
"type": "integer"
}
},
{
"name": "contenttype",
"in": "header",
"description": "The MIME content type. If empty, application/octet-stream will be used.",
"required": false,
"schema": {
"type": "string"
}
}
],
"responses": {
@@ -343,7 +792,7 @@
"tags": [
"logs"
],
"summary": "Deletes entries from the logfilek",
"summary": "Deletes entries from the logfile",
"description": "This API call deletes all lines before older than a cutoff date. Requires API permission MANAGE_LOGS and user needs to be admin or super-admin.",
"operationId": "logsdelete",
"security": [
@@ -971,6 +1420,7 @@
"PERM_EDIT",
"PERM_DELETE",
"PERM_REPLACE",
"PERM_MANAGE_FILE_REQUESTS",
"PERM_MANAGE_LOGS",
"PERM_MANAGE_USERS",
"PERM_API_MOD"
@@ -1050,6 +1500,244 @@
}
}
},
"/uploadrequest/list": {
"get": {
"tags": [
"uploadrequest"
],
"summary": "Lists all file requests",
"description": "This API call lists all file requests. Requires API permission GUEST_UPLOAD. To view file requests created by a different user, the user needs to have the user permission LIST",
"operationId": "ulist",
"security": [
{
"apikey": [
"PERM_GUEST_UPLOAD"
]
}
],
"responses": {
"200": {
"description": "Operation successful",
"content": {
"application/json": {
"schema": {
"type": "array",
"nullable": false,
"items": {
"$ref": "#/components/schemas/FileRequest"
}
}
}
}
},
"400": {
"description": "Invalid input"
},
"401": {
"description": "Invalid API key provided for authentication or API key does not have the required permission"
}
}
}
},
"/uploadrequest/list/{id}": {
"get": {
"tags": [
"uploadrequest"
],
"summary": "Get file request by ID",
"description": "This API call lists a specific file request. Returns 404 if an invalid ID was passed. Requires API permission GUEST_UPLOAD. To view file requests from a different user, the user needs to have the user permission LIST",
"operationId": "ulistbyid",
"parameters": [
{
"name": "id",
"in": "path",
"required": true,
"schema": {
"type": "string"
},
"description": "ID of file request"
}
],
"security": [
{
"apikey": [
"PERM_GUEST_UPLOAD"
]
}
],
"responses": {
"200": {
"description": "Operation successful",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/FileRequest"
}
}
}
},
"400": {
"description": "Invalid input"
},
"401": {
"description": "Invalid API key provided for authentication or API key does not have the required permission"
},
"404": {
"description": "Invalid ID provided"
}
}
}
},
"/uploadrequest/save": {
"post": {
"tags": [
"uploadrequest"
],
"summary": "Creates a new or saves an existing upload request",
"description": "This API call creates a new upload request if the parameter ID is not submitted. If editing a request, only the submitted parameters will be changed. To save a request of a different user, the user requires the user permission EDIT to execute this call. Requires API permission GUEST_UPLOAD",
"operationId": "uploadrequestsave",
"security": [
{
"apikey": [
"PERM_GUEST_UPLOAD"
]
}
],
"parameters": [
{
"name": "id",
"in": "header",
"description": "The request to be saved. If empty, a new request will be created",
"required": false,
"style": "simple",
"explode": false,
"schema": {
"type": "string"
}
},
{
"name": "name",
"in": "header",
"description": "The given name for the request. If the name includes non-ANSI characters, you can encode them with base64, by adding 'base64:' at the beginning, e.g. 'base64:ZmlsZW5hbWU='",
"required": false,
"style": "simple",
"explode": false,
"schema": {
"type": "string"
}
},
{
"name": "notes",
"in": "header",
"description": "The public notes for the request. If the notes includes non-ANSI characters, you can encode them with base64, by adding 'base64:' at the beginning, e.g. 'base64:ZmlsZW5hbWU='",
"required": false,
"style": "simple",
"explode": false,
"schema": {
"type": "string"
}
},
{
"name": "expiry",
"in": "header",
"description": "The expiry as a UTC unix timestamp. No expiry if 0",
"required": false,
"style": "simple",
"explode": false,
"schema": {
"type": "integer"
}
},
{
"name": "maxfiles",
"in": "header",
"description": "The amount of files that can be uploaded. No limit if 0",
"required": false,
"style": "simple",
"explode": false,
"schema": {
"type": "integer"
}
},
{
"name": "maxsize",
"in": "header",
"description": "The maximum size in Megabytes per file. No limit if 0",
"required": false,
"style": "simple",
"explode": false,
"schema": {
"type": "integer"
}
}
],
"responses": {
"200": {
"description": "Operation successful",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/FileRequest"
}
}
}
},
"400": {
"description": "Invalid ID or parameters supplied"
},
"401": {
"description": "Invalid API key provided for authentication or API key does not have the required permission"
},
"404": {
"description": "Upload request not found"
}
}
}
},
"/uploadrequest/delete": {
"delete": {
"tags": [
"uploadrequest"
],
"summary": "Deletes the upload request and all associated files",
"description": "This API call deletes the given file requests. If files are associated with the request, they will also be deleted. To delete a request of a different user, the user requires the user permission DELETE to execute this call. Requires API permission GUEST_UPLOAD",
"operationId": "uploadrequestdelete",
"security": [
{
"apikey": [
"PERM_GUEST_UPLOAD"
]
}
],
"parameters": [
{
"name": "id",
"in": "header",
"description": "The request to be deleted",
"required": true,
"style": "simple",
"explode": false,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Operation successful"
},
"400": {
"description": "Invalid ID or parameters supplied"
},
"401": {
"description": "Invalid API key provided for authentication or API key does not have the required permission"
},
"404": {
"description": "Upload request not found"
}
}
}
},
"/user/create": {
"post": {
"tags": [
@@ -1156,7 +1844,8 @@
"PERM_DELETE",
"PERM_LOGS",
"PERM_API",
"PERM_USERS"
"PERM_USERS",
"PERM_GUEST_UPLOAD"
]
}
},
@@ -1257,7 +1946,7 @@
"user"
],
"summary": "Deletes the selected user",
"description": "This API call changes deletes the given user. If files are associated with the user, they will be linked with the user that initiated the deletion. If deleteFiles is \"true\", the files will be deleted instead. Requires API permission MANAGE_USERS",
"description": "This API call deletes the given user. If files are associated with the user, they will be linked with the user that initiated the deletion. If deleteFiles is \"true\", the files will be deleted instead. Requires API permission MANAGE_USERS",
"operationId": "userdelete",
"security": [
{
@@ -1397,6 +2086,11 @@
"description": "The public hotlink URL for the file",
"example": "https://gokapi.server/h/tDMs0U8MvRFwK69PfjagI7F87C13UVeQuOGDvtCG.jpg"
},
"FileRequestId": {
"type": "string",
"description": "If the file belongs to an upload request, the ID is set in this field",
"example": "cnMEWsrMwSx1wyr"
},
"UploadDate": {
"type": "integer",
"description": "UTC timestamp of file upload",
@@ -1467,6 +2161,11 @@
"type": "boolean",
"example": "false"
},
"IsFileRequest": {
"description": "True if the file belongs to an upload request",
"type": "boolean",
"example": "true"
},
"UploaderId": {
"description": "The user ID of the uploader",
"type": "integer",
@@ -1476,6 +2175,104 @@
"description": "File is a struct used for saving information about an uploaded file",
"x-go-package": "Gokapi/internal/models"
},
"FileRequest": {
"type": "object",
"description": "Represents a file upload request and its associated metadata.",
"properties": {
"id": {
"type": "string",
"description": "The internal ID of the file request",
"example": "caep3Ooquu6phoo"
},
"userid": {
"type": "integer",
"format": "int32",
"description": "The user ID of the owner",
"example": "2"
},
"maxfiles": {
"type": "integer",
"format": "int32",
"description": "The maximum number of files allowed or 0 if unlimited",
"example": "20"
},
"maxsize": {
"type": "integer",
"format": "int32",
"description": "The maximum file size allowed in MB or 0 if unlimited",
"example": "0"
},
"CombinedMaxSize": {
"type": "integer",
"format": "int32",
"description": "The lesser of MaxSize and the server's max upload size.",
"example": "0"
},
"expiry": {
"type": "integer",
"format": "int64",
"description": "The expiry time of the file request as a Unix timestamp or 0 if no expiry",
"example": "1767022842"
},
"creationdate": {
"type": "integer",
"format": "int64",
"description": "The timestamp when the file request was created",
"example": "1767021842"
},
"name": {
"type": "string",
"description": "The given name for the file request",
"example": "Book list entries"
},
"notes": {
"type": "string",
"description": "The public notes for the file request",
"example": "Please make sure to upload revision 1 files"
},
"apikey": {
"type": "string",
"description": "The API key that is used for uploading files for this request",
"example": "wrg5L7ldIUiXd27mIH1Fh0gGIyrekC"
},
"uploadedfiles": {
"type": "integer",
"format": "int32",
"description": "The number of uploaded files for this request",
"example": "3"
},
"reserveduploads": {
"type": "integer",
"format": "int32",
"description": "The number of current uploads, which have not been finalised yet",
"example": "1"
},
"lastupload": {
"type": "integer",
"format": "int64",
"description": "The timestamp of the last upload",
"example": "1767022002"
},
"totalfilesize": {
"type": "integer",
"format": "int64",
"description": "The total size of all uploaded files in bytes",
"example": "544332214"
},
"fileidlist": {
"type": "array",
"items": {
"type": "string"
},
"description": "An array of the IDs of all uploaded files",
"example": [
"cohng2weGh",
"see5Ohng9y",
"EoYiog4Che"
]
}
}
},
"chunkUploadResult": {
"type": "object",
"properties": {
@@ -1487,6 +2284,21 @@
"description": "Result after uploading a chunk",
"x-go-package": "Gokapi/internal/models"
},
"chunkReserveResult": {
"type": "object",
"properties": {
"Result": {
"type": "string",
"example": "OK"
},
"Uuid": {
"type": "string",
"example": "naPh9athuyeimie3uu8pingoyi2Sho"
}
},
"description": "Result after uploading a chunk",
"x-go-package": "Gokapi/internal/models"
},
"UploadResult": {
"type": "object",
"properties": {
@@ -1640,7 +2452,7 @@
"properties": {
"file": {
"type": "string",
"description": "The file to be uploaded",
"description": "The chunk to be uploaded",
"format": "binary"
},
"uuid": {
@@ -1,7 +0,0 @@
/**
*
* Base64 encode / decode
* http://www.webtoolkit.info/
*
**/
var Base64={_keyStr:"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",encode:function(r){var t,e,o,a,h,n,c,d="",C=0;for(r=Base64._utf8_encode(r);C<r.length;)a=(t=r.charCodeAt(C++))>>2,h=(3&t)<<4|(e=r.charCodeAt(C++))>>4,n=(15&e)<<2|(o=r.charCodeAt(C++))>>6,c=63&o,isNaN(e)?n=c=64:isNaN(o)&&(c=64),d=d+this._keyStr.charAt(a)+this._keyStr.charAt(h)+this._keyStr.charAt(n)+this._keyStr.charAt(c);return d},decode:function(r){var t,e,o,a,h,n,c="",d=0;for(r=r.replace(/[^A-Za-z0-9\+\/\=]/g,"");d<r.length;)t=this._keyStr.indexOf(r.charAt(d++))<<2|(a=this._keyStr.indexOf(r.charAt(d++)))>>4,e=(15&a)<<4|(h=this._keyStr.indexOf(r.charAt(d++)))>>2,o=(3&h)<<6|(n=this._keyStr.indexOf(r.charAt(d++))),c+=String.fromCharCode(t),64!=h&&(c+=String.fromCharCode(e)),64!=n&&(c+=String.fromCharCode(o));return c=Base64._utf8_decode(c)},_utf8_encode:function(r){r=r.replace(/\r\n/g,"\n");for(var t="",e=0;e<r.length;e++){var o=r.charCodeAt(e);o<128?t+=String.fromCharCode(o):o>127&&o<2048?(t+=String.fromCharCode(o>>6|192),t+=String.fromCharCode(63&o|128)):(t+=String.fromCharCode(o>>12|224),t+=String.fromCharCode(o>>6&63|128),t+=String.fromCharCode(63&o|128))}return t},_utf8_decode:function(r){for(var t="",e=0,o=c1=c2=0;e<r.length;)(o=r.charCodeAt(e))<128?(t+=String.fromCharCode(o),e++):o>191&&o<224?(c2=r.charCodeAt(e+1),t+=String.fromCharCode((31&o)<<6|63&c2),e+=2):(c2=r.charCodeAt(e+1),c3=r.charCodeAt(e+2),t+=String.fromCharCode((15&o)<<12|(63&c2)<<6|63&c3),e+=3);return t}};
+261 -45
View File
@@ -28,6 +28,20 @@ body {
-webkit-box-pack: center;
justify-content: center;
}
body::after {
content: "";
position: fixed;
top: 0;
left: 0;
width: 100%;
height: 100%;
/* This creates the inset border effect */
box-shadow: inset 0 0 5rem rgba(0, 0, 0, 0.5);
/* Essential: allows clicking buttons/inputs through the shadow */
pointer-events: none;
/* Ensures it stays above the background but below modals if needed */
z-index: 10;
}
td {
vertical-align: middle;
@@ -175,7 +189,7 @@ a:hover {
}
.toastdeprecation {
background-color: #8b0000;
background-color: #8b0000;
}
.toastnotification.show {
@@ -243,9 +257,17 @@ a:hover {
}
@keyframes perm-pulse {
0% { opacity: 1; }
50% { opacity: 0.5; }
100% { opacity: 1; }
0% {
opacity: 1;
}
50% {
opacity: 0.5;
}
100% {
opacity: 1;
}
}
.perm-nochange:hover {
@@ -257,9 +279,20 @@ a:hover {
}
@keyframes perm-nowgranted-pulse {
0% { transform: scale(1.15); color: #4dff4d; }
50% { transform: scale(1.3); color: #008800; }
100% { transform: scale(1.15); color: #0edf00; }
0% {
transform: scale(1.15);
color: #4dff4d;
}
50% {
transform: scale(1.3);
color: #008800;
}
100% {
transform: scale(1.15);
color: #0edf00;
}
}
.perm-nownotgranted {
@@ -267,15 +300,29 @@ a:hover {
}
/* Pulse animation for a permission icon that was just revoked:
   scales up briefly and fades from red back to a neutral gray. */
@keyframes perm-nownotgranted-pulse {
    0% {
        transform: scale(1.15);
        color: #ff4d4d;
    }

    50% {
        transform: scale(1.3);
        color: #ff0000;
    }

    100% {
        transform: scale(1.15);
        /* was "##9f9999" — a double hash is not a valid CSS color */
        color: #9f9999;
    }
}
.prevent-select {
-webkit-user-select: none; /* Safari */
-ms-user-select: none; /* IE 10 and IE 11 */
user-select: none; /* Standard syntax */
-webkit-user-select: none;
/* Safari */
-ms-user-select: none;
/* IE 10 and IE 11 */
user-select: none;
/* Standard syntax */
}
@@ -287,65 +334,234 @@ a:hover {
/* Define a subtle animation */
@keyframes subtleHighlight {
0% {
background-color: #444950; /* Light gray for dark background */
}
100% {
background-color: transparent; /* Original background */
}
0% {
background-color: #444950;
/* Light gray for dark background */
}
100% {
background-color: transparent;
/* Original background */
}
}
@keyframes subtleHighlightNewJson {
0% {
background-color: green; /* Pale green for new items */
}
100% {
background-color: transparent;
}
0% {
background-color: green;
/* Pale green for new items */
}
100% {
background-color: transparent;
}
}
/* Apply the animation to the updated table cells */
.updatedDownloadCount {
animation: subtleHighlight 0.5s ease-out;
animation: subtleHighlight 0.5s ease-out;
}
.newFileRequest {
animation: subtleHighlightNewJson 0.7s ease-out;
}
.newApiKey {
animation: subtleHighlightNewJson 0.7s ease-out;
animation: subtleHighlightNewJson 0.7s ease-out;
}
.newUser {
animation: subtleHighlightNewJson 0.7s ease-out;
animation: subtleHighlightNewJson 0.7s ease-out;
}
.newItem {
animation: subtleHighlightNewJson 1.5s ease-out;
animation: subtleHighlightNewJson 1.5s ease-out;
}
@keyframes fadeOut {
0% {
opacity: 1;
}
100% {
opacity: 0;
}
0% {
opacity: 1;
}
100% {
opacity: 0;
}
}
.rowDeleting {
animation: fadeOut 0.3s ease-out forwards;
animation: fadeOut 0.3s ease-out forwards;
}
.highlighted-password {
background-color: #444; /* Dark gray background for subtle contrast */
color: #ddd; /* Light gray text */
padding: 2px 6px;
border-radius: 4px;
font-weight: bold;
font-family: monospace;
display: inline-block; /* Keeps the styling inline but ensures proper padding */
margin-left: 8px; /* Adds space between the label and the password */
border: 1px solid #555; /* Slight border to define the element */
background-color: #444;
/* Dark gray background for subtle contrast */
color: #ddd;
/* Light gray text */
padding: 2px 6px;
border-radius: 4px;
font-weight: bold;
font-family: monospace;
display: inline-block;
/* Keeps the styling inline but ensures proper padding */
margin-left: 8px;
/* Adds space between the label and the password */
border: 1px solid #555;
/* Slight border to define the element */
}
/* Slightly lighter than table-dark */
.filelist-item {
background-color: rgba(255, 255, 255, 0.04);
}
.filelist-item:hover {
background-color: rgba(255, 255, 255, 0.08);
}
tr.no-bottom-border td {
border-bottom: none
}
.filerequest-item:hover>td {
background-color: rgba(255, 255, 255, 0.08);
}
.filerequest-item>td {
transition: background-color 0.15s ease-in-out;
}
.collapse-toggle i {
display: inline-block;
transition: transform 0.2s ease;
}
.collapse-toggle[aria-expanded="true"] i {
transform: rotate(180deg);
}
.collapse-toggle:hover {
opacity: 0.8;
}
.collapse-toggle {
padding: 0.25rem;
}
.remove-entry-btn:hover {
opacity: 0.8;
}
.upload-box {
border: 2px dashed #6c757d;
border-radius: 8px;
padding: 2rem;
cursor: pointer;
transition: background-color 0.2s ease;
}
.upload-box:hover {
background-color: rgba(255, 255, 255, 0.05);
}
.info-box {
background-color: rgba(255, 255, 255, 0.05);
border-radius: 6px;
padding: 1rem;
margin-bottom: 1.5rem;
text-align: left;
}
.info-box h6 {
margin-bottom: 0.5rem;
}
.info-box ul {
margin-bottom: 0;
padding-left: 1.2rem;
}
.callout {
padding: 20px;
margin: 10px 20px;
border: 1px solid #eee;
border-left-width: 5px;
border-radius: 3px;
h4 {
margin-top: 0;
margin-bottom: 5px;
}
p:last-child {
margin-bottom: 0;
}
code {
border-radius: 3px;
}
&+.bs-callout {
margin-top: -5px;
}
}
/* Drop zone for guest uploads.
   NOTE(review): ".upload-box" is also declared earlier in this stylesheet
   with a #6c757d dashed border and a hover background — at equal specificity
   this later declaration wins; confirm the duplication is intentional. */
.upload-box {
    border: 2px dashed rgba(255, 255, 255, 0.2);
    padding: 2rem;
    transition: all 0.2s ease;
    cursor: pointer;
    display: block;
}
/* Applied while a file is dragged over the drop zone. */
.upload-box.highlight {
    border-color: #0d6efd;
    background-color: rgba(13, 110, 253, 0.05);
}
/* Container for the list of files queued/uploaded in the public upload view. */
.pu-file-list {
    margin-top: 1.5rem;
}
/* One row per file: name | status | size, laid out with flexbox. */
.pu-file-item {
    display: flex;
    align-items: center;
    padding: 0.5rem 0;
    border-bottom: 1px solid rgba(255, 255, 255, 0.1);
    font-size: 0.95rem;
}
/* File name takes the remaining width; long names are ellipsized. */
.pu-file-item .file-name {
    flex: 1;
    text-align: left;
    overflow: hidden;
    white-space: nowrap;
    text-overflow: ellipsis;
    margin-right: 1rem;
}
/* Fixed-width status column (progress / error text), ellipsized. */
.pu-file-item .upload-status {
    width: 350px;
    text-align: right;
    margin-right: 1rem;
    flex-shrink: 0;
    opacity: 0.75;
    white-space: nowrap;
    overflow: hidden;
    text-overflow: ellipsis;
}
/* Fixed-width human-readable size column. */
.pu-file-item .file-size {
    width: 80px;
    text-align: right;
    margin-right: 12px;
    flex-shrink: 0;
    opacity: 0.75;
}
File diff suppressed because one or more lines are too long
+173 -22
View File
@@ -51,7 +51,7 @@ async function getToken(permission, forceRenewal) {
async function apiAuthModify(apiKey, permission, modifier) {
const apiUrl = './api/auth/modify';
const reqPerm = 'PERM_API_MOD';
let token;
try {
@@ -88,7 +88,7 @@ async function apiAuthModify(apiKey, permission, modifier) {
async function apiAuthFriendlyName(apiKey, newName) {
const apiUrl = './api/auth/friendlyname';
const reqPerm = 'PERM_API_MOD';
let token;
try {
@@ -124,7 +124,7 @@ async function apiAuthFriendlyName(apiKey, newName) {
async function apiAuthDelete(apiKey) {
const apiUrl = './api/auth/delete';
const reqPerm = 'PERM_API_MOD';
let token;
try {
@@ -158,7 +158,7 @@ async function apiAuthDelete(apiKey) {
async function apiAuthCreate() {
const apiUrl = './api/auth/create';
const reqPerm = 'PERM_API_MOD';
let token;
try {
@@ -199,7 +199,7 @@ async function apiAuthCreate() {
async function apiChunkComplete(uuid, filename, filesize, realsize, contenttype, allowedDownloads, expiryDays, password, isE2E, nonblocking) {
const apiUrl = './api/chunk/complete';
const reqPerm = 'PERM_UPLOAD';
let token;
try {
@@ -258,7 +258,7 @@ async function apiChunkComplete(uuid, filename, filesize, realsize, contenttype,
async function apiFilesReplace(id, newId) {
const apiUrl = './api/files/replace';
const reqPerm = 'PERM_REPLACE';
let token;
try {
@@ -295,7 +295,7 @@ async function apiFilesReplace(id, newId) {
async function apiFilesListById(fileId) {
const apiUrl = './api/files/list/' + fileId;
const reqPerm = 'PERM_VIEW';
let token;
try {
@@ -304,13 +304,12 @@ async function apiFilesListById(fileId) {
console.error("Unable to gain permission token:", error);
throw error;
}
const requestOptions = {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'apikey': token,
'apikey': token
},
};
@@ -328,10 +327,84 @@ async function apiFilesListById(fileId) {
}
// Requests a pre-signed, single-use download URL for one file.
// Resolves with the parsed JSON response; on success it contains
// "downloadUrl". Rejects if the token cannot be obtained or the
// request fails.
async function apiFilesListDownloadSingle(fileId) {
    const apiUrl = './api/files/download/' + fileId;
    const reqPerm = 'PERM_DOWNLOAD';

    let token;
    try {
        token = await getToken(reqPerm, false);
    } catch (error) {
        console.error("Unable to gain permission token:", error);
        throw error;
    }

    try {
        const response = await fetch(apiUrl, {
            method: 'GET',
            headers: {
                'Content-Type': 'application/json',
                'apikey': token,
                // Ask the server for a pre-signed URL instead of the file body
                'presignUrl': true
            },
        });
        if (!response.ok) {
            throw new Error(`Request failed with status: ${response.status}`);
        }
        return await response.json();
    } catch (error) {
        console.error("Error in apiFilesListDownloadSingle:", error);
        throw error;
    }
}
// Requests a pre-signed URL for downloading the given files as one zip
// archive. fileIds is sent via the "ids" header; the archive filename is
// base64-encoded ("base64:" prefix) so non-ANSI characters survive HTTP
// headers. Resolves with the parsed JSON response.
async function apiFilesListDownloadZip(fileIds, filename) {
    const apiUrl = './api/files/downloadzip';
    const reqPerm = 'PERM_DOWNLOAD';

    let token;
    try {
        token = await getToken(reqPerm, false);
    } catch (error) {
        console.error("Unable to gain permission token:", error);
        throw error;
    }

    try {
        const response = await fetch(apiUrl, {
            method: 'GET',
            headers: {
                'Content-Type': 'application/json',
                'apikey': token,
                'ids': fileIds,
                'filename': 'base64:' + Base64.encode(filename),
                // Ask the server for a pre-signed URL instead of the archive body
                'presignUrl': true
            },
        });
        if (!response.ok) {
            throw new Error(`Request failed with status: ${response.status}`);
        }
        return await response.json();
    } catch (error) {
        console.error("Error in apiFilesListDownloadZip:", error);
        throw error;
    }
}
async function apiFilesModify(id, allowedDownloads, expiry, password, originalPw) {
const apiUrl = './api/files/modify';
const reqPerm = 'PERM_EDIT';
let token;
try {
@@ -371,7 +444,7 @@ async function apiFilesModify(id, allowedDownloads, expiry, password, originalPw
async function apiFilesDelete(id, delay) {
const apiUrl = './api/files/delete';
const reqPerm = 'PERM_DELETE';
let token;
try {
@@ -406,7 +479,7 @@ async function apiFilesDelete(id, delay) {
async function apiFilesRestore(id) {
const apiUrl = './api/files/restore';
const reqPerm = 'PERM_DELETE';
let token;
try {
@@ -446,7 +519,7 @@ async function apiFilesRestore(id) {
async function apiUserCreate(userName) {
const apiUrl = './api/user/create';
const reqPerm = 'PERM_MANAGE_USERS';
let token;
try {
@@ -486,7 +559,7 @@ async function apiUserCreate(userName) {
async function apiUserModify(userId, permission, modifier) {
const apiUrl = './api/user/modify';
const reqPerm = 'PERM_MANAGE_USERS';
let token;
try {
@@ -523,7 +596,7 @@ async function apiUserModify(userId, permission, modifier) {
async function apiUserChangeRank(userId, newRank) {
const apiUrl = './api/user/changeRank';
const reqPerm = 'PERM_MANAGE_USERS';
let token;
try {
@@ -558,7 +631,7 @@ async function apiUserChangeRank(userId, newRank) {
async function apiUserDelete(id, deleteFiles) {
const apiUrl = './api/user/delete';
const reqPerm = 'PERM_MANAGE_USERS';
let token;
try {
@@ -594,7 +667,7 @@ async function apiUserDelete(id, deleteFiles) {
async function apiUserResetPassword(id, generatePw) {
const apiUrl = './api/user/resetPassword';
const reqPerm = 'PERM_MANAGE_USERS';
let token;
try {
@@ -632,7 +705,7 @@ async function apiUserResetPassword(id, generatePw) {
async function apiLogsDelete(timestamp) {
const apiUrl = './api/logs/delete';
const reqPerm = 'PERM_MANAGE_LOGS';
let token;
try {
@@ -668,7 +741,7 @@ async function apiLogsDelete(timestamp) {
async function apiE2eGet() {
const apiUrl = './api/e2e/get';
const reqPerm = 'PERM_UPLOAD';
let token;
try {
@@ -703,7 +776,7 @@ async function apiE2eGet() {
async function apiE2eStore(content) {
const apiUrl = './api/e2e/set';
const reqPerm = 'PERM_UPLOAD';
let token;
try {
@@ -720,7 +793,7 @@ async function apiE2eStore(content) {
'apikey': token
},
body: JSON.stringify({
content: content
'content': content
}),
};
@@ -734,3 +807,81 @@ async function apiE2eStore(content) {
throw error;
}
}
// Upload Requests
// Deletes the upload request with the given ID, including all files that
// were uploaded through it. Resolves with no value on success; rejects if
// the token cannot be obtained or the server responds with a non-OK status.
async function apiURequestDelete(id) {
    const apiUrl = './api/uploadrequest/delete';
    const reqPerm = 'PERM_MANAGE_FILE_REQUESTS';

    let token;
    try {
        token = await getToken(reqPerm, false);
    } catch (error) {
        console.error("Unable to gain permission token:", error);
        throw error;
    }

    try {
        const response = await fetch(apiUrl, {
            method: 'DELETE',
            headers: {
                'Content-Type': 'application/json',
                'apikey': token,
                'id': id
            },
        });
        if (!response.ok) {
            throw new Error(`Request failed with status: ${response.status}`);
        }
    } catch (error) {
        console.error("Error in apiURequestDelete:", error);
        throw error;
    }
}
// Creates a new upload request (when id is empty) or updates an existing
// one. name and notes are base64-encoded with a "base64:" prefix so
// non-ANSI characters survive HTTP headers. Resolves with the parsed
// FileRequest JSON returned by the server.
async function apiURequestSave(id, name, maxfiles, maxsize, expiry, notes) {
    const apiUrl = './api/uploadrequest/save';
    const reqPerm = 'PERM_MANAGE_FILE_REQUESTS';

    let token;
    try {
        token = await getToken(reqPerm, false);
    } catch (error) {
        console.error("Unable to gain permission token:", error);
        throw error;
    }

    const requestOptions = {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'apikey': token,
            'id': id,
            'name': 'base64:' + Base64.encode(name),
            'expiry': expiry,
            'maxfiles': maxfiles,
            'maxsize': maxsize,
            'notes': 'base64:' + Base64.encode(notes),
        },
    };

    try {
        const response = await fetch(apiUrl, requestOptions);
        if (!response.ok) {
            throw new Error(`Request failed with status: ${response.status}`);
        }
        const data = await response.json();
        return data;
    } catch (error) {
        // Fix: previously logged "Error in apiURequestDelete" (copy-paste error)
        console.error("Error in apiURequestSave:", error);
        throw error;
    }
}
@@ -5,8 +5,7 @@
try {
var clipboard = new ClipboardJS('.copyurl');
} catch (ignored) {
}
} catch (ignored) {}
var toastId;
@@ -28,3 +27,80 @@ function hideToast() {
document.getElementById("toastnotification").classList.remove("show");
}
var calendarInstance = null;
// Initialises a flatpickr date/time picker on the element with the given id,
// preselected to the supplied unix timestamp. The instance is kept in
// calendarInstance so handleEditCheckboxChange can toggle its input.
function createCalendar(element, timestamp) {
    const target = document.getElementById(element);
    calendarInstance = flatpickr(target, {
        enableTime: true,
        dateFormat: 'U', // Unix timestamp
        altInput: true,
        altFormat: 'Y-m-d H:i',
        allowInput: true,
        time_24hr: true,
        defaultDate: new Date(timestamp * 1000),
        minDate: 'today',
    });
}
// Enables or disables the form control referenced by the checkbox's
// data-toggle-target attribute. When a data-timestamp attribute is present,
// the flatpickr input (calendarInstance) is toggled alongside it.
function handleEditCheckboxChange(checkbox) {
    const target = document.getElementById(checkbox.getAttribute("data-toggle-target"));
    const timestamp = checkbox.getAttribute("data-timestamp");
    const enabled = checkbox.checked;
    if (timestamp != null) {
        calendarInstance._input.disabled = !enabled;
    }
    if (enabled) {
        target.classList.remove("disabled");
        target.removeAttribute("disabled");
    } else {
        target.classList.add("disabled");
        target.setAttribute("disabled", true);
    }
}
// Fetches a presigned download URL for a single file and triggers the
// download by clicking a temporary, hidden anchor element.
function downloadFileWithPresign(id) {
    apiFilesListDownloadSingle(id)
        .then(data => {
            if (!Object.prototype.hasOwnProperty.call(data, "downloadUrl")) {
                throw new Error("Unable to get presigned key");
            }
            const link = document.createElement('a');
            link.href = data.downloadUrl;
            link.style.display = 'none';
            document.body.appendChild(link);
            link.click();
            link.remove();
        })
        .catch(error => {
            alert("Unable to download: " + error);
            console.error('Error:', error);
        });
}
// Fetches a presigned URL for a zip of the given file ids and triggers the
// download by clicking a temporary, hidden anchor element.
function downloadFilesZipWithPresign(ids, filename) {
    apiFilesListDownloadZip(ids, filename)
        .then(data => {
            if (!Object.prototype.hasOwnProperty.call(data, "downloadUrl")) {
                throw new Error("Unable to get presigned key");
            }
            const link = document.createElement('a');
            link.href = data.downloadUrl;
            link.style.display = 'none';
            document.body.appendChild(link);
            link.click();
            link.remove();
        })
        .catch(error => {
            alert("Unable to download: " + error);
            console.error('Error:', error);
        });
}
@@ -209,7 +209,7 @@ function addRowApi(apiKey, publicId) {
},
{
perm: 'PERM_UPLOAD',
icon: 'bi-file-earmark-arrow-up',
icon: 'bi-file-earmark-plus',
granted: true,
title: 'Upload'
},
@@ -231,6 +231,18 @@ function addRowApi(apiKey, publicId) {
granted: false,
title: 'Replace Uploads'
},
{
perm: 'PERM_DOWNLOAD',
icon: 'bi-box-arrow-in-down',
granted: false,
title: 'Download Files'
},
{
perm: 'PERM_MANAGE_FILE_REQUESTS',
icon: 'bi-file-earmark-arrow-up',
granted: false,
title: 'Manage File Requests'
},
{
perm: 'PERM_MANAGE_USERS',
icon: 'bi-people',
@@ -283,6 +295,11 @@ function addRowApi(apiKey, publicId) {
cell.classList.add("perm-unavailable");
cell.classList.add("perm-nochange");
}
if (!canCreateFileRequest) {
let cell = document.getElementById("perm_manage_file_requests_" + publicId);
cell.classList.add("perm-unavailable");
cell.classList.add("perm-nochange");
}
setTimeout(() => {
cellFriendlyName.classList.remove("newApiKey");
@@ -0,0 +1,363 @@
// This file contains JS code for the API view
// All files named admin_*.js will be merged together and minimised by calling
// go generate ./...
// Deletes a file request through the API, then fades the matching table
// row(s) out (CSS class "rowDeleting") before removing them from the DOM.
function deleteFileRequest(requestId) {
    document.getElementById("delete-" + requestId).disabled = true;
    apiURequestDelete(requestId)
        .then(data => {
            const rows = [document.getElementById("row-" + requestId)];
            const fileListRow = document.getElementById("filelist-" + requestId);
            if (fileListRow !== null) {
                rows.push(fileListRow);
            }
            rows.forEach(row => row.classList.add("rowDeleting"));
            // Matches the CSS fade-out duration before removing the rows.
            setTimeout(() => {
                rows.forEach(row => row.remove());
            }, 290);
        })
        .catch(error => {
            alert("Unable to delete file request: " + error);
            console.error('Error:', error);
        });
}
// Deletes the request immediately when it holds no uploaded files;
// otherwise asks the user for confirmation first.
function deleteOrShowModal(requestId, requestName, count) {
    if (count !== 0) {
        showDeleteFRequestModal(requestId, requestName, count);
        return;
    }
    deleteFileRequest(requestId);
}
// Fills and displays the delete-confirmation modal and wires its confirm
// button to perform the actual deletion.
function showDeleteFRequestModal(requestId, requestName, count) {
    document.getElementById("deleteModalBodyName").innerText = requestName;
    document.getElementById("deleteModalBodyCount").innerText = count;
    $('#deleteModal').modal('show');
    document.getElementById("buttonDelete").onclick = () => {
        $('#deleteModal').modal('hide');
        deleteFileRequest(requestId);
    };
}
// Opens the add/edit modal pre-filled with the locally stored defaults to
// create a brand-new file request; saving also refreshes those defaults.
function newFileRequest() {
    loadFileRequestDefaults();
    document.getElementById("m_urequestlabel").innerText = "New File Request";
    $('#addEditModal').modal('show');
    document.getElementById("b_fr_save").onclick = () => {
        saveFileRequestDefaults();
        saveFileRequest();
        $('#addEditModal').modal('hide');
    };
}
// Persists the currently entered limits to localStorage so the next
// "new file request" dialog starts with the same values. A stored 0 means
// the limit is disabled; expiry is stored as a relative offset in seconds.
function saveFileRequestDefaults() {
    const maxFiles = document.getElementById("mc_maxfiles").checked
        ? document.getElementById("mi_maxfiles").value
        : 0;
    localStorage.setItem("fr_maxfiles", maxFiles);
    const maxSize = document.getElementById("mc_maxsize").checked
        ? document.getElementById("mi_maxsize").value
        : 0;
    localStorage.setItem("fr_maxsize", maxSize);
    let expiryOffset = 0;
    if (document.getElementById("mc_expiry").checked) {
        expiryOffset = document.getElementById("mi_expiry").value - Math.round(Date.now() / 1000);
    }
    localStorage.setItem("fr_expiry", expiryOffset);
}
// Restores the defaults saved by saveFileRequestDefaults and applies them to
// the modal. The stored expiry is a relative offset in seconds; it is turned
// back into an absolute timestamp, snapped to noon of the resulting day.
function loadFileRequestDefaults() {
    const maxFiles = localStorage.getItem("fr_maxfiles");
    const maxSize = localStorage.getItem("fr_maxsize");
    let expiry = localStorage.getItem("fr_expiry");
    if (expiry !== null && expiry !== "0") {
        const target = new Date(Date.now() + Number(expiry) * 1000);
        target.setHours(12, 0, 0, 0);
        expiry = Math.floor(target.getTime() / 1000);
    }
    setModalValues("", "", maxFiles, maxSize, expiry, "");
}
// Fills the add/edit file-request modal with the given values. A null/0
// value for maxFiles, maxSize or expiry means "unlimited"/"never" and leaves
// the matching checkbox unchecked. The globals limitMaxFiles/limitMaxSize
// (presumably injected by the page template for non-admin users — verify)
// force an upper bound and lock the corresponding checkbox when non-zero.
// NOTE: the limit blocks run first on purpose — they may rewrite
// maxFiles/maxSize before the default-value blocks below read them.
function setModalValues(id, name, maxFiles, maxSize, expiry, notes) {
    document.getElementById("freqId").value = id;
    if (name === null) {
        document.getElementById("mFriendlyName").value = "";
    } else {
        document.getElementById("mFriendlyName").value = name;
    }
    // Enforced maximum file count: force-enable the checkbox and cap the input.
    if (limitMaxFiles != 0) {
        let checkbox = document.getElementById("mc_maxfiles");
        if (maxFiles === null || maxFiles == 0) {
            maxFiles = limitMaxFiles;
        }
        checkbox.checked = true;
        checkbox.disabled = true;
        checkbox.title = "Only admins can set this to unlimited";
        checkbox.value = "1";
        document.getElementById("mi_maxfiles").setAttribute("max", limitMaxFiles);
    } else {
        let checkbox = document.getElementById("mc_maxfiles");
        checkbox.disabled = false;
        checkbox.title = "";
        document.getElementById("mi_maxfiles").setAttribute("max", "");
    }
    // Same handling for the enforced maximum total size.
    if (limitMaxSize != 0) {
        let checkbox = document.getElementById("mc_maxsize");
        if (maxSize === null || maxSize == 0) {
            maxSize = limitMaxSize;
        }
        checkbox.checked = true;
        checkbox.disabled = true;
        checkbox.title = "Only admins can set this to unlimited";
        checkbox.value = "1";
        document.getElementById("mi_maxsize").setAttribute("max", limitMaxSize);
    } else {
        let checkbox = document.getElementById("mc_maxsize");
        checkbox.disabled = false;
        checkbox.title = "";
        document.getElementById("mi_maxsize").setAttribute("max", "");
    }
    // Default/disabled state for the max-files input ("1" as placeholder value).
    if (maxFiles === null || maxFiles == 0) {
        document.getElementById("mi_maxfiles").value = "1";
        document.getElementById("mi_maxfiles").disabled = true;
        document.getElementById("mc_maxfiles").checked = false;
    } else {
        document.getElementById("mi_maxfiles").value = maxFiles;
        document.getElementById("mi_maxfiles").disabled = false;
        document.getElementById("mc_maxfiles").checked = true;
    }
    // Default/disabled state for the max-size input ("10" as placeholder value).
    if (maxSize === null || maxSize == 0) {
        document.getElementById("mi_maxsize").value = "10";
        document.getElementById("mi_maxsize").disabled = true;
        document.getElementById("mc_maxsize").checked = false;
    } else {
        document.getElementById("mi_maxsize").value = maxSize;
        document.getElementById("mi_maxsize").disabled = false;
        document.getElementById("mc_maxsize").checked = true;
    }
    // Expiry: defaults to 14 days from now when unset; the calendar widget is
    // (re)created in both branches so it reflects the shown timestamp.
    if (expiry === null || expiry == 0) {
        const defaultDate = Math.floor(new Date(Date.now() + (14 * 24 * 60 * 60 * 1000)).getTime() / 1000);
        document.getElementById("mi_expiry").disabled = true;
        document.getElementById("mc_expiry").checked = false;
        document.getElementById("mi_expiry").value = defaultDate;
        createCalendar("mi_expiry", defaultDate);
    } else {
        document.getElementById("mi_expiry").value = expiry;
        document.getElementById("mi_expiry").disabled = false;
        document.getElementById("mc_expiry").checked = true;
        createCalendar("mi_expiry", expiry);
    }
    document.getElementById("mNotes").value = notes;
}
// Opens the add/edit modal pre-filled with an existing request's values and
// wires the save button to persist the edit.
function editFileRequest(id, name, maxFiles, maxSize, expiry, notes) {
    setModalValues(id, name, maxFiles, maxSize, expiry, notes);
    document.getElementById("m_urequestlabel").innerText = "Edit File Request";
    $('#addEditModal').modal('show');
    document.getElementById("b_fr_save").onclick = () => {
        saveFileRequest();
        $('#addEditModal').modal('hide');
    };
}
// Reads the modal form, submits it to the API and inserts or updates the
// corresponding table row. A value of 0 means "no limit" / "never expires".
function saveFileRequest() {
    const buttonSave = document.getElementById("b_fr_save");
    const id = document.getElementById("freqId").value;
    const name = document.getElementById("mFriendlyName").value;
    const notes = document.getElementById("mNotes").value;
    // Reads the input value only when its enable-checkbox is ticked.
    const readIfChecked = (checkboxId, inputId) =>
        document.getElementById(checkboxId).checked
            ? document.getElementById(inputId).value
            : 0;
    const maxFiles = readIfChecked("mc_maxfiles", "mi_maxfiles");
    const maxSize = readIfChecked("mc_maxsize", "mi_maxsize");
    const expiry = readIfChecked("mc_expiry", "mi_expiry");
    buttonSave.disabled = true;
    apiURequestSave(id, name, maxFiles, maxSize, expiry, notes)
        .then(data => {
            document.getElementById("b_fr_save").disabled = false;
            insertOrReplaceFileRequest(data);
        })
        .catch(error => {
            alert("Unable to save file request: " + error);
            console.error('Error:', error);
            document.getElementById("b_fr_save").disabled = false;
        });
}
// Clamps a numeric <input> element to its "max" attribute. An empty value
// falls back to "1"; inputs whose max attribute is empty or missing are left
// untouched.
// Fix: the previous version compared the string values with ">", so e.g.
// "9" was considered greater than "10" and wrongly rewritten to the max;
// it also dereferenced a missing max attribute (null) without a guard.
function checkMaxNumber(element) {
    if (element.value == "") {
        element.value = "1";
        return;
    }
    const maxVal = element.getAttribute("max");
    if (maxVal === null || maxVal == "") {
        return;
    }
    if (Number(element.value) > Number(maxVal)) {
        element.value = maxVal;
    }
}
// Replaces the table row of an already-listed request (keeping its owner
// column), or prepends a new row that is briefly highlighted.
function insertOrReplaceFileRequest(jsonResult) {
    const existing = document.getElementById(`row-${jsonResult.id}`);
    if (existing) {
        const owner = document.getElementById(`cell-username-${jsonResult.id}`).innerText;
        existing.replaceWith(createFileRequestRow(jsonResult, owner));
        return;
    }
    const row = createFileRequestRow(jsonResult, userName);
    row.querySelectorAll('td').forEach(cell => {
        cell.classList.add("newFileRequest");
        setTimeout(() => {
            cell.classList.remove("newFileRequest");
        }, 700);
    });
    document.getElementById("filerequesttable").prepend(row);
}
// Builds one <tr> for the file-request overview table from an API result.
// `user` is only rendered when canViewOtherRequests is truthy (presumably a
// template-injected flag for admins — verify against the page template).
function createFileRequestRow(jsonResult, user) {
    // Creates a plain text cell.
    function tdText(text) {
        const td = document.createElement("td");
        td.textContent = text;
        return td;
    }
    // Creates a cell containing a link that opens in a new tab.
    function tdLink(text, href) {
        const td = document.createElement("td");
        const a = document.createElement("a");
        a.textContent = text;
        a.href = href;
        a.target = "_blank";
        td.appendChild(a);
        return td;
    }
    // Creates a Bootstrap icon element.
    function icon(classes) {
        const i = document.createElement("i");
        i.className = `bi ${classes}`;
        return i;
    }
    // Public page where guests can upload files for this request.
    const publicUrl = `${baseUrl}publicUpload?id=${jsonResult.id}&key=${jsonResult.apikey}`;
    const tr = document.createElement("tr");
    tr.id = `row-${jsonResult.id}`;
    tr.className = "filerequest-item";
    // Name (links to the public upload page)
    tr.appendChild(tdLink(jsonResult.name, publicUrl));
    // Uploaded files / Max files (maxfiles == 0 means unlimited)
    if (jsonResult.maxfiles == 0) {
        tr.appendChild(tdText(jsonResult.uploadedfiles));
    } else {
        tr.appendChild(tdText(`${jsonResult.uploadedfiles} / ${jsonResult.maxfiles}`));
    }
    // Total size
    tr.appendChild(tdText(getReadableSize(jsonResult.totalfilesize)));
    // Last upload
    tr.appendChild(tdText(formatTimestampWithNegative(jsonResult.lastupload, "None")));
    // Expiry
    tr.appendChild(tdText(formatFileRequestExpiry(jsonResult.expiry)));
    // Optional user column (id is read back by insertOrReplaceFileRequest)
    if (canViewOtherRequests) {
        let userTd = tdText(user);
        userTd.id = `cell-username-${jsonResult.id}`;
        tr.appendChild(userTd);
    }
    // Buttons
    const td = document.createElement("td");
    const group = document.createElement("div");
    group.className = "btn-group";
    group.role = "group";
    // Download (disabled while the request holds no files)
    const downloadBtn = document.createElement("button");
    downloadBtn.id = `download-${jsonResult.id}`;
    downloadBtn.type = "button";
    downloadBtn.className = "btn btn-outline-light btn-sm";
    downloadBtn.title = "Download all";
    if (jsonResult.uploadedfiles == 0) {
        downloadBtn.classList.add("disabled");
    }
    downloadBtn.appendChild(icon("bi-download"));
    // Copy: the "copyurl" class is picked up by clipboard.js; the click
    // handler only shows a short toast as feedback.
    const copyBtn = document.createElement("button");
    copyBtn.id = `copy-${jsonResult.id}`;
    copyBtn.type = "button";
    copyBtn.className = "copyurl btn btn-outline-light btn-sm";
    copyBtn.title = "Copy URL";
    copyBtn.setAttribute("data-clipboard-text", publicUrl);
    copyBtn.onclick = () =>
        showToast(1000);
    copyBtn.appendChild(icon("bi-copy"));
    // Edit
    const editBtn = document.createElement("button");
    editBtn.id = `edit-${jsonResult.id}`;
    editBtn.type = "button";
    editBtn.className = "btn btn-outline-light btn-sm";
    editBtn.title = "Edit request";
    editBtn.onclick = () =>
        editFileRequest(jsonResult.id, jsonResult.name, jsonResult.maxfiles, jsonResult.maxsize, jsonResult.expiry, jsonResult.notes);
    editBtn.appendChild(icon("bi-pencil"));
    // Delete (asks for confirmation when the request already holds files)
    const deleteBtn = document.createElement("button");
    deleteBtn.id = `delete-${jsonResult.id}`;
    deleteBtn.type = "button";
    deleteBtn.className = "btn btn-outline-danger btn-sm";
    deleteBtn.title = "Delete";
    deleteBtn.onclick = () =>
        deleteOrShowModal(jsonResult.id, jsonResult.name, jsonResult.uploadedfiles);
    deleteBtn.appendChild(icon("bi-trash3"));
    group.append(downloadBtn, copyBtn, editBtn, deleteBtn);
    td.appendChild(group);
    tr.appendChild(td);
    return tr;
}
@@ -385,47 +385,6 @@ function editFile() {
});
}
var calendarInstance = null;
// Creates a flatpickr date/time picker on the #mi_edit_expiry input,
// preselected to the given unix timestamp. The instance is stored in
// calendarInstance so handleEditCheckboxChange can toggle its input.
function createCalendar(timestamp) {
    // Convert Unix timestamp to JavaScript Date object
    const expiryDate = new Date(timestamp * 1000);
    calendarInstance = flatpickr('#mi_edit_expiry', {
        enableTime: true,
        dateFormat: 'U', // Unix timestamp
        altInput: true,
        altFormat: 'Y-m-d H:i',
        allowInput: true,
        time_24hr: true,
        defaultDate: expiryDate,
        minDate: 'today',
    });
}
// Enables or disables the form control referenced by the checkbox's
// data-toggle-target attribute. When a data-timestamp attribute is present,
// the flatpickr input (calendarInstance) is toggled alongside it.
function handleEditCheckboxChange(checkbox) {
    const target = document.getElementById(checkbox.getAttribute("data-toggle-target"));
    const timestamp = checkbox.getAttribute("data-timestamp");
    const enabled = checkbox.checked;
    if (timestamp != null) {
        calendarInstance._input.disabled = !enabled;
    }
    if (enabled) {
        target.classList.remove("disabled");
        target.removeAttribute("disabled");
    } else {
        target.classList.add("disabled");
        target.setAttribute("disabled", true);
    }
}
function showEditModal(filename, id, downloads, expiry, password, unlimitedown, unlimitedtime, isE2e, canReplace) {
// Cloning removes any previous values or form validation
let originalModal = $('#modaledit').clone();
@@ -438,7 +397,7 @@ function showEditModal(filename, id, downloads, expiry, password, unlimitedown,
document.getElementById("m_filenamelabel").innerText = filename;
document.getElementById("mc_expiry").setAttribute("data-timestamp", expiry);
document.getElementById("mb_save").setAttribute('data-fileid', id);
createCalendar(expiry);
createCalendar("mi_edit_expiry", expiry);
if (unlimitedown) {
document.getElementById("mi_edit_down").value = "1";
@@ -843,10 +802,30 @@ function createButtonGroup(item) {
dropdown2.appendChild(emailLi);
group1.appendChild(dropdown2);
// Button group for Edit/Delete
// Button group for Download/Edit/Delete
const group2 = document.createElement("div");
group2.className = "btn-group me-2";
group2.setAttribute("role", "group");
// === Button: Download ===
const btnDownload = document.createElement('button');
btnDownload.type = 'button';
btnDownload.className = 'btn btn-outline-light btn-sm';
btnDownload.title = 'Download';
if (item.RequiresClientSideDecryption) {
btnDownload.classList.add("disabled");
}
const downloadIcon = document.createElement('i');
downloadIcon.className = 'bi bi-download';
btnDownload.appendChild(downloadIcon);
btnDownload.addEventListener('click', () => {
downloadFileWithPresign(item.Id);
});
group2.appendChild(btnDownload);
// === Button: Edit ===
const btnEdit = document.createElement('button');
@@ -79,7 +79,7 @@ function changeRank(userId, newRank, buttonId) {
function showDeleteModal(userId, userEmail) {
function showDeleteUserModal(userId, userEmail) {
let checkboxDelete = document.getElementById("checkboxDelete");
checkboxDelete.checked = false;
document.getElementById("deleteModalBody").innerText = userEmail;
@@ -173,7 +173,8 @@ function addNewUser() {
apiUserCreate(editName.value.trim())
.then(data => {
$('#newUserModal').modal('hide');
addRowUser(data.id, data.name);
addRowUser(data.id, data.name, data.permissions);
console.log(data);
})
.catch(error => {
if (error.message == "duplicate") {
@@ -190,8 +191,87 @@ function addNewUser() {
// Table of user permissions rendered as clickable icons in the user list.
// Each entry maps a permission flag to its display icon/title, the DOM id of
// its icon cell, and the name sent to the permission API.
// NOTE(review): the bit values presumably mirror server-side permission
// constants — verify against the backend before changing them.
const PermissionDefinitions = [
    {
        key: "UserPermGuestUploads",
        bit: 1 << 8,
        icon: "bi bi-box-arrow-in-down",
        title: "Create file requests",
        htmlId: userid => `perm_guest_upload_${userid}`,
        apiName: "PERM_GUEST_UPLOAD"
    },
    {
        key: "UserPermReplaceUploads",
        bit: 1 << 0,
        icon: "bi bi-recycle",
        title: "Replace own uploads",
        htmlId: userid => `perm_replace_${userid}`,
        apiName: "PERM_REPLACE"
    },
    {
        key: "UserPermListOtherUploads",
        bit: 1 << 1,
        icon: "bi bi-eye",
        title: "List other uploads",
        htmlId: userid => `perm_list_${userid}`,
        apiName: "PERM_LIST"
    },
    {
        key: "UserPermEditOtherUploads",
        bit: 1 << 2,
        icon: "bi bi-pencil",
        title: "Edit other uploads",
        htmlId: userid => `perm_edit_${userid}`,
        apiName: "PERM_EDIT"
    },
    {
        key: "UserPermDeleteOtherUploads",
        bit: 1 << 4,
        icon: "bi bi-trash3",
        title: "Delete other uploads",
        htmlId: userid => `perm_delete_${userid}`,
        apiName: "PERM_DELETE"
    },
    {
        key: "UserPermReplaceOtherUploads",
        bit: 1 << 3,
        icon: "bi bi-arrow-left-right",
        title: "Replace other uploads",
        htmlId: userid => `perm_replace_other_${userid}`,
        apiName: "PERM_REPLACE_OTHER"
    },
    {
        key: "UserPermManageLogs",
        bit: 1 << 5,
        icon: "bi bi-card-list",
        title: "Manage system logs",
        htmlId: userid => `perm_logs_${userid}`,
        apiName: "PERM_LOGS"
    },
    {
        key: "UserPermManageUsers",
        bit: 1 << 7,
        icon: "bi bi-people",
        title: "Manage users",
        htmlId: userid => `perm_users_${userid}`,
        apiName: "PERM_USERS"
    },
    {
        key: "UserPermManageApiKeys",
        bit: 1 << 6,
        icon: "bi bi-sliders2",
        title: "Manage API keys",
        htmlId: userid => `perm_api_${userid}`,
        apiName: "PERM_API"
    }
];
function addRowUser(userid, name) {
// Reports whether the given permission bit is set in the permission bitmask.
function hasPermission(userPermissions, permissionBit) {
    return Boolean(userPermissions & permissionBit);
}
function addRowUser(userid, name, permissions) {
userid = sanitizeUserId(userid);
@@ -251,7 +331,7 @@ function addRowUser(userid, name) {
btnDelete.type = "button";
btnDelete.className = "btn btn-outline-danger btn-sm";
btnDelete.title = "Delete";
btnDelete.onclick = () => showDeleteModal(userid, name);
btnDelete.onclick = () => showDeleteUserModal(userid, name);
btnDelete.innerHTML = `<i class="bi bi-trash3"></i>`;
btnGroup.appendChild(btnDelete);
@@ -260,23 +340,20 @@ function addRowUser(userid, name) {
cellActions.appendChild(btnGroup);
// Permissions
cellPermissions.innerHTML = `
<i id="perm_replace_${userid}" class="bi bi-recycle perm-notgranted " title="Replace own uploads" onclick='changeUserPermission(${userid},"PERM_REPLACE", "perm_replace_${userid}");'></i>
cellPermissions.innerHTML = PermissionDefinitions.map(perm => {
const granted = hasPermission(permissions, perm.bit)
? "perm-granted"
: "perm-notgranted";
<i id="perm_list_${userid}" class="bi bi-eye perm-notgranted " title="List other uploads" onclick='changeUserPermission(${userid},"PERM_LIST", "perm_list_${userid}");'></i>
<i id="perm_edit_${userid}" class="bi bi-pencil perm-notgranted " title="Edit other uploads" onclick='changeUserPermission(${userid},"PERM_EDIT", "perm_edit_${userid}");'></i>
<i id="perm_delete_${userid}" class="bi bi-trash3 perm-notgranted " title="Delete other uploads" onclick='changeUserPermission(${userid},"PERM_DELETE", "perm_delete_${userid}");'></i>
<i id="perm_replace_other_${userid}" class="bi bi-arrow-left-right perm-notgranted " title="Replace other uploads" onclick='changeUserPermission(${userid},"PERM_REPLACE_OTHER", "perm_replace_other_${userid}");'></i>
<i id="perm_logs_${userid}" class="bi bi-card-list perm-notgranted " title="Manage system logs" onclick='changeUserPermission(${userid},"PERM_LOGS", "perm_logs_${userid}");'></i>
<i id="perm_users_${userid}" class="bi bi-people perm-notgranted " title="Manage users" onclick='changeUserPermission(${userid},"PERM_USERS", "perm_users_${userid}");'></i>
<i id="perm_api_${userid}" class="bi bi-sliders2 perm-notgranted " title="Manage API keys" onclick='changeUserPermission(${userid},"PERM_API", "perm_api_${userid}");'></i>`;
const id = perm.htmlId(userid);
return `
<i id="${id}"
class="${perm.icon} ${granted}"
title="${perm.title}"
onclick='changeUserPermission(${userid}, "${perm.apiName}", "${id}")'>
</i>`;
}).join("");
setTimeout(() => {
@@ -0,0 +1,116 @@
// Returns an RFC 4122 version-4 UUID string. Prefers the native
// crypto.randomUUID, then a CSPRNG-backed construction via getRandomValues,
// and finally falls back to Math.random (NOT cryptographically secure —
// only reached in insecure/legacy environments).
function getUuid() {
    if (typeof crypto !== "undefined" && crypto.randomUUID) {
        return crypto.randomUUID();
    }
    if (typeof crypto !== "undefined" && crypto.getRandomValues) {
        const bytes = new Uint8Array(16);
        crypto.getRandomValues(bytes);
        // RFC 4122 compliance
        bytes[6] = (bytes[6] & 0x0f) | 0x40; // version 4
        bytes[8] = (bytes[8] & 0x3f) | 0x80; // variant 10
        let out = "";
        for (let i = 0; i < 16; i++) {
            if (i === 4 || i === 6 || i === 8 || i === 10) {
                out += "-";
            }
            out += bytes[i].toString(16).padStart(2, "0");
        }
        return out;
    }
    // Last-resort fallback using Math.random.
    let uuid = "";
    for (let i = 0; i < 36; i++) {
        if (i === 8 || i === 13 || i === 18 || i === 23) {
            uuid += "-";
        } else if (i === 14) {
            uuid += "4";
        } else {
            const r = Math.random() * 16 | 0;
            uuid += (i === 19 ? (r & 0x3) | 0x8 : r).toString(16);
        }
    }
    return uuid;
}
// Formats a unix timestamp (seconds) as "YYYY-MM-DD HH:MM" in local time.
function formatUnixTimestamp(unixTimestamp) {
    const d = new Date(unixTimestamp * 1000);
    const two = (n) => String(n).padStart(2, '0');
    return d.getFullYear() +
        '-' + two(d.getMonth() + 1) + // months are 0-based
        '-' + two(d.getDate()) +
        ' ' + two(d.getHours()) +
        ':' + two(d.getMinutes());
}
// Returns the formatted timestamp, or the given fallback text for a
// timestamp of 0 ("Never" when no fallback is supplied).
function formatTimestampWithNegative(unixTimestamp, negative) {
    const fallback = negative === undefined ? "Never" : negative;
    if (unixTimestamp != 0) {
        return formatUnixTimestamp(unixTimestamp);
    }
    return fallback;
}
// Writes the formatted timestamp into the element with the given id.
function insertFormattedDate(unixTimestamp, id) {
    const target = document.getElementById(id);
    target.innerText = formatUnixTimestamp(unixTimestamp);
}
// Writes the formatted timestamp (or the fallback text for 0) into the
// element with the given id.
function insertDateWithNegative(unixTimestamp, id, negative) {
    const target = document.getElementById(id);
    target.innerText = formatTimestampWithNegative(unixTimestamp, negative);
}
// Shows "Online" when the timestamp lies within the last two minutes,
// otherwise the formatted date (or the default fallback for 0).
function insertLastOnlineDate(unixTimestamp, id) {
    const twoMinutesAgo = (Date.now() / 1000) - 120;
    if (twoMinutesAgo < unixTimestamp) {
        document.getElementById(id).innerText = "Online";
        return;
    }
    insertDateWithNegative(unixTimestamp, id);
}
// Describes a file-request expiry: "Never" for 0, "Expired" for timestamps
// in the past, otherwise the formatted date.
function formatFileRequestExpiry(unixTimestamp) {
    if (unixTimestamp == 0) {
        return "Never";
    }
    if (unixTimestamp < (Date.now() / 1000)) {
        return "Expired";
    }
    return formatUnixTimestamp(unixTimestamp);
}
// Writes the human-readable expiry description into the element with the
// given id.
function insertFileRequestExpiry(unixTimestamp, id) {
    const target = document.getElementById(id);
    target.innerText = formatFileRequestExpiry(unixTimestamp);
}
// Converts a byte count into a human-readable string with one decimal,
// e.g. 1536 -> "1.5 kB". Falsy input (0, null, undefined) yields "0 B".
function getReadableSize(bytes) {
    if (!bytes) return "0 B";
    const units = ["B", "kB", "MB", "GB", "TB"];
    let value = bytes;
    let idx = 0;
    for (; value >= 1024 && idx < units.length - 1; idx++) {
        value /= 1024;
    }
    return `${value.toFixed(1)} ${units[idx]}`;
}
// Writes the human-readable size of bytes * multiplier into the element
// with the given id.
function insertReadableSize(bytes, multiplier, id) {
    const target = document.getElementById(id);
    target.innerText = getReadableSize(bytes * multiplier);
}
/**
 *
 * Base64 encode / decode
 * http://www.webtoolkit.info/
 *
 **/
// Minified third-party helper (webtoolkit.info): Base64.encode/decode with
// UTF-8 round-tripping; used here to transport user-entered names/notes in
// HTTP headers. Kept byte-identical to upstream.
// NOTE(review): _utf8_decode assigns c1/c2/c3 without declaration (implicit
// globals) — inherited from upstream, left untouched.
var Base64={_keyStr:"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",encode:function(r){var t,e,o,a,h,n,c,d="",C=0;for(r=Base64._utf8_encode(r);C<r.length;)a=(t=r.charCodeAt(C++))>>2,h=(3&t)<<4|(e=r.charCodeAt(C++))>>4,n=(15&e)<<2|(o=r.charCodeAt(C++))>>6,c=63&o,isNaN(e)?n=c=64:isNaN(o)&&(c=64),d=d+this._keyStr.charAt(a)+this._keyStr.charAt(h)+this._keyStr.charAt(n)+this._keyStr.charAt(c);return d},decode:function(r){var t,e,o,a,h,n,c="",d=0;for(r=r.replace(/[^A-Za-z0-9\+\/\=]/g,"");d<r.length;)t=this._keyStr.indexOf(r.charAt(d++))<<2|(a=this._keyStr.indexOf(r.charAt(d++)))>>4,e=(15&a)<<4|(h=this._keyStr.indexOf(r.charAt(d++)))>>2,o=(3&h)<<6|(n=this._keyStr.indexOf(r.charAt(d++))),c+=String.fromCharCode(t),64!=h&&(c+=String.fromCharCode(e)),64!=n&&(c+=String.fromCharCode(o));return c=Base64._utf8_decode(c)},_utf8_encode:function(r){r=r.replace(/\r\n/g,"\n");for(var t="",e=0;e<r.length;e++){var o=r.charCodeAt(e);o<128?t+=String.fromCharCode(o):o>127&&o<2048?(t+=String.fromCharCode(o>>6|192),t+=String.fromCharCode(63&o|128)):(t+=String.fromCharCode(o>>12|224),t+=String.fromCharCode(o>>6&63|128),t+=String.fromCharCode(63&o|128))}return t},_utf8_decode:function(r){for(var t="",e=0,o=c1=c2=0;e<r.length;)(o=r.charCodeAt(e))<128?(t+=String.fromCharCode(o),e++):o>191&&o<224?(c2=r.charCodeAt(e+1),t+=String.fromCharCode((31&o)<<6|63&c2),e+=2):(c2=r.charCodeAt(e+1),c3=r.charCodeAt(e+2),t+=String.fromCharCode((15&o)<<12|(63&c2)<<6|63&c3),e+=3);return t}};
@@ -1,36 +0,0 @@
// Formats a unix timestamp (seconds) as "YYYY-MM-DD HH:MM" in local time.
function formatUnixTimestamp(unixTimestamp) {
    const date = new Date(unixTimestamp * 1000);
    const pad = (n) => String(n).padStart(2, '0');
    const year = date.getFullYear();
    const month = pad(date.getMonth() + 1); // months are 0-based
    const day = pad(date.getDate());
    const hours = pad(date.getHours());
    const minutes = pad(date.getMinutes());
    return `${year}-${month}-${day} ${hours}:${minutes}`;
}
// Writes the formatted timestamp into the element with the given id.
function insertFormattedDate(unixTimestamp, id) {
    document.getElementById(id).innerText = formatUnixTimestamp(unixTimestamp);
}
// Shows "Never" for a timestamp of 0, "Online" when the timestamp lies
// within the last two minutes, otherwise the formatted date.
function insertLastOnlineDate(unixTimestamp, id) {
    if (unixTimestamp == 0) {
        document.getElementById(id).innerText = "Never";
        return;
    }
    if ((Date.now()/1000) - 120 < unixTimestamp) {
        document.getElementById(id).innerText = "Online";
        return;
    }
    insertFormattedDate(unixTimestamp, id);
}
// Shows "Never" for a timestamp of 0, otherwise the formatted date.
function insertLastUsed(unixTimestamp, id) {
    if (unixTimestamp == 0) {
        document.getElementById(id).innerText = "Never";
        return;
    }
    insertFormattedDate(unixTimestamp, id);
}
File diff suppressed because one or more lines are too long
@@ -0,0 +1,2 @@
function getUuid(){if(typeof crypto!="undefined"&&crypto.randomUUID)return crypto.randomUUID();if(typeof crypto!="undefined"&&crypto.getRandomValues){const e=new Uint8Array(16);return crypto.getRandomValues(e),e[6]=e[6]&15|64,e[8]=e[8]&63|128,[...e].map((e,t)=>(t===4||t===6||t===8||t===10?"-":"")+e.toString(16).padStart(2,"0")).join("")}let t="",e;for(e=0;e<36;e++)if(e===8||e===13||e===18||e===23)t+="-";else if(e===14)t+="4";else{const n=Math.random()*16|0;t+=(e===19?n&3|8:n).toString(16)}return t}function formatUnixTimestamp(e){const t=new Date(e*1e3),n=e=>String(e).padStart(2,"0"),s=t.getFullYear(),o=n(t.getMonth()+1),i=n(t.getDate()),a=n(t.getHours()),r=n(t.getMinutes());return`${s}-${o}-${i} ${a}:${r}`}function formatTimestampWithNegative(e,t){return t===0[0]&&(t="Never"),e==0?t:formatUnixTimestamp(e)}function insertFormattedDate(e,t){document.getElementById(t).innerText=formatUnixTimestamp(e)}function insertDateWithNegative(e,t,n){document.getElementById(t).innerText=formatTimestampWithNegative(e,n)}function insertLastOnlineDate(e,t){if(Date.now()/1e3-120<e){document.getElementById(t).innerText="Online";return}insertDateWithNegative(e,t)}function formatFileRequestExpiry(e){return e==0?"Never":Date.now()/1e3>e?"Expired":formatUnixTimestamp(e)}function insertFileRequestExpiry(e,t){document.getElementById(t).innerText=formatFileRequestExpiry(e)}function getReadableSize(e){if(!e)return"0 B";const n=["B","kB","MB","GB","TB"];let t=0;for(;e>=1024&&t<n.length-1;)e/=1024,t++;return`${e.toFixed(1)} ${n[t]}`}function insertReadableSize(e,t,n){document.getElementById(n).innerText=getReadableSize(e*t)}var Base64={_keyStr:"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",encode:function(e){var 
t,s,o,i,r,c,l,a="",n=0;for(e=Base64._utf8_encode(e);n<e.length;)c=(r=e.charCodeAt(n++))>>2,l=(3&r)<<4|(s=e.charCodeAt(n++))>>4,i=(15&s)<<2|(o=e.charCodeAt(n++))>>6,t=63&o,isNaN(s)?i=t=64:isNaN(o)&&(t=64),a=a+this._keyStr.charAt(c)+this._keyStr.charAt(l)+this._keyStr.charAt(i)+this._keyStr.charAt(t);return a},decode:function(e){var s,o,i,a,r,c,t="",n=0;for(e=e.replace(/[^A-Za-z0-9+/=]/g,"");n<e.length;)o=this._keyStr.indexOf(e.charAt(n++))<<2|(r=this._keyStr.indexOf(e.charAt(n++)))>>4,i=(15&r)<<4|(s=this._keyStr.indexOf(e.charAt(n++)))>>2,a=(3&s)<<6|(c=this._keyStr.indexOf(e.charAt(n++))),t+=String.fromCharCode(o),64!=s&&(t+=String.fromCharCode(i)),64!=c&&(t+=String.fromCharCode(a));return t=Base64._utf8_decode(t)},_utf8_encode:function(e){e=e.replace(/\r\n/g,`
`);for(var t,n="",s=0;s<e.length;s++)t=e.charCodeAt(s),t<128?n+=String.fromCharCode(t):t>127&&t<2048?(n+=String.fromCharCode(t>>6|192),n+=String.fromCharCode(63&t|128)):(n+=String.fromCharCode(t>>12|224),n+=String.fromCharCode(t>>6&63|128),n+=String.fromCharCode(63&t|128));return n},_utf8_decode:function(e){for(var s="",t=0,n=c1=c2=0;t<e.length;)(n=e.charCodeAt(t))<128?(s+=String.fromCharCode(n),t++):n>191&&n<224?(c2=e.charCodeAt(t+1),s+=String.fromCharCode((31&n)<<6|63&c2),t+=2):(c2=e.charCodeAt(t+1),c3=e.charCodeAt(t+2),s+=String.fromCharCode((15&n)<<12|(63&c2)<<6|63&c3),t+=3);return s}}
+1 -1
View File
@@ -1 +1 @@
function formatUnixTimestamp(e){const t=new Date(e*1e3),n=e=>String(e).padStart(2,"0"),s=t.getFullYear(),o=n(t.getMonth()+1),i=n(t.getDate()),a=n(t.getHours()),r=n(t.getMinutes());return`${s}-${o}-${i} ${a}:${r}`}function insertFormattedDate(e,t){document.getElementById(t).innerText=formatUnixTimestamp(e)}function insertLastOnlineDate(e,t){if(e==0){document.getElementById(t).innerText="Never";return}if(Date.now()/1e3-120<e){document.getElementById(t).innerText="Online";return}insertFormattedDate(e,t)}function insertLastUsed(e,t){if(e==0){document.getElementById(t).innerText="Never";return}insertFormattedDate(e,t)}
function formatUnixTimestamp(e){const t=new Date(e*1e3),n=e=>String(e).padStart(2,"0"),s=t.getFullYear(),o=n(t.getMonth()+1),i=n(t.getDate()),a=n(t.getHours()),r=n(t.getMinutes());return`${s}-${o}-${i} ${a}:${r}`}function formatTimestampWithNegative(e,t){return t===0[0]&&(t="Never"),e==0?t:formatUnixTimestamp(e)}function insertFormattedDate(e,t){document.getElementById(t).innerText=formatUnixTimestamp(e)}function insertDateWithNegative(e,t,n){document.getElementById(t).innerText=formatTimestampWithNegative(e,n)}function insertLastOnlineDate(e,t){if(Date.now()/1e3-120<e){document.getElementById(t).innerText="Online";return}insertDateWithNegative(e,t)}function formatFileRequestExpiry(e){return e==0?"Never":Date.now()/1e3>e?"Expired":formatUnixTimestamp(e)}function insertFileRequestExpiry(e,t){document.getElementById(t).innerText=formatFileRequestExpiry(e)}
File diff suppressed because one or more lines are too long
@@ -0,0 +1,523 @@
// Wires up the public upload UI: the hidden file input, drag & drop onto the
// upload box, and clipboard paste. Valid files are rendered as list rows and
// tracked in the module-level filesMap, keyed by a locally generated uuid.
function createUploadBox() {
// Validate and enqueue every file selected through the file input.
fileInput.addEventListener('change', () => {
Array.from(fileInput.files).forEach(file => {
if (file.size > MAX_FILE_SIZE) {
document.getElementById('span-modal-error').innerText =
`The file "${file.name}" exceeds the maximum allowed size of ${formatSize(MAX_FILE_SIZE)}.`;
errorModal.show();
// Skips only this oversized file; remaining selections are still processed.
return;
}
document.getElementById('uploadbutton').disabled = false;
const uuid = getUuid();
// Build the visual list row: name, status text, progress bar, size, remove.
const item = document.createElement('div');
item.className = 'pu-file-item';
item.dataset.uuid = uuid;
const name = document.createElement('span');
name.textContent = file.name;
name.className = 'file-name';
const progressText = document.createElement('span');
progressText.className = 'upload-status';
progressText.textContent = 'Ready';
const progressBar = document.createElement('progress');
progressBar.className = 'upload-progress';
// 0-byte files get max=1 so the progress bar can still reach 100%.
if (file.size == 0) {
progressBar.max = 1;
} else {
progressBar.max = file.size;
}
progressBar.value = 0;
const size = document.createElement('span');
size.className = 'file-size';
size.textContent = formatSize(file.size);
const removeBtn = document.createElement('button');
removeBtn.type = 'button';
removeBtn.title = 'Remove';
removeBtn.className = 'btn btn-sm btn-link text-light p-0';
removeBtn.innerHTML = '<i class="bi bi-x-circle"></i>';
removeBtn.onclick = async () => {
const entry = filesMap.get(uuid);
// 1. If currently uploading, abort it
if (entry.controller) {
entry.controller.abort();
}
// 2. If it has a server reservation, clean it up
if (entry.serverUuid) {
await unreserve(entry.serverUuid);
}
// Entries are flagged as removed rather than deleted from filesMap.
entry.removed = true;
item.remove();
updateUploadButtonState();
};
item.append(name, progressText, progressBar, size, removeBtn);
fileList.appendChild(item);
filesMap.set(uuid, {
uuid,
file,
removed: false,
controller: new AbortController(),
lastSpeed: "",
elements: {
progressBar,
progressText,
removeBtn,
item
}
});
});
// Allow re-selecting same files
fileInput.value = '';
});
// --- Drag and Drop Functionality ---
// Prevent default behaviors for drag events
['dragenter', 'dragover', 'dragleave', 'drop'].forEach(eventName => {
uploadBox.addEventListener(eventName, (e) => {
e.preventDefault();
e.stopPropagation();
}, false);
});
// Highlight box when dragging over
['dragenter', 'dragover'].forEach(eventName => {
uploadBox.addEventListener(eventName, () => uploadBox.classList.add('highlight'), false);
});
['dragleave', 'drop'].forEach(eventName => {
uploadBox.addEventListener(eventName, () => uploadBox.classList.remove('highlight'), false);
});
// Handle dropped files
uploadBox.addEventListener('drop', (e) => {
const dt = e.dataTransfer;
const files = dt.files;
handleFiles(files);
});
// --- Paste Functionality ---
window.addEventListener('paste', (e) => {
const items = e.clipboardData.items;
const files = [];
for (let i = 0; i < items.length; i++) {
// Handle Files (Images, etc)
if (items[i].kind === 'file') {
files.push(items[i].getAsFile());
}
// Handle Text pastes (converts text to a .txt file)
else if (items[i].kind === 'string' && items[i].type === 'text/plain') {
// getAsString is asynchronous, so pasted text reaches handleFiles
// in its own callback, separately from pasted files collected below.
items[i].getAsString((text) => {
const blob = new Blob([text], {
type: 'text/plain'
});
const file = new File([blob], "pasted-text.txt", {
type: 'text/plain'
});
handleFiles([file]);
});
}
}
if (files.length > 0) {
handleFiles(files);
}
});
}
// Registers window listeners that (a) warn the user before the tab is closed
// while files are still queued and (b) release server-side reservations when
// the page is actually torn down.
function setUnload() {
    // Warn when closing the tab with pending (not yet removed) files.
    window.addEventListener('beforeunload', (e) => {
        const hasPending = Array.from(filesMap.values()).some(f => !f.removed);
        if (hasPending) {
            // Standard way to trigger a "Are you sure?" browser dialog
            e.preventDefault();
            e.returnValue = '';
        }
    });
    // Attempt unreserve on actual exit. 'pagehide' is used instead of the
    // deprecated 'unload' event: 'unload' does not fire reliably (notably on
    // mobile) and prevents back/forward-cache restoration, while 'pagehide'
    // fires in both cases. unreserve() sends its request with fetch keepalive,
    // so it survives page teardown.
    window.addEventListener('pagehide', () => {
        for (const entry of filesMap.values()) {
            if (!entry.removed && entry.serverUuid) {
                unreserve(entry.serverUuid);
            }
        }
    });
}
// Routes externally-sourced files (drag & drop, paste) through the normal
// file-input change pipeline so that all validation happens in one place.
function handleFiles(files) {
    const transfer = new DataTransfer();
    for (const file of Array.from(files)) {
        transfer.items.add(file);
    }
    fileInput.files = transfer.files;
    fileInput.dispatchEvent(new Event('change'));
}
// Enables the upload button only while at least one queued file has not yet
// been removed or completed.
function updateUploadButtonState() {
    let hasPending = false;
    for (const entry of filesMap.values()) {
        if (!entry.removed && entry.elements.progressText.textContent !== "Completed") {
            hasPending = true;
            break;
        }
    }
    document.getElementById("uploadbutton").disabled = !hasPending;
}
// Shows either the success modal ("alluploaded") or the error modal with a
// message matching the given code. Unknown codes show an empty error message.
function showModal(modalCode) {
    if (modalCode === "alluploaded") {
        // Static backdrop: the success dialog cannot be dismissed by clicking outside.
        new bootstrap.Modal(document.getElementById('allUploadedModal'), {
            keyboard: false,
            backdrop: "static"
        }).show();
        return;
    }
    let message = "";
    if (modalCode === "maxfiles") {
        message = maxFilesRemaining == 1
            ? "Too many files are selected for upload. Please only select 1 file."
            : "Too many files are selected for upload. Please only select " + maxFilesRemaining + " files or fewer.";
    } else if (modalCode === "maxfilesdynamic") {
        message = "Some files could not be uploaded because the server rejected the request. This likely occurred because another user was uploading files at the same time and the maximum file limit was reached.";
    } else if (modalCode === "expired") {
        message = "The upload request exceeded the permitted time limit, and uploading additional files is no longer possible.";
    }
    document.getElementById('span-modal-error').innerText = message;
    errorModal.show();
}
// Formats a byte count as a human-readable size with one decimal place,
// stepping through 1024-based units and capping at GB.
function formatSize(bytes) {
    const units = ['B', 'KB', 'MB', 'GB'];
    let idx = 0;
    let value = bytes;
    while (value >= 1024 && idx < units.length - 1) {
        value /= 1024;
        idx++;
    }
    return `${value.toFixed(1)} ${units[idx]}`;
}
// Executes the async fn with retry semantics and returns its result.
// - Up to `retries` attempts; waits `retryDelay` ms between attempts.
// - HTTP 429 errors retry every 5s WITHOUT consuming an attempt, for up to
//   60s measured from the first call; onWait is invoked before each wait.
// - HTTP 400/401 errors are rethrown immediately (retrying cannot help).
// - An optional AbortSignal cancels by throwing a "Cancelled" error.
// Throws the last error once attempts are exhausted.
async function withRetry(fn, {
retries = 3,
retryDelay = 5000,
onRetry,
onWait, // New callback for 429s
signal
} = {}) {
let lastError;
let attempt = 1;
const startTime = Date.now();
const MAX_WAIT_TIME = 60000; // 60 seconds
while (attempt <= retries) {
if (signal && signal.aborted) throw new Error("Cancelled");
try {
return await fn();
} catch (err) {
lastError = err;
if (err.message === "Cancelled" || (signal && signal.aborted)) throw err;
// Handle Rate Limiting (429)
if (err.status === 429) {
const elapsed = Date.now() - startTime;
if (elapsed < MAX_WAIT_TIME) {
if (onWait) onWait();
await new Promise(r => setTimeout(r, 5000));
continue; // "continue" doesn't increment 'attempt', so it retries indefinitely for 60s
}
}
// Standard Retry Logic
// NOTE(review): onRetry fires before the 400/401 early-throw below, so a
// "Retry" message may be reported for errors that are never actually
// retried — confirm whether that ordering is intentional.
if (onRetry && attempt < retries) {
onRetry(attempt, err);
}
if (err.status === 400 || err.status === 401) throw err;
if (attempt < retries) {
attempt++;
await new Promise(r => setTimeout(r, retryDelay));
} else {
break;
}
}
}
throw lastError;
}
// Returns how many files are still queued (i.e. not removed or completed).
function getQueuedFileCount() {
    let total = 0;
    filesMap.forEach((entry) => {
        if (!entry.removed) {
            total += 1;
        }
    });
    return total;
}
// Entry point for the upload button: disables it while the upload runs and
// re-evaluates its enabled state afterwards, whether the upload succeeded
// or failed.
function initUpload() {
    const uploadButton = document.getElementById("uploadbutton");
    uploadButton.disabled = true;
    startUpload()
        .catch(console.error)
        .finally(() => updateUploadButtonState());
}
// Uploads every queued file in filesMap sequentially: reserve a server-side
// uuid, send the file in CHUNK_SIZE slices via XHR, then finalise. Progress,
// transfer speed and errors are written into each entry's list-row elements.
async function startUpload() {
// Client-side pre-check; the server re-checks the file limit per request.
if (!IS_UNLIMITED_FILES && getQueuedFileCount() > maxFilesRemaining) {
showModal("maxfiles");
return;
}
for (const entry of filesMap.values()) {
if (entry.removed) continue;
const {
file,
uuid,
elements
} = entry;
// Reset UI state for (re)attempt
elements.progressBar.style.display = "";
elements.progressText.style.color = "";
let lastSpeedText = "";
try {
elements.progressText.textContent = "Reserving...";
const serverUuid = await reserveChunk(elements);
entry.serverUuid = serverUuid;
// While uploading, the remove button doubles as a cancel button.
elements.removeBtn.innerHTML = '<i class="bi bi-stop-circle text-danger"></i>';
elements.removeBtn.title = "Cancel Upload";
let offset = 0;
// do-while so that add chunk is run for 0byte files as well
do {
if (entry.controller.signal.aborted) return;
const chunk = file.slice(offset, offset + CHUNK_SIZE);
await withRetry(async () => {
return new Promise((resolve, reject) => {
const formData = new FormData();
formData.append("file", chunk);
formData.append("uuid", serverUuid);
formData.append("filesize", file.size);
formData.append("offset", offset);
const xhr = new XMLHttpRequest();
entry.xhr = xhr;
xhr.open("POST", UPLOAD_URL);
xhr.setRequestHeader("apikey", API_KEY);
xhr.setRequestHeader("fileRequestId", FILE_REQUEST_ID);
const startTime = Date.now();
// Listen for the cancel signal
const abortHandler = () => {
xhr.abort();
reject(new Error("Cancelled"));
};
entry.controller.signal.addEventListener('abort', abortHandler);
// Progress is reported relative to the whole file, not the chunk.
xhr.upload.onprogress = (event) => {
if (event.lengthComputable) {
const chunkOffset = offset + event.loaded;
const totalSize = file.size === 0 ? 1 : file.size;
const percent = Math.floor((chunkOffset / totalSize) * 100);
const duration = (Date.now() - startTime) / 1000;
if (duration > 0) {
// Update the persistent lastSpeedText
lastSpeedText = ` (${formatSize(event.loaded / duration)}/s)`;
}
elements.progressBar.value = chunkOffset;
elements.progressText.textContent = percent + "%" + lastSpeedText;
}
};
// NOTE(review): parseXhrError is defined elsewhere in this bundle —
// presumably the XHR counterpart of parseErrorResponse; verify.
xhr.onload = async () => {
entry.controller.signal.removeEventListener('abort', abortHandler);
if (xhr.status >= 200 && xhr.status < 300) resolve();
else reject(await parseXhrError(xhr));
};
xhr.onerror = () => {
const err = new Error(`Server Error`);
err.status = xhr.status;
reject(err);
};
xhr.send(formData);
});
}, {
signal: entry.controller.signal,
onWait: () => {
elements.progressText.textContent = "Waiting for upload slot...";
},
onRetry: (a, e) => {
elements.progressText.textContent = `Retry ${a}/3: ${e.message}${lastSpeedText}`;
}
});
offset += chunk.size;
} while (offset < file.size);
await finaliseUpload(file, serverUuid, elements);
elements.progressText.textContent = "Completed";
elements.item.style.opacity = "0.6";
elements.removeBtn.remove(); // Remove button only on success
// Completed entries are flagged as removed so re-runs skip them.
filesMap.get(uuid).removed = true;
maxFilesRemaining--;
if (maxFilesRemaining === 0) showModal("alluploaded");
} catch (err) {
if (err.message === "Cancelled" || entry.controller.signal.aborted) return;
// Failed entries stay in the list and can be retried or removed later.
elements.progressText.textContent = err.message || "Upload failed";
elements.progressText.style.color = "#ff6b6b";
elements.progressBar.style.display = "none";
elements.removeBtn.innerHTML = '<i class="bi bi-trash"></i>';
elements.removeBtn.title = "Remove from list";
}
}
}
// Converts a failed fetch Response into an Error carrying .status, and — for
// structured API errors — .code and .raw. Codes 14/15 additionally open the
// matching explanatory modal as a side effect.
async function parseErrorResponse(response) {
    const body = await response.text();
    let parsed = null;
    try {
        parsed = JSON.parse(body);
    } catch {
        /* not JSON */
    }
    // Fallback: plain text / non-JSON error
    if (!parsed || parsed.Result !== "error") {
        const plainErr = new Error(body || `HTTP ${response.status}`);
        plainErr.status = response.status;
        return plainErr;
    }
    let message;
    switch (parsed.ErrorCode) {
        case 9:
            message = "File size limit exceeded";
            break;
        case 14:
            message = "Upload request has expired";
            showModal("expired");
            break;
        case 15:
            message = "Maximum file count reached";
            showModal("maxfilesdynamic");
            break;
        case 16:
            message = "Too many requests, please try again later";
            break;
        default:
            message = parsed.ErrorMessage || "Unknown upload error";
    }
    const apiErr = new Error(message);
    apiErr.status = response.status;
    apiErr.code = parsed.ErrorCode;
    apiErr.raw = parsed;
    return apiErr;
}
// Asks the server to reserve an upload slot for this file request and returns
// the server-assigned UUID. Retries via withRetry, surfacing attempts in the
// row's status text.
async function reserveChunk(elements) {
    const doReserve = async () => {
        const resp = await fetch(RESERVE_URL, {
            method: "POST",
            headers: {
                id: FILE_REQUEST_ID,
                apikey: API_KEY
            }
        });
        if (!resp.ok) {
            throw await parseErrorResponse(resp);
        }
        const payload = await resp.json();
        if (!payload.Uuid) {
            throw new Error("Invalid reserve response");
        }
        return payload.Uuid;
    };
    return withRetry(doReserve, {
        onRetry: (attempt, err) => {
            elements.progressText.textContent = `Retry ${attempt}/3: ${err.message}`;
        }
    });
}
// Tells the server that all chunks have been sent so it can assemble the
// upload. File metadata travels via request headers; the filename is wrapped
// through encodeFilename(). Retries via withRetry, surfacing attempts in the
// row's status text.
async function finaliseUpload(file, uuid, elements) {
    const complete = async () => {
        const resp = await fetch(COMPLETE_URL, {
            method: "POST",
            headers: {
                uuid,
                fileRequestId: FILE_REQUEST_ID,
                filename: encodeFilename(file.name),
                filesize: file.size,
                nonblocking: true,
                contenttype: file.type || "application/octet-stream",
                apikey: API_KEY
            }
        });
        if (!resp.ok) {
            throw await parseErrorResponse(resp);
        }
    };
    await withRetry(complete, {
        onRetry: (attempt, err) => {
            elements.progressText.textContent = `Retry ${attempt}/3: ${err.message}`;
        }
    });
}
// Wraps a filename as "base64:<encoded>" so the server can recognize and
// decode it (Base64 is the external encoder already loaded on this page).
function encodeFilename(name) {
    return `base64:${Base64.encode(name)}`;
}
// Best-effort release of a reserved upload slot; never throws. No-op when no
// uuid was assigned yet.
async function unreserve(uuid) {
    if (!uuid) {
        return;
    }
    try {
        await fetch(UNRESERVE_URL, {
            method: "POST",
            headers: {
                uuid: uuid,
                apikey: API_KEY,
                id: FILE_REQUEST_ID
            },
            // keepalive lets the request complete even during page unload
            keepalive: true
        });
    } catch (err) {
        console.error("Unreserve failed", err);
    }
}
-37
View File
@@ -1,37 +0,0 @@
// Generates an RFC 4122 version-4 UUID string, preferring the strongest
// available randomness source and degrading gracefully.
function getUuid() {
    // Best: the native generator (requires a secure context).
    if (typeof crypto !== "undefined" && crypto.randomUUID) {
        return crypto.randomUUID();
    }
    // Next best: assemble a v4 UUID from CSPRNG bytes.
    if (typeof crypto !== "undefined" && crypto.getRandomValues) {
        const raw = new Uint8Array(16);
        crypto.getRandomValues(raw);
        raw[6] = (raw[6] & 0x0f) | 0x40; // version nibble -> 4
        raw[8] = (raw[8] & 0x3f) | 0x80; // variant bits -> 10
        let out = "";
        for (let i = 0; i < raw.length; i++) {
            if (i === 4 || i === 6 || i === 8 || i === 10) {
                out += "-";
            }
            out += raw[i].toString(16).padStart(2, "0");
        }
        return out;
    }
    // Last resort: Math.random (NOT cryptographically secure).
    let uuid = "";
    for (let i = 0; i < 36; i++) {
        if (i === 8 || i === 13 || i === 18 || i === 23) {
            uuid += "-";
        } else if (i === 14) {
            uuid += "4";
        } else {
            const r = (Math.random() * 16) | 0;
            uuid += (i === 19 ? (r & 0x3) | 0x8 : r).toString(16);
        }
    }
    return uuid;
}
@@ -61,7 +61,7 @@
</thead>
<tbody id="downloadtable">
{{ range .Items }}
{{ if not .IsPendingDeletion }}
{{ if not (or .IsPendingDeletion .IsFileRequest) }}
{{ if or (gt .ExpireAt $.TimeNow) (.UnlimitedTime) }}
{{ if or (gt .DownloadsRemaining 0) (.UnlimitedDownloads) }}
<tr id="row-{{ .Id }}">
@@ -87,6 +87,7 @@
{{ template "admin_button_share" (newAdminButtonContext . $.ActiveUser)}}
</div>
<div class="btn-group me-2" role="group">
{{ template "admin_button_download" (newAdminButtonContext . $.ActiveUser) }}
{{ template "admin_button_edit" (newAdminButtonContext . $.ActiveUser) }}
{{ template "admin_button_delete" (newAdminButtonContext . $.ActiveUser) }}
</div>
@@ -185,13 +186,26 @@
{{ end }}
{{ template "pagename" "UploadMenu"}}
{{ template "pagename" "FileRequest"}}
{{ template "customjs" .}}
{{ template "footer" true}}
{{ end }}
{{ define "admin_button_download" }}
<button
type="button"
title="Download"
class="btn btn-outline-light btn-sm {{if .CurrentFile.RequiresClientSideDecryption}}disabled{{end}}"
onclick="downloadFileWithPresign('{{.CurrentFile.Id }}');">
<i class="bi bi-download"></i>
</button>
{{ end }}
{{ define "admin_button_edit" }}
<button
type="button"
+12 -2
View File
@@ -41,15 +41,24 @@
<td id="friendlyname-{{ .PublicId }}" onClick="addFriendlyNameChange('{{ .PublicId }}')">{{ .FriendlyName }}</td>
<td><div class="font-monospace">{{ .GetRedactedId }}</div></td>
<td><span id="cell-lastused-{{ .PublicId }}"></span></td>
<script>insertLastUsed({{ .LastUsed }}, "cell-lastused-{{ .PublicId }}");</script>
<script>insertDateWithNegative({{ .LastUsed }}, "cell-lastused-{{ .PublicId }}");</script>
<td class="prevent-select">
<i id="perm_view_{{ .PublicId }}" class="bi bi-eye {{if not .HasPermissionView}}perm-notgranted{{else}}perm-granted{{end}}" title="List Uploads" onclick='changeApiPermission("{{ .PublicId }}","PERM_VIEW", "perm_view_{{ .PublicId }}");'></i>
<i id="perm_upload_{{ .PublicId }}" class="bi bi-file-earmark-arrow-up {{if not .HasPermissionUpload}}perm-notgranted{{else}}perm-granted{{end}}" title="Upload" onclick='changeApiPermission("{{ .PublicId }}","PERM_UPLOAD", "perm_upload_{{ .PublicId }}");'></i>
<i id="perm_upload_{{ .PublicId }}" class="bi bi-file-earmark-plus {{if not .HasPermissionUpload}}perm-notgranted{{else}}perm-granted{{end}}" title="Upload" onclick='changeApiPermission("{{ .PublicId }}","PERM_UPLOAD", "perm_upload_{{ .PublicId }}");'></i>
<i id="perm_edit_{{ .PublicId }}" class="bi bi-pencil {{if not .HasPermissionEdit}}perm-notgranted{{else}}perm-granted{{end}}" title="Edit Uploads" onclick='changeApiPermission("{{ .PublicId }}","PERM_EDIT", "perm_edit_{{ .PublicId }}");'></i>
<i id="perm_delete_{{ .PublicId }}" class="bi bi-trash3 {{if not .HasPermissionDelete}}perm-notgranted{{else}}perm-granted{{end}}" title="Delete Uploads" onclick='changeApiPermission("{{ .PublicId }}","PERM_DELETE", "perm_delete_{{ .PublicId }}");'></i>
<i id="perm_replace_{{ .PublicId }}" class="bi bi-recycle {{if not (index $.UserMap .UserId).HasPermissionReplace}}perm-unavailable perm-nochange{{ else }}{{if not .HasPermissionReplace}}perm-notgranted{{else}}perm-granted{{end}}{{end}}" title="Replace Uploads" onclick='changeApiPermission("{{ .PublicId }}","PERM_REPLACE", "perm_replace_{{ .PublicId }}");'></i>
<i id="perm_download_{{ .PublicId }}" class="bi bi-box-arrow-in-down {{if not .HasPermissionDownload}}perm-notgranted{{else}}perm-granted{{end}}" title="Download Files" onclick='changeApiPermission("{{ .PublicId }}","PERM_DOWNLOAD", "perm_download_{{ .PublicId }}");'></i>
<i id="perm_manage_file_requests_{{ .PublicId }}" class="bi bi-file-earmark-arrow-up {{if not (index $.UserMap .UserId).HasPermissionCreateFileRequests}}perm-unavailable perm-nochange{{ else }}{{if not .HasPermissionManageFileRequests}}perm-notgranted{{else}}perm-granted{{end}}{{end}}" title="Manage File Requests" onclick='changeApiPermission("{{ .PublicId }}","PERM_MANAGE_FILE_REQUESTS", "perm_manage_file_requests_{{ .PublicId }}");'></i>
<i id="perm_users_{{ .PublicId }}" class="bi bi-people {{if not (index $.UserMap .UserId).HasPermissionManageUsers}}perm-unavailable perm-nochange{{ else }}{{if not .HasPermissionManageUsers}}perm-notgranted{{else}}perm-granted{{end}}{{end}}" title="Manage Users" onclick='changeApiPermission("{{ .PublicId }}","PERM_MANAGE_USERS", "perm_users_{{ .PublicId }}");'></i>
<i id="perm_logs_{{ .PublicId }}" class="bi bi-card-list {{if not (index $.UserMap .UserId).HasPermissionManageLogs}}perm-unavailable perm-nochange{{ else }}{{if not .HasPermissionManageLogs}}perm-notgranted{{else}}perm-granted{{end}}{{end}}" title="Manage System Logs" onclick='changeApiPermission("{{ .PublicId }}","PERM_MANAGE_LOGS", "perm_logs_{{ .PublicId }}");'></i>
@@ -77,6 +86,7 @@
var canReplaceFiles = {{.ActiveUser.HasPermissionReplace }};
var canManageUsers = {{.ActiveUser.HasPermissionManageUsers }};
var canViewSystemLog = {{.ActiveUser.HasPermissionManageLogs }};
var canCreateFileRequest = {{.ActiveUser.HasPermissionCreateFileRequests }};
</script>
@@ -1,27 +1,52 @@
{{define "error"}}{{template "header" .}}
<div class="row">
<div class="col">
<div class="card" style="width: 18rem;">
<div class="card-body">
<h2 class="card-title">Error</h2>
<p class="card-text">
<br>
<div class="row justify-content-center mt-5">
<div class="col-md-6">
<div class="card shadow-sm" style="width: {{ .ErrorCardWidth }}rem;">
<div class="card-body text-center">
{{ if eq .ErrorId 0 }}
Sorry, this file cannot be found.<br><br>Either the link has expired or it has been downloaded too many times.
<h2 class="card-title">
File not found
</h2>
<br>
The link may have expired or the file has been downloaded too many times.
{{ end }}
{{ if eq .ErrorId 1 }}
This file is encrypted and no key has been passed.<br><br>Please contact the uploader to give you the correct link, including the value after the hash.
<h2 class="card-title">
Missing decryption key
</h2>
<br>
This file is encrypted, but no key was provided.<br>
Please contact the uploader and ensure the full link (including the value after the hash) is used.
{{ end }}
{{ if eq .ErrorId 2 }}
This file is encrypted and an incorrect key has been passed.<br><br>If this file is end-to-end encrypted, please contact the uploader to give you the correct link, including the value after the hash.
<h2 class="card-title">
Invalid decryption key
</h2>
<br>
This file is encrypted, but the provided key is incorrect.<br>
If this file is end-to-end encrypted, please request the correct link from the uploader.
{{ end }}
<br>&nbsp;
</p>
</div>
</div>
</div>
{{ if eq .ErrorId 3 }}
<h2 class="card-title">
Unable to upload files
</h2>
<br>
This can happen for one of the following reasons:
<ul class="list-unstyled mt-2 fst-italic">
<li>- The upload request has expired (time limit reached)</li>
<li>- The file limit for this upload request has been reached</li>
<li>- An invalid upload URL was submitted</li>
</ul>
{{ end }}
</p>
</div>
</div>
</div>
</div>
{{ template "pagename" "PublicError"}}
{{ template "customjs" .}}
{{template "footer"}}
@@ -14,6 +14,7 @@
<link rel="manifest" href="./site.webmanifest">
<link href="./css/min/gokapi.min.{{ template "css_main"}}.css" rel="stylesheet">
<link href="./assets/dist/icons/bootstrap-icons.min.css" rel="stylesheet">
<script src="./js/min/all_public.min.js"></script>
{{ if .IsAdminView }}
<title>{{.PublicName}} Admin</title>
<script src="./assets/dist/js/dropzone.min.js?v={{ template "js_dropzone_version"}}"></script>
@@ -25,8 +26,6 @@
<link href="./assets/dist/css/datatables.min.css" rel="stylesheet">
<link href="./assets/dist/css/flatpickr.min.css" rel="stylesheet">
<link href="./assets/dist/css/flatpickr.dark.min.css" rel="stylesheet">
<script src="./assets/dist/js/base64.min.js"></script>
<script src="./js/min/dateformat.min.js"></script>
<style>
.masthead-brand {
float: left;
@@ -59,11 +58,6 @@
{{ else }}
<title>{{.PublicName}}</title>
{{end }}
<style>
body {
box-shadow: inset 0 0 5rem rgba(0, 0, 0, .5);
}
</style>
{{ if .CustomContent.UseCustomCss }}
<link href="./custom/custom.v{{.CustomContent.Version}}.css" rel="stylesheet">
{{ end }}
@@ -78,13 +72,16 @@
<h1>{{.PublicName}}</h1>
<nav class="nav nav-masthead justify-content-center">
<a class="nav-link {{ if eq .ActiveView 0}}active{{ end }}" href="./admin">Upload</a>
{{ if .ActiveUser.HasPermissionManageLogs }}
<a class="nav-link {{ if eq .ActiveView 1 }}active{{ end }}" href="./logs">Logs</a>
{{ if .ActiveUser.HasPermissionCreateFileRequests }}
<a class="nav-link {{ if eq .ActiveView 4 }}active{{ end }}" href="./filerequests">File Requests</a>
{{ end }}
{{ if and .ActiveUser.HasPermissionManageUsers .IsUserTabAvailable }}
<a class="nav-link {{ if eq .ActiveView 3 }}active{{ end }}" href="./users">Users</a>
{{ end }}
<a class="nav-link {{ if eq .ActiveView 2 }}active{{ end }}" href="./apiKeys">API</a>
{{ if .ActiveUser.HasPermissionManageLogs }}
<a class="nav-link {{ if eq .ActiveView 1 }}active{{ end }}" href="./logs">Logs</a>
{{ end }}
{{ if .IsLogoutAvailable }}<a class="nav-link" href="./logout">Logout</a>{{ end }}
</nav>
</div>
@@ -7,10 +7,11 @@
<br>
<p class="card-text">
<form method="post" action="./login" id="form" name="form" onSubmit="submitForm()">
<input type="text" placeholder="Username" value="{{.User}}" id="username">
<input type="text"
autocomplete="username" placeholder="Username" value="{{.User}}" id="username">
<input type="hidden" id="uname_hidden" name="username">
<br><br>
<input type="password" placeholder="Password" id="password">
<input type="password" autocomplete="current-password" placeholder="Password" id="password">
<input type="hidden" id="pw_hidden" name="password">
<br><br>
{{ if .IsFailedLogin }}
@@ -0,0 +1,127 @@
{{define "publicUpload"}}{{template "header" .}}
<main style="margin-top: 2rem">
<div class="row justify-content-center">
<div class="col-lg-8">
<div class="card bg-dark text-white">
<div class="card-body">
<h3 class="card-title text-center mb-4">Upload Files</h3>
{{ if ne .FileRequest.Notes "" }}
<div class="info-box">
<h6>Note</h6>
<p class="mb-0">{{.FileRequest.Notes}}</p>
</div>
{{ end }}
{{ if .FileRequest.HasRestrictions }}
<div class="info-box">
<h6>Upload restrictions</h6>
<ul>
{{ if not (.FileRequest.IsUnlimitedTime) }}
<li>Upload possible until: <strong>
<span id="expirydate">{{ .FileRequest.Expiry }}</span>
</strong>
</li>
<script>
insertFormattedDate({{.FileRequest.Expiry}}, "expirydate");
</script>
{{ end }}
{{ if or (not (.FileRequest.IsUnlimitedSize)) (lt .FileRequest.CombinedMaxSize 5000) }}
<li>Maximum file size: <strong>
<span id="maxfilesize"></span>
</strong>
</li>
<script>
insertReadableSize({{.FileRequest.CombinedMaxSize}}, 1024 * 1024, "maxfilesize");
</script>
{{ end }}
{{ if not (.FileRequest.IsUnlimitedFiles) }}
<li>Maximum number of files: <strong>{{ .FileRequest.FilesRemaining }}</strong>
</li>
{{ end }}
</ul>
</div>
{{ end }}
<label for="fileInput" id="uploadBox" class="upload-box text-center w-100">
<p class="mb-2 fs-5">Drag & drop files here</p>
<p class="mb-0 opacity-75">or paste or click to select</p>
<input type="file" id="fileInput" class="d-none" multiple>
</label>
<div id="fileList" class="pu-file-list"></div>
<div class="text-center mt-4">
<button class="btn btn-primary" disabled onClick="initUpload();" id="uploadbutton"> Upload </button>
</div>
</div>
</div>
</div>
</div>
</main>
<!-- Error Modal -->
<div class="modal fade" id="errorModal" tabindex="-1" aria-labelledby="errorModalLabel" aria-hidden="true">
<div class="modal-dialog gokapi-dialog">
<div class="modal-content gokapi-dialog">
<div class="modal-header">
<h2 class="modal-title fs-5" id="errorModalLabel">Unable to upload</h2>
</div>
<div class="modal-body">
<span id="span-modal-error"></span>
</div>
<div class="modal-footer">
<button type="button" data-bs-dismiss="modal" data-bs-target="#errorModal" class="btn btn-primary">Continue</button>
</div>
</div>
</div>
</div>
<!-- All Uploaded Modal -->
<div class="modal fade" id="allUploadedModal" tabindex="-1" aria-labelledby="allUModalLabel" aria-hidden="true">
<div class="modal-dialog gokapi-dialog">
<div class="modal-content gokapi-dialog">
<div class="modal-header">
<h2 class="modal-title fs-5" id="allUModalLabel">Success</h2>
</div>
<div class="modal-body">
All files have been successfully uploaded and no further files can be uploaded. You can now close this page.
</div>
<div class="modal-footer">
</div>
</div>
</div>
</div>
<script src="./js/min/public_upload.min.js"></script>
<script>
const fileInput = document.getElementById('fileInput');
const fileList = document.getElementById('fileList');
const filesMap = new Map();
const errorModal = new bootstrap.Modal(document.getElementById('errorModal'));
const uploadBox = document.getElementById('uploadBox');
const CHUNK_SIZE = {{.ChunkSize}} * 1024 * 1024;
const API_BASE = "./api/uploadrequest/chunk/";
const RESERVE_URL = API_BASE + "reserve";
const UNRESERVE_URL = API_BASE + "unreserve";
const UPLOAD_URL = API_BASE + "add";
const COMPLETE_URL = API_BASE + "complete";
const FILE_REQUEST_ID = "{{ .FileRequest.Id }}";
const API_KEY = "{{ .FileRequest.ApiKey }}";
const MAX_FILE_SIZE = {{.FileRequest.CombinedMaxSize}} * 1024 * 1024;
const IS_UNLIMITED_FILES = {{ .FileRequest.IsUnlimitedFiles }};
const IS_UNLIMITED_TIME = {{ .FileRequest.IsUnlimitedTime }};
var maxFilesRemaining = {{.FileRequest.FilesRemaining}};
createUploadBox();
setUnload();
</script>
{{ template "pagename" "PublicUpload"}}
{{ template "customjs" .}}
{{ template "footer"}}
{{end}}
@@ -0,0 +1,243 @@
{{ define "uploadreq" }}{{ template "header" . }}
<div class="row">
<div class="col">
<div id="container" class="card" style="width: 80%">
<div class="card-body">
<div class="container">
<div class="row mb-4">
<div class="col">
</div>
<div class="col text-center">
<h3 class="card-title mb-0">File Requests</h3>
</div>
<div class="col text-end">
<button id="button-newfr" class="btn btn-outline-light" onclick="newFileRequest()">
<i class="bi bi-plus-circle-fill"></i>
</button>
</div>
</div>
</div>
<br>
<div class="table-responsive">
<table class="table table-dark ">
<thead>
<tr>
<th scope="col">Name</th>
<th scope="col">Uploaded Files</th>
<th scope="col">Total Size</th>
<th scope="col">Last Upload</th>
<th scope="col">Expiry</th>
{{ if .ActiveUser.HasPermissionListOtherUploads }}
<th scope="col">User</th>
{{ end }}
<th scope="col">Actions</th>
</tr>
</thead>
<tbody id="filerequesttable">
{{ range .FileRequests }}
<tr id="row-{{ .Id }}" class="no-bottom-border filerequest-item">
<td><a href="{{ $.ServerUrl }}publicUpload?id={{ .Id }}&key={{ .ApiKey }}" target="_blank">{{ .Name }}</a></td>
{{ template "uRFileCell" . }}
<td>{{ .GetReadableTotalSize }}</td>
<td><span id="cell-lastupdate-{{ .Id }}"></span></td>
<script>insertDateWithNegative({{ .LastUpload }}, "cell-lastupdate-{{ .Id }}", "None");</script>
<td><span id="cell-expiry-{{ .Id }}"></span></td>
<script>insertFileRequestExpiry({{ .Expiry }}, "cell-expiry-{{ .Id }}");</script>
{{ if $.ActiveUser.HasPermissionListOtherUploads }}
<td id="cell-username-{{ .Id }}">{{(index $.UserMap .UserId).Name}}</td>
{{ end }}
<td>
<div class="btn-group" role="group">
{{ template "uRDownloadbutton" . }}
<button id="copy-{{ .Id }}" type="button" data-clipboard-text="{{ $.ServerUrl }}publicUpload?id={{ .Id }}&key={{ .ApiKey }}" class="copyurl btn btn-outline-light btn-sm" onclick="showToast(1000);" title="Copy URL"><i class="bi bi-copy"></i></button>
<button id="edit-{{ .Id }}" type="button" title="Edit request" class="btn btn-outline-light btn-sm" onclick="editFileRequest('{{ .Id }}', '{{ .Name }}', {{ .MaxFiles }}, {{ .MaxSize }}, {{ .Expiry }}, '{{ .Notes }}')">
<i class="bi bi-pencil"></i></button>
<button id="delete-{{ .Id }}" type="button" class="btn btn-outline-danger btn-sm" onclick="deleteOrShowModal('{{ .Id }}', '{{ .Name }}', {{ .UploadedFiles }})" title="Delete"><i class="bi bi-trash3"></i></button>
</div>
</td>
</tr>
<tr id="filelist-{{ .Id }}">
<td colspan="7" class="p-0">
<div id="collapse-filelist-{{ .Id }}" class="collapse bg-dark">
<div class="p-2">
<ul class="list-group list-group-flush">
{{ range .Files }}
<li class="list-group-item text-light d-flex align-items-center border-bottom-0 filelist-item">
<div class="flex-grow-1 text-truncate">
<a href="#" class="text-decoration-none text-light" onClick="downloadFileWithPresign('{{ .Id }}');">
{{ .Name }}
</a>
</div>
<div class="small me-3 text-nowrap text-light">
{{ .Size }} · <span id="cell-date-file-{{.Id}}"></span>
</div>
<script>insertFormattedDate({{ .UploadDate }}, "cell-date-file-{{.Id}}");</script>
<button class="btn btn-outline-light btn-sm"
onClick="downloadFileWithPresign('{{ .Id }}');"
title="Download">
<i class="bi bi-download"></i>
</button>
</li>
{{ end }}
</ul>
</div>
</div> </td>
</tr>
{{ end }}
</tbody>
</table>
</div>
</div>
</div>
<div id="toastnotification" class="toastnotification" data-default="URL copied to clipboard">Toast Text</div>
</div>
</div>
<script src="./js/min/admin.min.{{ template "js_admin_version"}}.js"></script>
<script>
var userName = "{{.ActiveUser.Name}}";
var baseUrl = "{{.ServerUrl}}";
var canViewOtherRequests = {{.ActiveUser.HasPermissionListOtherUploads}};
var limitMaxSize = {{.FileRequestMaxSize}};
var limitMaxFiles = {{.FileRequestMaxFiles}};
</script>
{{ template "urequest_modal_confirm" }}
{{ template "urequest_modal_addedit" }}
{{ template "pagename" "UploadRequest"}}
{{ template "customjs" .}}
{{ template "footer" true }}
{{ end }}
{{ define "urequest_modal_confirm" }}
{{/* Confirmation dialog shown before deleting a file request.
     Populated and wired by admin JS: deleteOrShowModal() fills
     #deleteModalBodyName / #deleteModalBodyCount and attaches the
     delete action to #buttonDelete. */}}
<div class="modal" tabindex="-1" id="deleteModal">
    <div class="modal-dialog gokapi-dialog">
        <div class="modal-content gokapi-dialog">
            <div class="modal-header">
                <h5 class="modal-title">Delete File Request</h5>
            </div>
            <div class="modal-body">
                {{/* Fixed user-facing typo: "filerequest" -> "file request" */}}
                <p>Are you sure you want to delete the file request &quot;<span id="deleteModalBodyName" class="fw-bold"></span>&quot;?<br><br>
                    This also permanently deletes <span id="deleteModalBodyCount" class="fw-bold"></span> associated file(s) and cannot be undone.</p>
            </div>
            <div class="modal-footer">
                <button type="button" class="btn btn-outline-light" data-bs-dismiss="modal">Cancel</button>
                <button type="button" id="buttonDelete" class="btn btn-danger">Delete</button>
            </div>
        </div>
    </div>
</div>
{{ end }}
{{ define "urequest_modal_addedit" }}
{{/* Add/edit dialog for a file request. One modal serves both modes: admin JS
     (editFileRequest / the add path) sets the title in #m_urequestlabel, prefills
     the inputs, stores the request id in #freqId (empty when creating) and attaches
     the save handler to #b_fr_save — its onclick is left empty here on purpose
     (NOTE(review): handler assignment happens in admin JS; confirm). */}}
<div class="modal fade" id="addEditModal" tabindex="-1" aria-hidden="true">
    <div class="modal-dialog modal-lg gokapi-dialog">
        <div class="modal-content gokapi-dialog">
            <div class="modal-header">
                <h1 class="modal-title fs-5" id="m_urequestlabel">Title</h1>
            </div>
            <div class="modal-body">
                {{/* The &nbsp; runs pad the label texts so all input-group labels line up */}}
                <div class="input-group mb-3">
                    <span class="input-group-text" id="mTitle">Title&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</span>
                    <input type="text" id="mFriendlyName" class="form-control" placeholder="Friendly name" aria-label="Friendly name" aria-describedby="mTitle">
                </div>
                {{/* Each optional limit pairs a checkbox with a disabled input; the checkbox
                     enables/disables its input via data-toggle-target + handleEditCheckboxChange() */}}
                <div class="input-group mb-3">
                    <div class="input-group-text">
                        <input type="checkbox" id="mc_maxfiles" aria-label="Max files" title="Max files" data-toggle-target="mi_maxfiles" onchange="handleEditCheckboxChange(this)">
                    </div>
                    <span class="input-group-text" id="mMaxFiles">Max Files</span>
                    <input type="number" min="1" id="mi_maxfiles" onChange="checkMaxNumber(this)" disabled class="form-control" aria-label="Max Files" aria-describedby="mMaxFiles" data-allow-regular-paste>
                </div>
                <div class="input-group mb-3">
                    <div class="input-group-text">
                        <input type="checkbox" id="mc_maxsize" aria-label="Max Size" title="Max Size"
                               data-toggle-target="mi_maxsize" onchange="handleEditCheckboxChange(this)">
                    </div>
                    <span class="input-group-text" id="tMaxSize">Max Size&nbsp;</span>
                    <input type="number" min="1" id="mi_maxsize" onChange="checkMaxNumber(this)" disabled class="form-control" aria-label="Max Size" aria-describedby="tMaxSize" data-allow-regular-paste>
                    <span class="input-group-text">MB</span>
                </div>
                <div class="input-group mb-3">
                    <div class="input-group-text">
                        {{/* data-timestamp holds the current expiry when editing (set by JS) —
                             NOTE(review): presumed; confirm against editFileRequest() */}}
                        <input id="mc_expiry" type="checkbox" aria-label="Expiry" title="Expiry" data-toggle-target="mi_expiry" data-timestamp="" onchange="handleEditCheckboxChange(this)" >
                    </div>
                    <span class="input-group-text" id="edit_expdate">Expiry&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</span>
                    <input type="text" id="mi_expiry" disabled class="form-control" aria-label="Expiry" aria-describedby="edit_expdate" data-allow-regular-paste>
                </div>
                <div class="input-group mb-3">
                    <span class="input-group-text" id="mdNotes">Notes&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</span>
                    <input type="text" id="mNotes" class="form-control" placeholder="Notes about the request" aria-label="Notes" aria-describedby="mdNotes">
                </div>
            </div>
            {{/* Id of the request being edited; empty means "create new" */}}
            <input type="hidden" id="freqId" value="" />
            {{ if .EndToEndEncryption }}
            {{/* Shown only when the server has E2E encryption enabled, since guest
                 uploads bypass it */}}
            <div class="callout callout-info">
                Uploaded files are not end-to-end encrypted and will be stored in plain text on the server
            </div>
            {{ end }}
            <div class="modal-footer">
                <button type="button" class="btn btn-outline-light" aria-label="Cancel" data-bs-dismiss="modal">Close</button>
                <button type="button" class="btn btn-primary" data-fileid="" id="b_fr_save" onclick="">Save</button>
            </div>
        </div>
    </div>
</div>
{{ end }}
{{ define "uRDownloadbutton" }}
{{/* "Download all" button for a file request row:
     0 files  -> disabled placeholder,
     1 file   -> direct download of the single file (optionally presigned),
     2+ files -> zipped download of every file in the request. */}}
{{ if eq .UploadedFiles 1 }}
<button id="download-{{ .Id }}" type="button" class="btn btn-outline-light btn-sm" onclick="downloadFileWithPresign('{{ (index .FileIdList 0) }}');" title="Download all"><i class="bi bi-download"></i></button>
{{ else if eq .UploadedFiles 0 }}
<button id="download-{{ .Id }}" type="button" class="btn btn-outline-light btn-sm disabled" title="Download all"><i class="bi bi-download"></i></button>
{{ else }}
<button id="download-{{ .Id }}" type="button" class="btn btn-outline-light btn-sm" onclick="downloadFilesZipWithPresign('{{ .GetFilesAsString }}', '{{ .Name }}');" title="Download all"><i class="bi bi-download"></i></button>
{{ end }}
{{ end }}
{{ define "uRFileCell" }}
{{/* Table cell showing file counts for a request:
     uploaded count, "+N" for reserved (in-progress) uploads when non-zero,
     and "/ limit" when a maximum file count is set.
     If at least one file was uploaded, a chevron button toggles the collapsible
     file list row (#collapse-filelist-<id>) via Bootstrap's collapse plugin. */}}
<td>
    <span title="Uploaded files">{{ .UploadedFiles }}</span>{{ if ne .ReservedUploads 0 }}<span title="Active uploads">+{{.ReservedUploads}}</span>{{end}}{{ if ne .MaxFiles 0 }} / <span title="File limit">{{ .MaxFiles }}</span>{{end}}
    {{ if gt .UploadedFiles 0 }}
    <button
            class="btn btn-sm btn-link text-light p-0 collapse-toggle"
            data-bs-toggle="collapse"
            data-bs-target="#collapse-filelist-{{ .Id }}"
            aria-expanded="false"
            title = "Expand / Collapse"
            aria-controls="collapse-filelist-{{ .Id }}">
        <i class="bi bi-chevron-down"></i>
    </button>
    {{end}}
</td>
{{ end }}
@@ -40,6 +40,9 @@
<td>{{ .UploadCount }}</td>
<td class="prevent-select">
<i id="perm_guest_upload_{{ .User.Id }}" class="bi bi-box-arrow-in-down {{if not .User.HasPermissionCreateFileRequests}}perm-notgranted{{else}}perm-granted{{end}} {{if or (eq .User.UserLevel 0) (eq .User.Id $.ActiveUser.Id)}}perm-nochange{{end}}" title="Create file requests" onclick='changeUserPermission("{{ .User.Id }}","PERM_GUEST_UPLOAD", "perm_guest_upload_{{ .User.Id }}");'></i>
<i id="perm_replace_{{ .User.Id }}" class="bi bi-recycle {{if not .User.HasPermissionReplace}}perm-notgranted{{else}}perm-granted{{end}} {{if or (eq .User.UserLevel 0) (eq .User.Id $.ActiveUser.Id)}}perm-nochange{{end}}" title="Replace own uploads" onclick='changeUserPermission("{{ .User.Id }}","PERM_REPLACE", "perm_replace_{{ .User.Id }}");'></i>
<i id="perm_list_{{ .User.Id }}" class="bi bi-eye {{if not .User.HasPermissionListOtherUploads}}perm-notgranted{{else}}perm-granted{{end}} {{if or (eq .User.UserLevel 0) (eq .User.Id $.ActiveUser.Id)}}perm-nochange{{end}}" title="List other uploads" onclick='changeUserPermission("{{ .User.Id }}","PERM_LIST", "perm_list_{{ .User.Id }}");'></i>
@@ -56,6 +59,7 @@
<i id="perm_api_{{ .User.Id }}" class="bi bi-sliders2 {{if not .User.HasPermissionManageApi}}perm-notgranted{{else}}perm-granted{{end}} {{if or (eq .User.UserLevel 0) (eq .User.Id $.ActiveUser.Id)}}perm-nochange{{end}}" title="Manage API keys" onclick='changeUserPermission("{{ .User.Id }}","PERM_API", "perm_api_{{ .User.Id }}");'></i>
</td>
<td>
<div class="btn-group" role="group">
@@ -72,7 +76,7 @@
{{ end }}
<button id="delete-{{ .User.Id }}" type="button" class="btn btn-outline-danger btn-sm" {{if or (eq .User.UserLevel 0) (eq .User.Id $.ActiveUser.Id)}}disabled{{end}} onclick="showDeleteModal('{{ .User.Id }}', '{{ .User.Name }}')" title="Delete"><i class="bi bi-trash3"></i></button>
<button id="delete-{{ .User.Id }}" type="button" class="btn btn-outline-danger btn-sm" {{if or (eq .User.UserLevel 0) (eq .User.Id $.ActiveUser.Id)}}disabled{{end}} onclick="showDeleteUserModal('{{ .User.Id }}', '{{ .User.Name }}')" title="Delete"><i class="bi bi-trash3"></i></button>
</div></td>
</tr>
{{ end }}
@@ -1,5 +1,5 @@
// File contains auto-generated values. Do not change manually
{{define "version"}}2.1.0{{end}}
{{define "version"}}2.2.0-dev{{end}}
// Specifies the version of JS files, so that the browser doesn't
// use a cached version, if the file has been updated

Some files were not shown because too many files have changed in this diff Show More