From 44260f012b59e58240a9a1f2de8ae45cbf60e13b Mon Sep 17 00:00:00 2001
From: Marc Ole Bulling
Date: Mon, 25 Jul 2022 13:21:38 +0200
Subject: [PATCH] Use chunked uploads instead of single upload (#68)
* Change to chunked uploads
* Renamed SHA256 attribute to SHA1
* Breaking: removed /upload endpoint
* Changed timeout to 15 minutes for reading
* Change session key only after 12 hours
* Added API calls for chunk upload, refactoring, added tests
* Add content-type based on file extension for images if empty
---
build/go.mod | 3 +
cmd/gokapi/Main.go | 4 +-
go.mod | 2 +
go.sum | 3 +
internal/configuration/Configuration.go | 4 +-
internal/configuration/Configuration_test.go | 9 +-
.../configuration/configupgrade/Upgrade.go | 132 ++-----
.../configupgrade/Upgrade_test.go | 67 ++--
internal/configuration/database/Database.go | 27 +-
.../configuration/database/Database_test.go | 5 +
.../configuration/setup/templates/setup.tmpl | 8 +-
internal/encryption/Encryption.go | 24 ++
.../environment/flagparser/FlagParser_test.go | 1 +
internal/helper/StringGeneration.go | 1 -
internal/models/FileList.go | 7 +-
internal/models/FileList_test.go | 17 +-
internal/models/FileUpload.go | 1 -
internal/storage/FileServing.go | 303 ++++++++++++---
internal/storage/FileServing_test.go | 133 ++++++-
internal/storage/chunking/Chunking.go | 201 ++++++++++
internal/storage/chunking/Chunking_test.go | 361 ++++++++++++++++++
internal/storage/cloudstorage/aws/AwsS3.go | 12 +-
.../storage/cloudstorage/aws/AwsS3_mock.go | 6 +-
.../storage/cloudstorage/aws/AwsS3_test.go | 8 +-
internal/test/TestHelper.go | 60 ++-
internal/test/TestHelper_test.go | 69 +++-
.../testconfiguration/TestConfiguration.go | 242 +-----------
.../TestConfiguration_test.go | 13 -
internal/webserver/Webserver.go | 48 ++-
internal/webserver/Webserver_test.go | 61 ++-
internal/webserver/api/Api.go | 152 +++++---
internal/webserver/api/Api_test.go | 92 +++++
.../sessionmanager/SessionManager.go | 2 +-
.../downloadstatus/DownloadStatus_test.go | 2 +-
internal/webserver/fileupload/FileUpload.go | 61 ++-
.../webserver/fileupload/FileUpload_test.go | 86 ++++-
.../web/static/apidocumentation/openapi.json | 159 +++++++-
internal/webserver/web/static/js/admin.js | 81 +++-
.../webserver/web/templates/html_admin.tmpl | 10 +-
.../web/templates/string_constants.tmpl | 2 +-
openapi.json | 159 +++++++-
41 files changed, 2040 insertions(+), 598 deletions(-)
create mode 100644 internal/environment/flagparser/FlagParser_test.go
create mode 100644 internal/storage/chunking/Chunking.go
create mode 100644 internal/storage/chunking/Chunking_test.go
diff --git a/build/go.mod b/build/go.mod
index 46a1086..9ec0732 100644
--- a/build/go.mod
+++ b/build/go.mod
@@ -7,10 +7,13 @@ require (
github.com/NYTimes/gziphandler v1.1.1
github.com/aws/aws-sdk-go v1.42.22
github.com/caarlos0/env/v6 v6.9.1
+ github.com/jinzhu/copier v0.3.5
github.com/johannesboyne/gofakes3 v0.0.0-20210415062230-4b6b67a85d38
+ github.com/juju/ratelimit v1.0.2
github.com/secure-io/sio-go v0.3.1
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad
golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602
+ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b
)
diff --git a/cmd/gokapi/Main.go b/cmd/gokapi/Main.go
index 7fde19d..9fa4a1d 100644
--- a/cmd/gokapi/Main.go
+++ b/cmd/gokapi/Main.go
@@ -29,9 +29,9 @@ import (
// Version is the current version in readable form.
// The go generate call below needs to be modified as well
-const Version = "1.5.2"
+const Version = "1.6.0"
-//go:generate sh "../../build/setVersionTemplate.sh" "1.5.2"
+//go:generate sh "../../build/setVersionTemplate.sh" "1.6.0"
//go:generate sh -c "cp \"$(go env GOROOT)/misc/wasm/wasm_exec.js\" ../../internal/webserver/web/static/js/ && echo Copied wasm_exec.js"
//go:generate sh -c "GOOS=js GOARCH=wasm go build -o ../../internal/webserver/web/main.wasm github.com/forceu/gokapi/cmd/wasmdownloader && echo Compiled WASM module"
diff --git a/go.mod b/go.mod
index 41af52d..9ec0732 100644
--- a/go.mod
+++ b/go.mod
@@ -9,9 +9,11 @@ require (
github.com/caarlos0/env/v6 v6.9.1
github.com/jinzhu/copier v0.3.5
github.com/johannesboyne/gofakes3 v0.0.0-20210415062230-4b6b67a85d38
+ github.com/juju/ratelimit v1.0.2
github.com/secure-io/sio-go v0.3.1
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad
golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602
+ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b
)
diff --git a/go.sum b/go.sum
index 835fb77..002078f 100644
--- a/go.sum
+++ b/go.sum
@@ -220,6 +220,8 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
+github.com/juju/ratelimit v1.0.2 h1:sRxmtRiajbvrcLQT7S+JbqU0ntsb9W2yhSdNN8tWfaI=
+github.com/juju/ratelimit v1.0.2/go.mod h1:qapgC/Gy+xNh9UxzV13HGGl/6UXNN+ct+vwSgWNm/qk=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
@@ -468,6 +470,7 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
diff --git a/internal/configuration/Configuration.go b/internal/configuration/Configuration.go
index 69020c0..c909fef 100644
--- a/internal/configuration/Configuration.go
+++ b/internal/configuration/Configuration.go
@@ -118,7 +118,7 @@ func LoadFromSetup(config models.Configuration, cloudConfig *cloudconfig.CloudCo
Load()
}
-// HashPassword hashes a string with SHA256 the file salt or admin user salt
+// HashPassword hashes a string with SHA1 and the file salt or admin user salt
func HashPassword(password string, useFileSalt bool) string {
if useFileSalt {
return HashPasswordCustomSalt(password, serverSettings.Authentication.SaltFiles)
@@ -126,7 +126,7 @@ func HashPassword(password string, useFileSalt bool) string {
return HashPasswordCustomSalt(password, serverSettings.Authentication.SaltAdmin)
}
-// HashPasswordCustomSalt hashes a password with SHA256 and the provided salt
+// HashPasswordCustomSalt hashes a password with SHA1 and the provided salt
func HashPasswordCustomSalt(password, salt string) string {
if password == "" {
return ""
diff --git a/internal/configuration/Configuration_test.go b/internal/configuration/Configuration_test.go
index 9f1de3e..ca5aadb 100644
--- a/internal/configuration/Configuration_test.go
+++ b/internal/configuration/Configuration_test.go
@@ -34,8 +34,6 @@ func TestLoad(t *testing.T) {
test.IsEqualInt(t, serverSettings.MaxMemory, 80)
os.Unsetenv("GOKAPI_MAX_MEMORY_UPLOAD")
Load()
- testconfiguration.WriteUpgradeConfigFileV8()
- Load()
test.IsEqualInt(t, serverSettings.ConfigVersion, configupgrade.CurrentConfigVersion)
testconfiguration.Create(false)
Load()
@@ -87,3 +85,10 @@ func TestLoadFromSetup(t *testing.T) {
test.IsEqualString(t, config.Aws.KeyId, "keyid")
test.IsEqualString(t, serverSettings.ServerUrl, "serverurl")
}
+
+func TestUsesHttps(t *testing.T) {
+ usesHttps = false
+ test.IsEqualBool(t, UsesHttps(), false)
+ usesHttps = true
+ test.IsEqualBool(t, UsesHttps(), true)
+}
diff --git a/internal/configuration/configupgrade/Upgrade.go b/internal/configuration/configupgrade/Upgrade.go
index 11cc801..e6df75d 100644
--- a/internal/configuration/configupgrade/Upgrade.go
+++ b/internal/configuration/configupgrade/Upgrade.go
@@ -1,18 +1,19 @@
package configupgrade
import (
- "encoding/json"
+ "bytes"
+ "encoding/gob"
"fmt"
"github.com/forceu/gokapi/internal/configuration/database"
"github.com/forceu/gokapi/internal/environment"
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
+ "github.com/jinzhu/copier"
"os"
- "time"
)
// CurrentConfigVersion is the version of the configuration structure. Used for upgrading
-const CurrentConfigVersion = 11
+const CurrentConfigVersion = 12
// DoUpgrade checks if an old version is present and updates it to the current version if required
func DoUpgrade(settings *models.Configuration, env *environment.Environment) bool {
@@ -28,107 +29,56 @@ func DoUpgrade(settings *models.Configuration, env *environment.Environment) boo
// Upgrades the settings if saved with a previous version
func updateConfig(settings *models.Configuration, env *environment.Environment) {
- // < v1.2.0
- if settings.ConfigVersion < 6 {
- fmt.Println("Please update to version 1.2 before running this version,")
+ // < v1.5.0
+ if settings.ConfigVersion < 11 {
+ fmt.Println("Please update to version 1.5 before running this version,")
osExit(1)
return
}
- // < v1.3.0
- if settings.ConfigVersion < 7 {
- settings.UseSsl = false
- }
- // < v1.3.1
- if settings.ConfigVersion < 8 {
- settings.MaxFileSizeMB = env.MaxFileSize
- }
- // < v1.5.0-dev1
- if settings.ConfigVersion < 10 {
- settings.Authentication.Method = 0 // authentication.AuthenticationInternal
- settings.Authentication.HeaderUsers = []string{}
- settings.Authentication.OauthUsers = []string{}
- legacyConfig := loadLegacyConfigPreAuth(env)
- settings.Authentication.Username = legacyConfig.AdminName
- settings.Authentication.Password = legacyConfig.AdminPassword
- if legacyConfig.SaltAdmin != "" {
- settings.Authentication.SaltAdmin = legacyConfig.SaltAdmin
- }
- if legacyConfig.SaltFiles != "" {
- settings.Authentication.SaltFiles = legacyConfig.SaltFiles
- }
- }
- // < v1.5.0-dev2
- if settings.ConfigVersion < 11 {
- legacyConfig := loadLegacyConfigPreDb(env)
- uploadValues := models.LastUploadValues{
- Downloads: legacyConfig.DefaultDownloads,
- TimeExpiry: legacyConfig.DefaultExpiry,
- Password: legacyConfig.DefaultPassword,
- }
- database.SaveUploadDefaults(uploadValues)
-
- for _, hotlink := range legacyConfig.Hotlinks {
- database.SaveHotlink(models.File{Id: hotlink.FileId, HotlinkId: hotlink.Id})
- }
- for _, apikey := range legacyConfig.ApiKeys {
- database.SaveApiKey(apikey, false)
- }
- for _, file := range legacyConfig.Files {
+ // < v1.6.0
+ if settings.ConfigVersion < 12 {
+ keys := database.GetAllMetaDataIds()
+ for _, key := range keys {
+ raw, ok := database.GetRawKey("file:id:" + key)
+ if !ok {
+ panic("could not read raw key for upgrade")
+ }
+ file := legacyFileToCurrentFile(raw)
database.SaveMetaData(file)
}
- for key, session := range legacyConfig.Sessions {
- database.SaveSession(key, session, 48*time.Hour)
- }
}
}
-func loadLegacyConfigPreAuth(env *environment.Environment) configurationLegacyPreAuth {
- file, err := os.Open(env.ConfigPath)
- defer file.Close()
+func legacyFileToCurrentFile(input []byte) models.File {
+ oldFile := legacyFile{}
+ buf := bytes.NewBuffer(input)
+ dec := gob.NewDecoder(buf)
+ err := dec.Decode(&oldFile)
helper.Check(err)
- decoder := json.NewDecoder(file)
-
- result := configurationLegacyPreAuth{}
- err = decoder.Decode(&result)
+ result := models.File{}
+ err = copier.Copy(&result, oldFile)
helper.Check(err)
+ result.SHA1 = oldFile.SHA256
return result
}
-func loadLegacyConfigPreDb(env *environment.Environment) configurationLegacyPreDb {
- file, err := os.Open(env.ConfigPath)
- defer file.Close()
- helper.Check(err)
- decoder := json.NewDecoder(file)
-
- result := configurationLegacyPreDb{}
- err = decoder.Decode(&result)
- helper.Check(err)
- return result
-}
-
-// configurationLegacyPreAuth is a struct that contains missing values for the global configuration when loading pre v1.5-dev format
-type configurationLegacyPreAuth struct {
- AdminName string `json:"AdminName"`
- AdminPassword string `json:"AdminPassword"`
- SaltAdmin string `json:"SaltAdmin"`
- SaltFiles string `json:"SaltFiles"`
-}
-
-// configurationLegacyPreAuth is a struct that contains missing values for the global configuration when loading pre v1.5 format
-type configurationLegacyPreDb struct {
- DefaultDownloads int `json:"DefaultDownloads"`
- DefaultExpiry int `json:"DefaultExpiry"`
- DefaultPassword string `json:"DefaultPassword"`
- Files map[string]models.File `json:"Files"`
- Hotlinks map[string]Hotlink `json:"Hotlinks"`
- ApiKeys map[string]models.ApiKey `json:"ApiKeys"`
- Sessions map[string]models.Session `json:"Sessions"`
-}
-
-// Hotlink is a legacy struct containing hotlink ids
-type Hotlink struct {
- Id string `json:"Id"`
- FileId string `json:"FileId"`
+type legacyFile struct {
+ Id string `json:"Id"`
+ Name string `json:"Name"`
+ Size string `json:"Size"`
+ SHA256 string `json:"SHA256"`
+ ExpireAt int64 `json:"ExpireAt"`
+ ExpireAtString string `json:"ExpireAtString"`
+ DownloadsRemaining int `json:"DownloadsRemaining"`
+ DownloadCount int `json:"DownloadCount"`
+ PasswordHash string `json:"PasswordHash"`
+ HotlinkId string `json:"HotlinkId"`
+ ContentType string `json:"ContentType"`
+ AwsBucket string `json:"AwsBucket"`
+ Encryption models.EncryptionInfo `json:"Encryption"`
+ UnlimitedDownloads bool `json:"UnlimitedDownloads"`
+ UnlimitedTime bool `json:"UnlimitedTime"`
+ RequiresClientSideDecryption bool `json:"RequiresClientSideDecryption"`
}
var osExit = os.Exit
diff --git a/internal/configuration/configupgrade/Upgrade_test.go b/internal/configuration/configupgrade/Upgrade_test.go
index 089d194..7ee4ce2 100644
--- a/internal/configuration/configupgrade/Upgrade_test.go
+++ b/internal/configuration/configupgrade/Upgrade_test.go
@@ -2,7 +2,6 @@ package configupgrade
import (
"github.com/forceu/gokapi/internal/configuration/database"
- "github.com/forceu/gokapi/internal/environment"
"github.com/forceu/gokapi/internal/models"
"github.com/forceu/gokapi/internal/test"
"github.com/forceu/gokapi/internal/test/testconfiguration"
@@ -10,13 +9,6 @@ import (
"testing"
)
-var oldConfigFile = models.Configuration{
- Authentication: models.AuthenticationConfig{},
- Port: "127.0.0.1:53844",
- ServerUrl: "https://gokapi.url/",
- RedirectUrl: "https://github.com/Forceu/Gokapi/",
-}
-
func TestMain(m *testing.M) {
testconfiguration.Create(false)
exitVal := m.Run()
@@ -24,33 +16,34 @@ func TestMain(m *testing.M) {
os.Exit(exitVal)
}
-func TestUpgradeDb(t *testing.T) {
- testconfiguration.WriteUpgradeConfigFileV0()
- os.Setenv("GOKAPI_MAX_FILESIZE", "5")
-
- env := environment.New()
- bufferConfig := oldConfigFile
- wasExit := false
- osExit = func(code int) {
- wasExit = true
- }
- _ = DoUpgrade(&bufferConfig, &env)
- test.IsEqualBool(t, wasExit, true)
-
- oldConfigFile.ConfigVersion = 8
- database.Init("./test/filestorage.db")
- testconfiguration.WriteUpgradeConfigFileV8()
- upgradeDone := DoUpgrade(&oldConfigFile, &env)
- test.IsEqualBool(t, upgradeDone, true)
- test.IsEqualString(t, oldConfigFile.Authentication.SaltAdmin, "LW6fW4Pjv8GtdWVLSZD66gYEev6NAaXxOVBw7C")
- test.IsEqualString(t, oldConfigFile.Authentication.SaltFiles, "lL5wMTtnVCn5TPbpRaSe4vAQodWW0hgk00WCZE")
- // TODO write further tests
- os.Unsetenv("GOKAPI_MAX_FILESIZE")
- oldConfigFile.ConfigVersion = CurrentConfigVersion
- upgradeDone = DoUpgrade(&oldConfigFile, &env)
- test.IsEqualBool(t, upgradeDone, false)
- oldConfigFile.ConfigVersion = 6
- upgradeDone = DoUpgrade(&oldConfigFile, &env)
- test.IsEqualBool(t, upgradeDone, true)
- test.IsEqualBool(t, oldConfigFile.UseSsl, false)
+var oldConfigFile = models.Configuration{
+ Authentication: models.AuthenticationConfig{},
+ Port: "127.0.0.1:53844",
+ ServerUrl: "https://gokapi.url/",
+ RedirectUrl: "https://github.com/Forceu/Gokapi/",
+}
+
+func TestUpgradeDb(t *testing.T) {
+ exitCode := 0
+ osExit = func(code int) {
+ exitCode = code
+ }
+ oldConfigFile.ConfigVersion = 10
+ upgradeDone := DoUpgrade(&oldConfigFile, nil)
+ test.IsEqualBool(t, upgradeDone, true)
+ test.IsEqualInt(t, exitCode, 1)
+
+ database.Init("./test/filestorage.db")
+ exitCode = 0
+ oldConfigFile.ConfigVersion = 11
+ upgradeDone = DoUpgrade(&oldConfigFile, nil)
+ test.IsEqualBool(t, upgradeDone, true)
+ test.IsEqualInt(t, exitCode, 0)
+
+ exitCode = 0
+ oldConfigFile.ConfigVersion = CurrentConfigVersion
+ upgradeDone = DoUpgrade(&oldConfigFile, nil)
+ test.IsEqualBool(t, upgradeDone, false)
+ test.IsEqualInt(t, exitCode, 0)
+
}
diff --git a/internal/configuration/database/Database.go b/internal/configuration/database/Database.go
index a2c428d..5ee4ec8 100644
--- a/internal/configuration/database/Database.go
+++ b/internal/configuration/database/Database.go
@@ -69,14 +69,7 @@ func GetAllMetadata() map[string]models.File {
panic("Database not loaded!")
}
result := make(map[string]models.File)
- var keys []string
- err := bitcaskDb.Scan([]byte(prefixFile), func(key []byte) error {
- fileId := strings.Replace(string(key), prefixFile, "", 1)
- keys = append(keys, fileId)
- return nil
- })
-
- helper.Check(err)
+ keys := GetAllMetaDataIds()
for _, key := range keys {
file, ok := GetMetaDataById(key)
@@ -88,6 +81,20 @@ func GetAllMetadata() map[string]models.File {
return result
}
+func GetAllMetaDataIds() []string {
+ if bitcaskDb == nil {
+ panic("Database not loaded!")
+ }
+ var keys []string
+ err := bitcaskDb.Scan([]byte(prefixFile), func(key []byte) error {
+ fileId := strings.Replace(string(key), prefixFile, "", 1)
+ keys = append(keys, fileId)
+ return nil
+ })
+ helper.Check(err)
+ return keys
+}
+
// GetMetaDataById returns a models.File,true from the ID passed or false if the id is not valid
func GetMetaDataById(id string) (models.File, bool) {
result := models.File{}
@@ -319,6 +326,10 @@ func getValue(id string) ([]byte, bool) {
panic(err)
}
+func GetRawKey(id string) ([]byte, bool) {
+ return getValue(id)
+}
+
func expiryToDuration(file models.File) time.Duration {
return time.Until(time.Unix(file.ExpireAt, 0))
}
diff --git a/internal/configuration/database/Database_test.go b/internal/configuration/database/Database_test.go
index 2d7c846..11e5556 100644
--- a/internal/configuration/database/Database_test.go
+++ b/internal/configuration/database/Database_test.go
@@ -68,6 +68,11 @@ func TestHotlink(t *testing.T) {
DeleteHotlink("testlink")
_, ok = GetHotlink("testlink")
test.IsEqualBool(t, ok, false)
+
+ SaveHotlink(models.File{Id: "testhfile", Name: "testh.txt", HotlinkId: "testlink", ExpireAt: 0, UnlimitedTime: true})
+ hotlink, ok = GetHotlink("testlink")
+ test.IsEqualBool(t, ok, true)
+ test.IsEqualString(t, hotlink, "testhfile")
}
func TestApiKey(t *testing.T) {
diff --git a/internal/configuration/setup/templates/setup.tmpl b/internal/configuration/setup/templates/setup.tmpl
index 90a0e3a..cb12e23 100644
--- a/internal/configuration/setup/templates/setup.tmpl
+++ b/internal/configuration/setup/templates/setup.tmpl
@@ -265,7 +265,8 @@
/apiKeys
/apiNew
/delete
- /upload
+ /uploadChunk
+ /uploadComplete
Only proceed if you know what you are doing!
@@ -422,6 +423,7 @@ function TestAWS(button) {
Does not support download progress bar
Gokapi starts without user input
Warning: Password can be read with access to Gokapi configuration
+ Warning: During upload temporary files containing the plaintext content may be created.
Warning: Encryption has not been audited.
@@ -435,6 +437,7 @@ function TestAWS(button) {
Does not support download progress bar
Password cannot be read with access to Gokapi configuration
Warning: Gokapi requires user input to start
+ Warning: During upload temporary files containing the plaintext content may be created.
Warning: Encryption has not been audited.
@@ -450,6 +453,7 @@ function TestAWS(button) {
Gokapi starts without user input
Important: For remote storage, CORS settings for the bucket need to allow access from the Gokapi URL
Warning: Password can be read with access to Gokapi configuration
+ Warning: During upload temporary files containing the plaintext content may be created.
Warning: Encryption has not been audited.
@@ -465,6 +469,7 @@ function TestAWS(button) {
Password cannot be read with access to Gokapi configuration
Important: For remote storage, CORS settings for the bucket need to allow access from the Gokapi URL
Warning: Gokapi requires user input to start
+ Warning: During upload temporary files containing the plaintext content may be created.
Warning: Encryption has not been audited.
@@ -479,6 +484,7 @@ function TestAWS(button) {
Does not support download progress bar
Gokapi starts without user input
Password cannot be read with access to Gokapi configuration
+ Warning: During upload temporary files containing the plaintext content may be created.
Warning: Encryption has not been audited.
diff --git a/internal/encryption/Encryption.go b/internal/encryption/Encryption.go
index b01b705..607b82a 100644
--- a/internal/encryption/Encryption.go
+++ b/internal/encryption/Encryption.go
@@ -294,3 +294,27 @@ func GetRandomCipher() ([]byte, error) {
func GetRandomNonce() ([]byte, error) {
return getRandomData(nonceSize)
}
+
+type AutoGenerated struct {
+ Result string `json:"Result"`
+ FileInfo struct {
+ ID string `json:"Id"`
+ Name string `json:"Name"`
+ Size string `json:"Size"`
+ HotlinkID string `json:"HotlinkId"`
+ ContentType string `json:"ContentType"`
+ ExpireAt int `json:"ExpireAt"`
+ ExpireAtString string `json:"ExpireAtString"`
+ DownloadsRemaining int `json:"DownloadsRemaining"`
+ DownloadCount int `json:"DownloadCount"`
+ UnlimitedDownloads bool `json:"UnlimitedDownloads"`
+ UnlimitedTime bool `json:"UnlimitedTime"`
+ RequiresClientSideDecryption bool `json:"RequiresClientSideDecryption"`
+ IsEncrypted bool `json:"IsEncrypted"`
+ IsPasswordProtected bool `json:"IsPasswordProtected"`
+ IsSavedOnLocalStorage bool `json:"IsSavedOnLocalStorage"`
+ } `json:"FileInfo"`
+ URL string `json:"Url"`
+ HotlinkURL string `json:"HotlinkUrl"`
+ GenericHotlinkURL string `json:"GenericHotlinkUrl"`
+}
diff --git a/internal/environment/flagparser/FlagParser_test.go b/internal/environment/flagparser/FlagParser_test.go
new file mode 100644
index 0000000..dba4dc2
--- /dev/null
+++ b/internal/environment/flagparser/FlagParser_test.go
@@ -0,0 +1 @@
+package flagparser
diff --git a/internal/helper/StringGeneration.go b/internal/helper/StringGeneration.go
index 947d5f4..f769593 100644
--- a/internal/helper/StringGeneration.go
+++ b/internal/helper/StringGeneration.go
@@ -69,5 +69,4 @@ func cleanRandomString(input string) string {
reg, err := regexp.Compile("[^a-zA-Z0-9]+")
Check(err)
return reg.ReplaceAllString(input, "")
-
}
diff --git a/internal/models/FileList.go b/internal/models/FileList.go
index fbc172a..2a0cefb 100644
--- a/internal/models/FileList.go
+++ b/internal/models/FileList.go
@@ -11,7 +11,7 @@ type File struct {
Id string `json:"Id"`
Name string `json:"Name"`
Size string `json:"Size"`
- SHA256 string `json:"SHA256"`
+ SHA1 string `json:"SHA1"`
ExpireAt int64 `json:"ExpireAt"`
ExpireAtString string `json:"ExpireAtString"`
DownloadsRemaining int `json:"DownloadsRemaining"`
@@ -31,7 +31,6 @@ type FileApiOutput struct {
Id string `json:"Id"`
Name string `json:"Name"`
Size string `json:"Size"`
- SHA256 string `json:"SHA256"`
HotlinkId string `json:"HotlinkId"`
ContentType string `json:"ContentType"`
ExpireAt int64 `json:"ExpireAt"`
@@ -53,6 +52,10 @@ type EncryptionInfo struct {
Nonce []byte `json:"Nonce"`
}
+func (f *File) IsLocalStorage() bool {
+ return f.AwsBucket == ""
+}
+
func (f *File) ToFileApiOutput() (FileApiOutput, error) {
var result FileApiOutput
err := copier.Copy(&result, &f)
diff --git a/internal/models/FileList_test.go b/internal/models/FileList_test.go
index 872e7b5..703ad3f 100644
--- a/internal/models/FileList_test.go
+++ b/internal/models/FileList_test.go
@@ -1,6 +1,7 @@
package models
import (
+ "errors"
"github.com/forceu/gokapi/internal/test"
"testing"
)
@@ -10,7 +11,7 @@ func TestToJsonResult(t *testing.T) {
Id: "testId",
Name: "testName",
Size: "10 B",
- SHA256: "sha256",
+ SHA1: "sha256",
ExpireAt: 50,
ExpireAtString: "future",
DownloadsRemaining: 1,
@@ -27,5 +28,17 @@ func TestToJsonResult(t *testing.T) {
UnlimitedDownloads: true,
UnlimitedTime: true,
}
- test.IsEqualString(t, file.ToJsonResult("serverurl/"), `{"Result":"OK","FileInfo":{"Id":"testId","Name":"testName","Size":"10 B","SHA256":"sha256","HotlinkId":"hotlinkid","ContentType":"text/html","ExpireAt":50,"ExpireAtString":"future","DownloadsRemaining":1,"DownloadCount":3,"UnlimitedDownloads":true,"UnlimitedTime":true,"RequiresClientSideDecryption":false,"IsEncrypted":true,"IsPasswordProtected":true,"IsSavedOnLocalStorage":false},"Url":"serverurl/d?id=","HotlinkUrl":"serverurl/hotlink/","GenericHotlinkUrl":"serverurl/downloadFile?id="}`)
+ test.IsEqualString(t, file.ToJsonResult("serverurl/"), `{"Result":"OK","FileInfo":{"Id":"testId","Name":"testName","Size":"10 B","HotlinkId":"hotlinkid","ContentType":"text/html","ExpireAt":50,"ExpireAtString":"future","DownloadsRemaining":1,"DownloadCount":3,"UnlimitedDownloads":true,"UnlimitedTime":true,"RequiresClientSideDecryption":false,"IsEncrypted":true,"IsPasswordProtected":true,"IsSavedOnLocalStorage":false},"Url":"serverurl/d?id=","HotlinkUrl":"serverurl/hotlink/","GenericHotlinkUrl":"serverurl/downloadFile?id="}`)
+}
+
+func TestIsLocalStorage(t *testing.T) {
+ file := File{AwsBucket: "123"}
+ test.IsEqualBool(t, file.IsLocalStorage(), false)
+ file.AwsBucket = ""
+ test.IsEqualBool(t, file.IsLocalStorage(), true)
+}
+
+func TestErrorAsJson(t *testing.T) {
+ result := errorAsJson(errors.New("testerror"))
+ test.IsEqualString(t, result, "{\"Result\":\"error\",\"ErrorMessage\":\"testerror\"}")
}
diff --git a/internal/models/FileUpload.go b/internal/models/FileUpload.go
index 700c133..2da40fa 100644
--- a/internal/models/FileUpload.go
+++ b/internal/models/FileUpload.go
@@ -8,7 +8,6 @@ type UploadRequest struct {
Password string
ExternalUrl string
MaxMemory int
- DataDir string
UnlimitedDownload bool
UnlimitedTime bool
}
diff --git a/internal/storage/FileServing.go b/internal/storage/FileServing.go
index 554c851..bb150c8 100644
--- a/internal/storage/FileServing.go
+++ b/internal/storage/FileServing.go
@@ -16,6 +16,7 @@ import (
"github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/logging"
"github.com/forceu/gokapi/internal/models"
+ "github.com/forceu/gokapi/internal/storage/chunking"
"github.com/forceu/gokapi/internal/storage/cloudstorage/aws"
"github.com/forceu/gokapi/internal/webserver/downloadstatus"
"github.com/jinzhu/copier"
@@ -31,41 +32,28 @@ import (
"time"
)
-// NewFile creates a new file in the system. Called after an upload has been completed. If a file with the same sha256 hash
+var ErrorFileTooLarge = errors.New("upload limit exceeded")
+
+// NewFile creates a new file in the system. Called after an upload from the API has been completed. If a file with the same sha1 hash
// already exists, it is deduplicated. This function gathers information about the file, creates an ID and saves
// it into the global configuration.
func NewFile(fileContent io.Reader, fileHeader *multipart.FileHeader, uploadRequest models.UploadRequest) (models.File, error) {
if fileHeader.Size > int64(configuration.Get().MaxFileSizeMB)*1024*1024 {
- return models.File{}, errors.New("upload limit exceeded")
+ return models.File{}, ErrorFileTooLarge
}
var hasBeenRenamed bool
- reader, hash, tempFile, encInfo := generateHash(fileContent, fileHeader, uploadRequest)
+ reader, hash, tempFile, encInfo := generateHash(fileContent, fileHeader)
defer deleteTempFile(tempFile, &hasBeenRenamed)
- id := createNewId()
- file := models.File{
- Id: id,
- Encryption: encInfo,
- Name: fileHeader.Filename,
- SHA256: hex.EncodeToString(hash),
- Size: helper.ByteCountSI(fileHeader.Size),
- ExpireAt: uploadRequest.ExpiryTimestamp,
- ExpireAtString: time.Unix(uploadRequest.ExpiryTimestamp, 0).Format("2006-01-02 15:04"),
- DownloadsRemaining: uploadRequest.AllowedDownloads,
- UnlimitedTime: uploadRequest.UnlimitedTime,
- UnlimitedDownloads: uploadRequest.UnlimitedDownload,
- PasswordHash: configuration.HashPassword(uploadRequest.Password, true),
- ContentType: fileHeader.Header.Get("Content-Type"),
+ header, err := chunking.ParseMultipartHeader(fileHeader)
+ if err != nil {
+ return models.File{}, err
}
- if aws.IsAvailable() {
- if !configuration.Get().PicturesAlwaysLocal || !isPictureFile(file.Name) {
- aws.AddBucketName(&file)
- }
- }
- addHotlink(&file)
- filename := configuration.Get().DataDir + "/" + file.SHA256
+ file := createNewMetaData(hex.EncodeToString(hash), header, uploadRequest)
+ file.Encryption = encInfo
+ filename := configuration.Get().DataDir + "/" + file.SHA1
dataDir := configuration.Get().DataDir
- if file.AwsBucket != "" {
+ if !file.IsLocalStorage() {
exists, size, err := aws.FileExists(file)
if err != nil {
return models.File{}, err
@@ -83,9 +71,9 @@ func NewFile(fileContent io.Reader, fileHeader *multipart.FileHeader, uploadRequ
fileWithHashExists := FileExists(file, configuration.Get().DataDir)
if fileWithHashExists {
encryptionLevel := configuration.Get().Encryption.Level
- previousEncryption, ok := getEncInfoFromExistingFile(file.SHA256)
+ previousEncryption, ok := getEncInfoFromExistingFile(file.SHA1)
if !ok && encryptionLevel != encryption.NoEncryption && encryptionLevel != encryption.EndToEndEncryption {
- err := os.Remove(dataDir + "/" + file.SHA256)
+ err = os.Remove(dataDir + "/" + file.SHA1)
helper.Check(err)
fileWithHashExists = false
} else {
@@ -95,9 +83,9 @@ func NewFile(fileContent io.Reader, fileHeader *multipart.FileHeader, uploadRequ
if !fileWithHashExists {
if tempFile != nil {
- err := tempFile.Close()
+ err = tempFile.Close()
helper.Check(err)
- err = os.Rename(tempFile.Name(), dataDir+"/"+file.SHA256)
+ err = os.Rename(tempFile.Name(), dataDir+"/"+file.SHA1)
helper.Check(err)
hasBeenRenamed = true
database.SaveMetaData(file)
@@ -117,6 +105,178 @@ func NewFile(fileContent io.Reader, fileHeader *multipart.FileHeader, uploadRequ
return file, nil
}
+func validateChunkInfo(file *os.File, fileHeader chunking.FileHeader) error {
+ maxFileSizeB := int64(configuration.Get().MaxFileSizeMB) * 1024 * 1024
+ if fileHeader.Size > maxFileSizeB {
+ return ErrorFileTooLarge
+ }
+ size, err := helper.GetFileSize(file)
+ if err != nil {
+ return err
+ }
+ if size != fileHeader.Size {
+ return errors.New("total filesize does not match")
+ }
+ return nil
+}
+
+func NewFileFromChunk(chunkId string, fileHeader chunking.FileHeader, uploadRequest models.UploadRequest) (models.File, error) {
+ if chunkId == "" {
+ return models.File{}, errors.New("empty chunk id provided")
+ }
+ if !helper.FileExists(configuration.Get().DataDir + "/chunk-" + chunkId) {
+ time.Sleep(1 * time.Second)
+ return models.File{}, errors.New("chunk file does not exist")
+ }
+ file, err := chunking.GetFileByChunkId(chunkId)
+ if err != nil {
+ return models.File{}, err
+ }
+ defer file.Close()
+ err = validateChunkInfo(file, fileHeader)
+ if err != nil {
+ return models.File{}, err
+ }
+
+ hash, err := hashFile(file, isEncryptionRequested())
+ if err != nil {
+ _ = file.Close()
+ return models.File{}, err
+ }
+
+ metaData := createNewMetaData(hash, fileHeader, uploadRequest)
+
+ fileExists := FileExists(metaData, configuration.Get().DataDir)
+ if fileExists {
+ encryptionLevel := configuration.Get().Encryption.Level
+ previousEncryption, ok := getEncInfoFromExistingFile(metaData.SHA1)
+ if !ok && encryptionLevel != encryption.NoEncryption && encryptionLevel != encryption.EndToEndEncryption {
+ err = os.Remove(configuration.Get().DataDir + "/" + metaData.SHA1)
+ helper.Check(err)
+ fileExists = false
+ } else {
+ metaData.Encryption = previousEncryption
+ }
+ }
+
+ if fileExists {
+ err = file.Close()
+ if err != nil {
+ return models.File{}, err
+ }
+ err = os.Remove(file.Name())
+ if err != nil {
+ return models.File{}, err
+ }
+ }
+ if !isEncryptionRequested() {
+ if !fileExists {
+ _, err = file.Seek(0, io.SeekStart)
+ if err != nil {
+ return models.File{}, err
+ }
+ if !metaData.IsLocalStorage() {
+ _, err = aws.Upload(file, metaData)
+ if err != nil {
+ return models.File{}, err
+ }
+ database.SaveMetaData(metaData)
+ err = os.Remove(file.Name())
+ helper.Check(err)
+ return metaData, nil
+ }
+ err = os.Rename(file.Name(), configuration.Get().DataDir+"/"+metaData.SHA1)
+ if err != nil {
+ return models.File{}, err
+ }
+ }
+ database.SaveMetaData(metaData)
+ return metaData, nil
+ }
+ if !fileExists {
+ tempFile, err := encryptChunkFile(file, &metaData)
+ defer func() {
+ _ = file.Close()
+ _ = os.Remove(file.Name())
+ _ = tempFile.Close()
+ _ = os.Remove(tempFile.Name())
+ }()
+ if err != nil {
+ return models.File{}, err
+ }
+ if !metaData.IsLocalStorage() {
+ _, err = aws.Upload(tempFile, metaData)
+ if err != nil {
+ return models.File{}, err
+ }
+ tempFile.Close()
+ database.SaveMetaData(metaData)
+ return metaData, nil
+ }
+ tempFile.Close()
+ err = os.Rename(tempFile.Name(), configuration.Get().DataDir+"/"+metaData.SHA1)
+ if err != nil {
+ return models.File{}, err
+ }
+ }
+
+ database.SaveMetaData(metaData)
+ return metaData, nil
+}
+
+func encryptChunkFile(file *os.File, metadata *models.File) (*os.File, error) {
+ _, err := file.Seek(0, io.SeekStart)
+ if err != nil {
+ return nil, err
+ }
+ tempFileEnc, err := os.CreateTemp(configuration.Get().DataDir, "upload")
+ if err != nil {
+ return nil, err
+ }
+ encInfo := metadata.Encryption
+ err = encryption.Encrypt(&encInfo, file, tempFileEnc)
+ if err != nil {
+ return nil, err
+ }
+ _, err = tempFileEnc.Seek(0, io.SeekStart)
+ if err != nil {
+ return nil, err
+ }
+ metadata.Encryption = encInfo
+ err = file.Close()
+ if err != nil {
+ return nil, err
+ }
+ err = os.Remove(file.Name())
+ if err != nil {
+ return nil, err
+ }
+ return tempFileEnc, nil
+}
+
+func createNewMetaData(hash string, fileHeader chunking.FileHeader, uploadRequest models.UploadRequest) models.File {
+ file := models.File{
+ Id: createNewId(),
+ Name: fileHeader.Filename,
+ SHA1: hash,
+ Size: helper.ByteCountSI(fileHeader.Size),
+ ContentType: fileHeader.ContentType,
+ ExpireAt: uploadRequest.ExpiryTimestamp,
+ ExpireAtString: time.Unix(uploadRequest.ExpiryTimestamp, 0).Format("2006-01-02 15:04"),
+ DownloadsRemaining: uploadRequest.AllowedDownloads,
+ UnlimitedTime: uploadRequest.UnlimitedTime,
+ UnlimitedDownloads: uploadRequest.UnlimitedDownload,
+ PasswordHash: configuration.HashPassword(uploadRequest.Password, true),
+ }
+ if aws.IsAvailable() {
+ if !configuration.Get().PicturesAlwaysLocal || !isPictureFile(file.Name) {
+ aws.AddBucketName(&file)
+ }
+ }
+ addHotlink(&file)
+ return file
+}
+
// createNewId returns a new random file id with the configured length.
func createNewId() string {
	return helper.GenerateRandomString(configuration.Get().LengthId)
}
@@ -128,7 +288,7 @@ func getEncInfoFromExistingFile(hash string) (models.EncryptionInfo, bool) {
}
allFiles := database.GetAllMetadata()
for _, existingFile := range allFiles {
- if existingFile.SHA256 == hash {
+ if existingFile.SHA1 == hash {
return existingFile.Encryption, true
}
}
@@ -198,12 +358,24 @@ func DeleteAllEncrypted() {
}
}
+func hashFile(input io.Reader, useSalt bool) (string, error) {
+ hash := sha1.New()
+ _, err := io.Copy(hash, input)
+ if err != nil {
+ return "", err
+ }
+ if useSalt {
+ hash.Write([]byte(configuration.Get().Authentication.SaltFiles))
+ }
+ return hex.EncodeToString(hash.Sum(nil)), nil
+}
+
// Generates the SHA1 hash of an uploaded file and returns a reader for the file, the hash and if a temporary file was created the
// reference to that file.
-func generateHash(fileContent io.Reader, fileHeader *multipart.FileHeader, uploadRequest models.UploadRequest) (io.Reader, []byte, *os.File, models.EncryptionInfo) {
+func generateHash(fileContent io.Reader, fileHeader *multipart.FileHeader) (io.Reader, []byte, *os.File, models.EncryptionInfo) {
hash := sha1.New()
encInfo := models.EncryptionInfo{}
- if fileHeader.Size <= int64(uploadRequest.MaxMemory)*1024*1024 {
+ if fileHeader.Size <= int64(configuration.Get().MaxMemory)*1024*1024 {
content, err := ioutil.ReadAll(fileContent)
helper.Check(err)
hash.Write(content)
@@ -216,7 +388,7 @@ func generateHash(fileContent io.Reader, fileHeader *multipart.FileHeader, uploa
}
return bytes.NewReader(content), hash.Sum(nil), nil, encInfo
}
- tempFile, err := os.CreateTemp(uploadRequest.DataDir, "upload")
+ tempFile, err := os.CreateTemp(configuration.Get().DataDir, "upload")
helper.Check(err)
var multiWriter io.Writer
@@ -227,7 +399,7 @@ func generateHash(fileContent io.Reader, fileHeader *multipart.FileHeader, uploa
helper.Check(err)
if isEncryptionRequested() {
- tempFileEnc, err := os.CreateTemp(uploadRequest.DataDir, "upload")
+ tempFileEnc, err := os.CreateTemp(configuration.Get().DataDir, "upload")
helper.Check(err)
encryption.Encrypt(&encInfo, tempFile, tempFileEnc)
err = os.Remove(tempFile.Name())
@@ -257,7 +429,7 @@ func isEncryptionRequested() bool {
}
}
-var imageFileExtensions = []string{".jpg", ".jpeg", ".png", ".gif", ".webp", ".bmp", ".svg"}
+var imageFileExtensions = []string{".jpg", ".jpeg", ".png", ".gif", ".webp", ".bmp", ".svg", ".tiff", ".tif", ".ico"}
// If file is an image, create link for hotlinking
func addHotlink(file *models.File) {
@@ -321,7 +493,7 @@ func RequiresClientDecryption(file models.File) bool {
if !file.Encryption.IsEncrypted {
return false
}
- return file.AwsBucket != ""
+ return !file.IsLocalStorage()
}
// ServeFile subtracts a download allowance and serves the file to the browser
@@ -331,8 +503,7 @@ func ServeFile(file models.File, w http.ResponseWriter, r *http.Request, forceDo
database.SaveMetaData(file)
logging.AddDownload(&file, r)
- // If file is stored on AWS
- if file.AwsBucket != "" {
+ if !file.IsLocalStorage() {
// We are not setting a download complete status as there is no reliable way to
// confirm that the file has been completely downloaded. It expires automatically after 24 hours.
downloadstatus.SetDownload(file)
@@ -378,7 +549,7 @@ func writeDownloadHeaders(file models.File, w http.ResponseWriter, forceDownload
}
func getFileHandler(file models.File, dataDir string) (*os.File, int64) {
- storageData, err := os.OpenFile(dataDir+"/"+file.SHA256, os.O_RDONLY, 0644)
+ storageData, err := os.OpenFile(dataDir+"/"+file.SHA1, os.O_RDONLY, 0644)
helper.Check(err)
size, err := helper.GetFileSize(storageData)
helper.Check(err)
@@ -387,15 +558,21 @@ func getFileHandler(file models.File, dataDir string) (*os.File, int64) {
// FileExists checks if the file exists locally or in S3
func FileExists(file models.File, dataDir string) bool {
- if file.AwsBucket != "" {
- result, _, err := aws.FileExists(file)
+ if !file.IsLocalStorage() {
+ exists, size, err := aws.FileExists(file)
if err != nil {
fmt.Println("Warning, cannot check file " + file.Id + ": " + err.Error())
return true
}
- return result
+ if !exists {
+ return false
+ }
+ if size == 0 && file.Size != "0 B" {
+ return false
+ }
+ return true
}
- return helper.FileExists(dataDir + "/" + file.SHA256)
+ return helper.FileExists(dataDir + "/" + file.SHA1)
}
// CleanUp removes expired files from the config and from the filesystem if they are not referenced by other files anymore
@@ -410,7 +587,7 @@ func CleanUp(periodic bool) {
if !fileExists || isExpiredFileWithoutDownload(element, timeNow) {
deleteFile := true
for _, secondLoopElement := range database.GetAllMetadata() {
- if (element.Id != secondLoopElement.Id) && (element.SHA256 == secondLoopElement.SHA256) {
+ if (element.Id != secondLoopElement.Id) && (element.SHA1 == secondLoopElement.SHA1) {
deleteFile = false
}
}
@@ -427,6 +604,8 @@ func CleanUp(periodic bool) {
if wasItemDeleted {
CleanUp(false)
}
+ cleanOldTempFiles()
+
if periodic {
go func() {
select {
@@ -438,6 +617,38 @@ func CleanUp(periodic bool) {
database.RunGarbageCollection()
}
+func cleanOldTempFiles() {
+ tmpfiles, err := os.ReadDir(configuration.Get().DataDir)
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+ for _, file := range tmpfiles {
+ if isOldTempFile(file) {
+ err = os.Remove(configuration.Get().DataDir + "/" + file.Name())
+ if err != nil {
+ fmt.Println(err)
+ }
+ }
+ }
+}
+
+// Returns true if a file is older than 24 hours and starts with the name upload or chunk
+func isOldTempFile(file os.DirEntry) bool {
+ if file.IsDir() {
+ return false
+ }
+ if !strings.HasPrefix(file.Name(), "upload") && !strings.HasPrefix(file.Name(), "chunk-") {
+ return false
+ }
+ info, err := file.Info()
+ if err != nil {
+ return false
+ }
+ return time.Now().Sub(info.ModTime()) > 24*time.Hour
+
+}
+
// IsExpiredFile returns true if the file is expired, either due to download count
// or if the provided timestamp is after the expiry timestamp
func IsExpiredFile(file models.File, timeNow int64) bool {
@@ -454,10 +665,10 @@ func isExpiredFileWithoutDownload(file models.File, timeNow int64) bool {
func deleteSource(file models.File, dataDir string) {
var err error
- if file.AwsBucket != "" {
+ if !file.IsLocalStorage() {
_, err = aws.DeleteObject(file)
} else {
- err = os.Remove(dataDir + "/" + file.SHA256)
+ err = os.Remove(dataDir + "/" + file.SHA1)
}
if err != nil {
fmt.Println("Warning, cannot delete file " + file.Id + ": " + err.Error())
diff --git a/internal/storage/FileServing_test.go b/internal/storage/FileServing_test.go
index dd9a46e..7f52396 100644
--- a/internal/storage/FileServing_test.go
+++ b/internal/storage/FileServing_test.go
@@ -6,7 +6,9 @@ import (
"github.com/forceu/gokapi/internal/configuration/cloudconfig"
"github.com/forceu/gokapi/internal/configuration/database"
"github.com/forceu/gokapi/internal/encryption"
+ "github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
+ "github.com/forceu/gokapi/internal/storage/chunking"
"github.com/forceu/gokapi/internal/storage/cloudstorage/aws"
"github.com/forceu/gokapi/internal/test"
"github.com/forceu/gokapi/internal/test/testconfiguration"
@@ -54,7 +56,7 @@ func TestGetFile(t *testing.T) {
file = models.File{
Id: "testget",
Name: "testget",
- SHA256: "testget",
+ SHA1: "testget",
UnlimitedDownloads: true,
UnlimitedTime: true,
}
@@ -69,9 +71,9 @@ func TestGetEncInfoFromExistingFile(t *testing.T) {
_, result := getEncInfoFromExistingFile("testhash")
test.IsEqualBool(t, result, true)
file := models.File{
- Id: "testhash",
- Name: "testhash",
- SHA256: "testhash",
+ Id: "testhash",
+ Name: "testhash",
+ SHA1: "testhash",
Encryption: models.EncryptionInfo{
IsEncrypted: true,
DecryptionKey: nil,
@@ -132,9 +134,8 @@ type testFile struct {
Content []byte
}
-func createTestFile() (testFile, error) {
+func createRawTestFile(content []byte) (multipart.FileHeader, models.UploadRequest) {
os.Setenv("TZ", "UTC")
- content := []byte("This is a file for testing purposes")
mimeHeader := make(textproto.MIMEHeader)
mimeHeader.Set("Content-Disposition", "form-data; name=\"file\"; filename=\"test.dat\"")
mimeHeader.Set("Content-Type", "text/plain")
@@ -148,8 +149,13 @@ func createTestFile() (testFile, error) {
Expiry: 999,
ExpiryTimestamp: 2147483600,
MaxMemory: 10,
- DataDir: "test/data",
}
+ return header, request
+}
+
+func createTestFile() (testFile, error) {
+ content := []byte("This is a file for testing purposes")
+ header, request := createRawTestFile(content)
file, err := NewFile(bytes.NewReader(content), &header, request)
return testFile{
File: file,
@@ -159,8 +165,23 @@ func createTestFile() (testFile, error) {
}, err
}
-func TestNewFile(t *testing.T) {
// createTestChunk writes a chunk file with fixed test content into the
// test data directory and returns the generated chunk id, the matching
// chunking.FileHeader and an upload request for it.
func createTestChunk() (string, chunking.FileHeader, models.UploadRequest, error) {
	content := []byte("This is a file for chunk testing purposes")
	header, request := createRawTestFile(content)
	// Random id mimics the sanitised uuid used for real chunk uploads
	chunkId := helper.GenerateRandomString(15)
	fileheader := chunking.FileHeader{
		Filename:    header.Filename,
		ContentType: header.Header.Get("Content-Type"),
		Size:        header.Size,
	}
	err := ioutil.WriteFile("test/data/chunk-"+chunkId, content, 0600)
	if err != nil {
		return "", chunking.FileHeader{}, models.UploadRequest{}, err
	}
	return chunkId, fileheader, request, nil
}
+func TestNewFile(t *testing.T) {
newFile, err := createTestFile()
file := newFile.File
request := newFile.Request
@@ -171,7 +192,7 @@ func TestNewFile(t *testing.T) {
retrievedFile, ok := database.GetMetaDataById(file.Id)
test.IsEqualBool(t, ok, true)
test.IsEqualString(t, retrievedFile.Name, "test.dat")
- test.IsEqualString(t, retrievedFile.SHA256, "f1474c19eff0fc8998fa6e1b1f7bf31793b103a6")
+ test.IsEqualString(t, retrievedFile.SHA1, "f1474c19eff0fc8998fa6e1b1f7bf31793b103a6")
test.IsEqualString(t, retrievedFile.HotlinkId, "")
test.IsEqualString(t, retrievedFile.PasswordHash, "")
test.IsEqualString(t, retrievedFile.Size, "35 B")
@@ -212,7 +233,6 @@ func TestNewFile(t *testing.T) {
Expiry: 999,
ExpiryTimestamp: 2147483600,
MaxMemory: 10,
- DataDir: "test/data",
}
// Also testing renaming of temp file
file, err = NewFile(bigFile, &header, request)
@@ -220,13 +240,13 @@ func TestNewFile(t *testing.T) {
retrievedFile, ok = database.GetMetaDataById(file.Id)
test.IsEqualBool(t, ok, true)
test.IsEqualString(t, retrievedFile.Name, "bigfile")
- test.IsEqualString(t, retrievedFile.SHA256, "9674344c90c2f0646f0b78026e127c9b86e3ad77")
+ test.IsEqualString(t, retrievedFile.SHA1, "9674344c90c2f0646f0b78026e127c9b86e3ad77")
test.IsEqualString(t, retrievedFile.Size, "20.0 MB")
_, err = bigFile.Seek(0, io.SeekStart)
test.IsNil(t, err)
// Testing removal of temp file
test.IsEqualString(t, retrievedFile.Name, "bigfile")
- test.IsEqualString(t, retrievedFile.SHA256, "9674344c90c2f0646f0b78026e127c9b86e3ad77")
+ test.IsEqualString(t, retrievedFile.SHA1, "9674344c90c2f0646f0b78026e127c9b86e3ad77")
test.IsEqualString(t, retrievedFile.Size, "20.0 MB")
bigFile.Close()
os.Remove("bigfile")
@@ -246,7 +266,6 @@ func TestNewFile(t *testing.T) {
Expiry: 999,
ExpiryTimestamp: 2147483600,
MaxMemory: 10,
- DataDir: "test/data",
}
file, err = NewFile(bigFile, &header, request)
test.IsNotNil(t, err)
@@ -269,7 +288,7 @@ func TestNewFile(t *testing.T) {
test.IsNil(t, err)
retrievedFile, ok = database.GetMetaDataById(newFile.File.Id)
test.IsEqualBool(t, ok, true)
- test.IsEqualString(t, retrievedFile.SHA256, "5bbfa18805eb12c678cfd284c956718d57039e37")
+ test.IsEqualString(t, retrievedFile.SHA1, "5bbfa18805eb12c678cfd284c956718d57039e37")
createBigFile("bigfile", 20)
header.Size = int64(20) * 1024 * 1024
@@ -279,7 +298,7 @@ func TestNewFile(t *testing.T) {
retrievedFile, ok = database.GetMetaDataById(file.Id)
test.IsEqualBool(t, ok, true)
test.IsEqualString(t, retrievedFile.Name, "bigfile")
- test.IsEqualString(t, retrievedFile.SHA256, "c1c165c30d0def15ba2bc8f1bd243be13b8c8fe7")
+ test.IsEqualString(t, retrievedFile.SHA1, "c1c165c30d0def15ba2bc8f1bd243be13b8c8fe7")
bigFile.Close()
database.DeleteMetaData(retrievedFile.Id)
@@ -305,7 +324,6 @@ func TestNewFile(t *testing.T) {
Expiry: 999,
ExpiryTimestamp: 2147483600,
MaxMemory: 10,
- DataDir: "test/data",
}
testconfiguration.EnableS3()
config, ok := cloudconfig.Load()
@@ -317,12 +335,90 @@ func TestNewFile(t *testing.T) {
retrievedFile, ok = database.GetMetaDataById(file.Id)
test.IsEqualBool(t, ok, true)
test.IsEqualString(t, retrievedFile.Name, "bigfile")
- test.IsEqualString(t, retrievedFile.SHA256, "f1474c19eff0fc8998fa6e1b1f7bf31793b103a6")
+ test.IsEqualString(t, retrievedFile.SHA1, "f1474c19eff0fc8998fa6e1b1f7bf31793b103a6")
test.IsEqualString(t, retrievedFile.Size, "20.0 MB")
testconfiguration.DisableS3()
}
}
// TestNewFileFromChunk covers the happy path, deduplication of identical
// content, invalid input and (if built with AWS support) the S3 upload
// path of NewFileFromChunk.
func TestNewFileFromChunk(t *testing.T) {
	// First upload: chunk is converted to a stored file and metadata saved
	test.FileDoesNotExist(t, "test/data/6cca7a6905774e6d61a77dca3ad7a1f44581d6ab")
	id, header, request, err := createTestChunk()
	test.IsNil(t, err)
	file, err := NewFileFromChunk(id, header, request)
	test.IsNil(t, err)
	test.IsEqualString(t, file.Name, "test.dat")
	test.IsEqualString(t, file.Size, "41 B")
	test.IsEqualString(t, file.SHA1, "6cca7a6905774e6d61a77dca3ad7a1f44581d6ab")
	test.IsEqualString(t, file.ExpireAtString, "2038-01-19 03:13")
	test.IsEqualInt64(t, file.ExpireAt, 2147483600)
	test.IsEqualInt(t, file.DownloadsRemaining, 1)
	test.IsEqualInt(t, file.DownloadCount, 0)
	test.IsEmpty(t, file.PasswordHash)
	test.IsEmpty(t, file.HotlinkId)
	test.IsEqualString(t, file.ContentType, "text/plain")
	test.IsEqualBool(t, file.UnlimitedTime, false)
	test.IsEqualBool(t, file.UnlimitedDownloads, false)
	test.FileExists(t, "test/data/6cca7a6905774e6d61a77dca3ad7a1f44581d6ab")
	test.FileDoesNotExist(t, "test/data/chunk-"+id)
	retrievedFile, ok := database.GetMetaDataById(file.Id)
	test.IsEqualBool(t, ok, true)
	test.IsEqualStruct(t, file, retrievedFile)

	// Second upload with identical content: existing file is reused,
	// metadata (name, limits) comes from the new request
	id, header, request, err = createTestChunk()
	header.Filename = "newfile"
	request.UnlimitedTime = true
	request.UnlimitedDownload = true
	test.IsNil(t, err)
	file, err = NewFileFromChunk(id, header, request)
	test.IsNil(t, err)
	test.IsEqualString(t, file.Name, "newfile")
	test.IsEqualString(t, file.Size, "41 B")
	test.IsEqualString(t, file.SHA1, "6cca7a6905774e6d61a77dca3ad7a1f44581d6ab")
	test.IsEqualString(t, file.ExpireAtString, "2038-01-19 03:13")
	test.IsEqualInt64(t, file.ExpireAt, 2147483600)
	test.IsEqualInt(t, file.DownloadsRemaining, 1)
	test.IsEqualInt(t, file.DownloadCount, 0)
	test.IsEmpty(t, file.PasswordHash)
	test.IsEmpty(t, file.HotlinkId)
	test.IsEqualString(t, file.ContentType, "text/plain")
	test.IsEqualBool(t, file.UnlimitedTime, true)
	test.IsEqualBool(t, file.UnlimitedDownloads, true)
	test.FileExists(t, "test/data/6cca7a6905774e6d61a77dca3ad7a1f44581d6ab")
	test.FileDoesNotExist(t, "test/data/chunk-"+id)
	retrievedFile, ok = database.GetMetaDataById(file.Id)
	test.IsEqualBool(t, ok, true)
	test.IsEqualStruct(t, file, retrievedFile)
	err = os.Remove("test/data/6cca7a6905774e6d61a77dca3ad7a1f44581d6ab")
	test.IsNil(t, err)

	// Invalid input: non-existing chunk id and mismatching declared size
	_, err = NewFileFromChunk("invalid", header, request)
	test.IsNotNil(t, err)
	id, header, request, err = createTestChunk()
	test.IsNil(t, err)
	header.Size = 100000
	file, err = NewFileFromChunk(id, header, request)
	test.IsNotNil(t, err)

	// S3 path: chunk is uploaded to the mock bucket instead of local storage
	if aws.IsIncludedInBuild {
		testconfiguration.EnableS3()
		config, ok := cloudconfig.Load()
		test.IsEqualBool(t, ok, true)
		ok = aws.Init(config.Aws)
		test.IsEqualBool(t, ok, true)
		id, header, request, err = createTestChunk()
		test.IsNil(t, err)
		file, err = NewFileFromChunk(id, header, request)
		test.IsNil(t, err)
		test.IsEqualBool(t, file.AwsBucket != "", true)
		test.IsEqualString(t, file.SHA1, "6cca7a6905774e6d61a77dca3ad7a1f44581d6ab")
		retrievedFile, ok = database.GetMetaDataById(file.Id)
		test.IsEqualStruct(t, file, retrievedFile)
		test.IsEqualBool(t, ok, true)
		testconfiguration.DisableS3()
	}
}
+
func TestDuplicateFile(t *testing.T) {
tempFile, err := createTestFile()
@@ -625,7 +721,7 @@ func TestDeleteFile(t *testing.T) {
Id: "awsTest1234567890123",
Name: "aws Test File",
Size: "20 MB",
- SHA256: "x341354656543213246465465465432456898794",
+ SHA1: "x341354656543213246465465465432456898794",
AwsBucket: "gokapi-test",
}
database.SaveMetaData(awsFile)
@@ -662,7 +758,6 @@ func TestRequiresClientDecryption(t *testing.T) {
file.Encryption.IsEncrypted = true
result = RequiresClientDecryption(file)
test.IsEqualBool(t, result, false)
-
}
func createBigFile(name string, megabytes int64) {
diff --git a/internal/storage/chunking/Chunking.go b/internal/storage/chunking/Chunking.go
new file mode 100644
index 0000000..170b7cd
--- /dev/null
+++ b/internal/storage/chunking/Chunking.go
@@ -0,0 +1,201 @@
+package chunking
+
+import (
+ "errors"
+ "github.com/forceu/gokapi/internal/configuration"
+ "github.com/forceu/gokapi/internal/helper"
+ "io"
+ "mime/multipart"
+ "net/http"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+type ChunkInfo struct {
+ TotalFilesizeBytes int64
+ Offset int64
+ UUID string
+}
+type FileHeader struct {
+ Filename string
+ ContentType string
+ Size int64
+}
+
+func ParseChunkInfo(r *http.Request, isApiCall bool) (ChunkInfo, error) {
+ info := ChunkInfo{}
+ err := r.ParseForm()
+ if err != nil {
+ return ChunkInfo{}, err
+ }
+
+ formTotalSize := "dztotalfilesize"
+ formOffset := "dzchunkbyteoffset"
+ formUuid := "dzuuid"
+
+ if isApiCall {
+ formTotalSize = "filesize"
+ formOffset = "offset"
+ formUuid = "uuid"
+ }
+
+ buf := r.Form.Get(formTotalSize)
+ info.TotalFilesizeBytes, err = strconv.ParseInt(buf, 10, 64)
+ if err != nil {
+ return ChunkInfo{}, err
+ }
+ if info.TotalFilesizeBytes < 0 {
+ return ChunkInfo{}, errors.New("value cannot be negative")
+ }
+
+ buf = r.Form.Get(formOffset)
+ info.Offset, err = strconv.ParseInt(buf, 10, 64)
+ if err != nil {
+ return ChunkInfo{}, err
+ }
+ if info.Offset < 0 {
+ return ChunkInfo{}, errors.New("value cannot be negative")
+ }
+
+ info.UUID = r.Form.Get(formUuid)
+ if len(info.UUID) < 10 {
+ return ChunkInfo{}, errors.New("invalid uuid submitted, needs to be at least 10 characters long")
+ }
+ info.UUID = sanitseUuid(info.UUID)
+ return info, nil
+}
+
+func sanitseUuid(input string) string {
+ reg, err := regexp.Compile("[^a-zA-Z0-9-]")
+ helper.Check(err)
+ return reg.ReplaceAllString(input, "_")
+}
+
+func ParseFileHeader(r *http.Request) (FileHeader, error) {
+ err := r.ParseForm()
+ if err != nil {
+ return FileHeader{}, err
+ }
+ name := r.Form.Get("filename")
+ if name == "" {
+ return FileHeader{}, errors.New("empty filename provided")
+ }
+ contentType := parseContentType(r)
+ size := r.Form.Get("filesize")
+ if size == "" {
+ return FileHeader{}, errors.New("empty size provided")
+ }
+ sizeInt, err := strconv.ParseInt(size, 10, 64)
+ if sizeInt < 0 {
+ return FileHeader{}, errors.New("value cannot be negative")
+ }
+ if err != nil {
+ return FileHeader{}, err
+ }
+ return FileHeader{
+ Filename: name,
+ Size: sizeInt,
+ ContentType: contentType,
+ }, nil
+}
+
+func parseContentType(r *http.Request) string {
+ contentType := r.Form.Get("filecontenttype")
+ if contentType != "" {
+ return contentType
+ }
+ fileExt := strings.ToLower(filepath.Ext(r.Form.Get("filename")))
+ switch fileExt {
+ case ".jpeg":
+ fallthrough
+ case ".jpg":
+ contentType = "image/jpeg"
+ case ".png":
+ contentType = "image/png"
+ case ".gif":
+ contentType = "image/gif"
+ case ".webp":
+ contentType = "image/webp"
+ case ".bmp":
+ contentType = "image/bmp"
+ case ".svg":
+ contentType = "image/svg+xml"
+ case ".tiff":
+ fallthrough
+ case ".tif":
+ contentType = "image/tiff"
+ case ".ico":
+ contentType = "image/vnd.microsoft.icon"
+ default:
+ contentType = "application/octet-stream"
+ }
+ return contentType
+}
+
+func ParseMultipartHeader(header *multipart.FileHeader) (FileHeader, error) {
+ if header.Filename == "" {
+ return FileHeader{}, errors.New("empty filename provided")
+ }
+ if header.Header.Get("Content-Type") == "" {
+ return FileHeader{}, errors.New("empty content-type provided")
+ }
+ return FileHeader{
+ Filename: header.Filename,
+ Size: header.Size,
+ ContentType: header.Header.Get("Content-Type"),
+ }, nil
+}
+
// getChunkFilePath returns the path in the data directory where the
// chunk file with the given id is stored.
func getChunkFilePath(id string) string {
	return configuration.Get().DataDir + "/chunk-" + id
}
+
+func GetFileByChunkId(id string) (*os.File, error) {
+ file, err := os.OpenFile(getChunkFilePath(id), os.O_RDWR, 0600)
+ if err != nil {
+ return nil, err
+ }
+ return file, nil
+}
+
+func NewChunk(chunkContent io.Reader, fileHeader *multipart.FileHeader, info ChunkInfo) error {
+ err := allocateFile(info)
+ if err != nil {
+ return err
+ }
+ return writeChunk(chunkContent, fileHeader, info)
+}
+
+func allocateFile(info ChunkInfo) error {
+ if helper.FileExists(getChunkFilePath(info.UUID)) {
+ return nil
+ }
+ file, err := os.OpenFile(getChunkFilePath(info.UUID), os.O_RDWR|os.O_CREATE, 0600)
+ if err != nil {
+ return err
+ }
+ err = file.Truncate(info.TotalFilesizeBytes)
+ return err
+}
+
+func writeChunk(chunkContent io.Reader, fileHeader *multipart.FileHeader, info ChunkInfo) error {
+ if info.Offset+fileHeader.Size > info.TotalFilesizeBytes {
+ return errors.New("chunksize will be bigger than total filesize from this offset")
+ }
+ file, err := GetFileByChunkId(info.UUID)
+ if err != nil {
+ return err
+ }
+ newOffset, err := file.Seek(info.Offset, io.SeekStart)
+ if err != nil {
+ return err
+ }
+ if newOffset != info.Offset {
+ return errors.New("seek returned invalid offset")
+ }
+ _, err = io.Copy(file, chunkContent)
+ return err
+}
diff --git a/internal/storage/chunking/Chunking_test.go b/internal/storage/chunking/Chunking_test.go
new file mode 100644
index 0000000..9d8b1d4
--- /dev/null
+++ b/internal/storage/chunking/Chunking_test.go
@@ -0,0 +1,361 @@
+package chunking
+
+import (
+ "bytes"
+ "crypto/sha1"
+ "encoding/hex"
+ "github.com/forceu/gokapi/internal/configuration"
+ "github.com/forceu/gokapi/internal/helper"
+ "github.com/forceu/gokapi/internal/test"
+ "github.com/forceu/gokapi/internal/test/testconfiguration"
+ "github.com/juju/ratelimit"
+ "golang.org/x/sync/errgroup"
+ "mime/multipart"
+ "net/textproto"
+ "net/url"
+ "os"
+ "strings"
+ "testing"
+)
+
+// TestMain creates the test configuration (including test/data) before the
+// tests run and deletes it afterwards.
+func TestMain(m *testing.M) {
+	testconfiguration.Create(false)
+	configuration.Load()
+	exitVal := m.Run()
+	testconfiguration.Delete()
+	os.Exit(exitVal)
+}
+
+// TestParseChunkInfo checks parsing of the dropzone.js form values
+// (dzuuid, dzchunkbyteoffset, dztotalfilesize) and of the API form values
+// (uuid, offset, filesize), including the invalid-input branches.
+func TestParseChunkInfo(t *testing.T) {
+	// Valid dropzone-style request
+	data := url.Values{}
+	data.Set("dztotalfilesize", "100000")
+	data.Set("dzchunkbyteoffset", "10")
+	data.Set("dzuuid", "fweflwfejkfwejf-wekjefwjfwej")
+	_, r := test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	info, err := ParseChunkInfo(r, false)
+	test.IsNil(t, err)
+	test.IsEqualInt64(t, info.TotalFilesizeBytes, 100000)
+	test.IsEqualInt64(t, info.Offset, 10)
+	test.IsEqualString(t, info.UUID, "fweflwfejkfwejf-wekjefwjfwej")
+
+	// A too-short UUID is rejected
+	data.Set("dzuuid", "23432")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	_, err = ParseChunkInfo(r, false)
+	test.IsNotNil(t, err)
+
+	// Unsafe characters in the UUID are replaced with underscores
+	data.Set("dzuuid", "!\"§$%&/()=?abc-")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	info, err = ParseChunkInfo(r, false)
+	test.IsNil(t, err)
+	test.IsEqualInt64(t, info.TotalFilesizeBytes, 100000)
+	test.IsEqualInt64(t, info.Offset, 10)
+	test.IsEqualString(t, info.UUID, "___________abc-")
+
+	// Non-numeric offset is rejected
+	data.Set("dzchunkbyteoffset", "invalid")
+	data.Set("dzuuid", "fweflwfejkfwejf-wekjefwjfwej")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	_, err = ParseChunkInfo(r, false)
+	test.IsNotNil(t, err)
+
+	// Negative offset is rejected
+	data.Set("dzchunkbyteoffset", "-1")
+	data.Set("dzuuid", "fweflwfejkfwejf-wekjefwjfwej")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	_, err = ParseChunkInfo(r, false)
+	test.IsNotNil(t, err)
+
+	// Missing offset is rejected
+	data.Set("dzchunkbyteoffset", "")
+	data.Set("dzuuid", "fweflwfejkfwejf-wekjefwjfwej")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	_, err = ParseChunkInfo(r, false)
+	test.IsNotNil(t, err)
+
+	// Non-numeric total filesize is rejected
+	data.Set("dzchunkbyteoffset", "0")
+	data.Set("dztotalfilesize", "invalid")
+	data.Set("dzuuid", "fweflwfejkfwejf-wekjefwjfwej")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	_, err = ParseChunkInfo(r, false)
+	test.IsNotNil(t, err)
+
+	// Negative total filesize is rejected
+	data.Set("dztotalfilesize", "-1")
+	data.Set("dzuuid", "fweflwfejkfwejf-wekjefwjfwej")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	_, err = ParseChunkInfo(r, false)
+	test.IsNotNil(t, err)
+
+	// Missing total filesize is rejected
+	data.Set("dztotalfilesize", "")
+	data.Set("dzuuid", "fweflwfejkfwejf-wekjefwjfwej")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	_, err = ParseChunkInfo(r, false)
+	test.IsNotNil(t, err)
+
+	// A body that cannot be parsed as a form is rejected
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader("invalid§%&§"))
+	_, err = ParseChunkInfo(r, false)
+	test.IsNotNil(t, err)
+
+	// Valid request using the API parameter names (isApiCall == true)
+	data = url.Values{}
+	data.Set("filesize", "100000")
+	data.Set("offset", "10")
+	data.Set("uuid", "fweflwfejkfwejf-wekjefwjfwej")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	info, err = ParseChunkInfo(r, true)
+	test.IsNil(t, err)
+	test.IsEqualInt64(t, info.TotalFilesizeBytes, 100000)
+	test.IsEqualInt64(t, info.Offset, 10)
+	test.IsEqualString(t, info.UUID, "fweflwfejkfwejf-wekjefwjfwej")
+}
+
+// TestParseContentType checks that a submitted content-type is used as-is,
+// that an empty content-type falls back to application/octet-stream, and
+// that files with a known image extension get an image/* content-type
+// derived from the filename instead of the generic fallback.
+func TestParseContentType(t *testing.T) {
+	var imageFileExtensions = []string{".jpg", ".jpeg", ".png", ".gif", ".webp", ".bmp", ".svg", ".tiff", ".tif", ".ico"}
+
+	// Explicit content-type is passed through unchanged
+	data := url.Values{}
+	data.Set("filename", "test.unknown")
+	data.Set("filecontenttype", "test/unknown")
+	_, r := test.GetRecorder("POST", "/uploadComplete", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	err := r.ParseForm()
+	test.IsNil(t, err)
+	contentType := parseContentType(r)
+	test.IsEqualString(t, contentType, "test/unknown")
+
+	// Empty content-type with an unknown extension falls back to the default
+	data.Set("filecontenttype", "")
+	_, r = test.GetRecorder("POST", "/uploadComplete", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	err = r.ParseForm()
+	test.IsNil(t, err)
+	contentType = parseContentType(r)
+	test.IsEqualString(t, contentType, "application/octet-stream")
+
+	// Empty content-type with an image extension is detected as image/*
+	for _, imageExt := range imageFileExtensions {
+		data.Set("filename", "test"+imageExt)
+		_, r = test.GetRecorder("POST", "/uploadComplete", nil, []test.Header{
+			{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+			strings.NewReader(data.Encode()))
+		err = r.ParseForm()
+		test.IsNil(t, err)
+		contentType = parseContentType(r)
+		test.IsNotEqualString(t, contentType, "application/octet-stream")
+		test.IsNotEqualString(t, contentType, "")
+		test.IsEqualBool(t, strings.Contains(contentType, "image/"), true)
+	}
+}
+
+// TestParseFileHeader checks parsing of the filename, filecontenttype and
+// filesize form values into a FileHeader, including the fallback for an
+// empty content-type and all invalid-input branches.
+func TestParseFileHeader(t *testing.T) {
+	// Valid request
+	data := url.Values{}
+	data.Set("filename", "testfile")
+	data.Set("filecontenttype", "test/content")
+	data.Set("filesize", "1000")
+	_, r := test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	header, err := ParseFileHeader(r)
+	test.IsNil(t, err)
+	test.IsEqualString(t, header.Filename, "testfile")
+	test.IsEqualString(t, header.ContentType, "test/content")
+	test.IsEqualInt64(t, header.Size, 1000)
+
+	// Empty filename is rejected
+	data.Set("filename", "")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	_, err = ParseFileHeader(r)
+	test.IsNotNil(t, err)
+
+	// Empty content-type falls back to application/octet-stream
+	data.Set("filename", "testfile")
+	data.Set("filecontenttype", "")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	header, err = ParseFileHeader(r)
+	test.IsNil(t, err)
+	test.IsEqualString(t, header.ContentType, "application/octet-stream")
+
+	// Non-numeric filesize is rejected
+	data.Set("filecontenttype", "test/content")
+	data.Set("filesize", "invalid")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	_, err = ParseFileHeader(r)
+	test.IsNotNil(t, err)
+
+	// Missing filesize is rejected
+	data.Set("filesize", "")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	_, err = ParseFileHeader(r)
+	test.IsNotNil(t, err)
+
+	// Negative filesize is rejected
+	data.Set("filesize", "-5")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	_, err = ParseFileHeader(r)
+	test.IsNotNil(t, err)
+
+	// A body that cannot be parsed as a form is rejected
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader("invalid§%&§"))
+	_, err = ParseFileHeader(r)
+	test.IsNotNil(t, err)
+
+	// NOTE(review): this case exercises ParseChunkInfo with dropzone
+	// parameter names while isApiCall is true; it arguably belongs in
+	// TestParseChunkInfo rather than here.
+	data = url.Values{}
+	data.Set("dztotalfilesize", "100000")
+	data.Set("dzchunkbyteoffset", "10")
+	data.Set("dzuuid", "fweflwfejkfwejf-wekjefwjfwej")
+	_, r = test.GetRecorder("POST", "/uploadChunk", nil, []test.Header{
+		{Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+		strings.NewReader(data.Encode()))
+	_, err = ParseChunkInfo(r, true)
+	test.IsNotNil(t, err)
+}
+
+// TestParseMultipartHeader checks the conversion of a multipart.FileHeader
+// to a FileHeader and that an empty filename or missing content-type is
+// rejected.
+func TestParseMultipartHeader(t *testing.T) {
+	mimeHeader := make(textproto.MIMEHeader)
+	mimeHeader.Set("Content-Type", "test/type")
+	multipartHeader := multipart.FileHeader{
+		Filename: "testfile",
+		Size:     100,
+		Header:   mimeHeader,
+	}
+
+	// Valid header is converted 1:1
+	header, err := ParseMultipartHeader(&multipartHeader)
+	test.IsNil(t, err)
+	test.IsEqualInt64(t, header.Size, 100)
+	test.IsEqualString(t, header.Filename, "testfile")
+	test.IsEqualString(t, header.ContentType, "test/type")
+
+	// Empty filename is rejected
+	multipartHeader.Filename = ""
+	_, err = ParseMultipartHeader(&multipartHeader)
+	test.IsNotNil(t, err)
+
+	// Missing content-type is rejected
+	multipartHeader.Filename = "testfile"
+	multipartHeader.Header.Del("Content-Type")
+	_, err = ParseMultipartHeader(&multipartHeader)
+	test.IsNotNil(t, err)
+}
+
+// TestGetChunkFilePath checks that the chunk path is built from the data
+// directory of the test configuration (test/data).
+func TestGetChunkFilePath(t *testing.T) {
+	test.IsEqualString(t, getChunkFilePath("test"), "test/data/chunk-test")
+}
+
+// TestGetFileByChunkId checks that opening a non-existing chunk fails and
+// that an existing chunk file is opened with the expected path.
+func TestGetFileByChunkId(t *testing.T) {
+	test.FileDoesNotExist(t, "testchunk")
+	_, err := GetFileByChunkId("testchunk")
+	test.IsNotNil(t, err)
+	err = os.WriteFile("test/data/chunk-testchunk", []byte("conent"), 0777)
+	test.IsNil(t, err)
+	file, err := GetFileByChunkId("testchunk")
+	// Check the error before using file: on failure file is nil and
+	// file.Name() would panic
+	test.IsNil(t, err)
+	test.IsEqualString(t, file.Name(), "test/data/chunk-testchunk")
+	// Close the handle before removing the file, otherwise the descriptor
+	// leaks and removal fails on Windows
+	err = file.Close()
+	test.IsNil(t, err)
+	err = os.Remove("test/data/chunk-testchunk")
+	test.IsNil(t, err)
+}
+
+// TestNewChunk writes chunks at several offsets into an allocated chunk
+// file, verifies the resulting content via its SHA-1 sum, checks the error
+// branches and finally performs two simultaneous rate-limited writes into
+// the same chunk file.
+func TestNewChunk(t *testing.T) {
+	// First chunk at offset 0 of a 100-byte allocation
+	info := ChunkInfo{
+		TotalFilesizeBytes: 100,
+		Offset:             0,
+		UUID:               "testuuid12345",
+	}
+	header := multipart.FileHeader{
+		Size: 21,
+	}
+	err := NewChunk(strings.NewReader("This is a test content"), &header, info)
+	test.IsNil(t, err)
+	test.IsEqualString(t, sha1sumFile("test/data/chunk-testuuid12345"), "a69ec3c3a031e3540d0c2a864ca931f3d54e2c13")
+
+	// Second chunk written in the middle of the file
+	info.Offset = 52
+	header = multipart.FileHeader{
+		Size: 11,
+	}
+	err = NewChunk(strings.NewReader("More content"), &header, info)
+	test.IsNil(t, err)
+	test.IsEqualString(t, sha1sumFile("test/data/chunk-testuuid12345"), "8794d8352fae46b83bab83d3e613dde8f0244ded")
+
+	// Chunk that would exceed the total filesize is rejected
+	info.Offset = 99
+	err = NewChunk(strings.NewReader("More content"), &header, info)
+	test.IsNotNil(t, err)
+
+	err = os.Remove("test/data/chunk-testuuid12345")
+	test.IsNil(t, err)
+
+	// Negative total filesize makes the allocation (Truncate) fail
+	info.TotalFilesizeBytes = -4
+	err = NewChunk(strings.NewReader("More content"), &header, info)
+	test.IsNotNil(t, err)
+
+	// Path traversal in the UUID must not create a file outside the data dir
+	info.TotalFilesizeBytes = 100
+	info.UUID = "../../../../../../../../../../invalid"
+	err = NewChunk(strings.NewReader("More content"), &header, info)
+	test.IsNotNil(t, err)
+
+	// Testing simultaneous writes
+	// NOTE(review): test/data/chunk-multiplewrites is not removed here;
+	// presumably cleaned up by testconfiguration.Delete - verify.
+	egroup := new(errgroup.Group)
+	egroup.Go(func() error {
+		return writeRateLimitedChunk(true)
+	})
+	egroup.Go(func() error {
+		return writeRateLimitedChunk(false)
+	})
+	err = egroup.Wait()
+	test.IsNil(t, err)
+}
+
+// writeRateLimitedChunk writes one 500 KiB half of a 1000 KiB chunk file,
+// rate-limited to 400 KiB/s, so that two goroutines calling it with
+// opposite halves are guaranteed to write to the same file concurrently.
+func writeRateLimitedChunk(firstHalf bool) error {
+	var offset int64
+	if !firstHalf {
+		offset = 500 * 1024
+	}
+	info := ChunkInfo{
+		TotalFilesizeBytes: 1000 * 1024,
+		Offset:             offset,
+		UUID:               "multiplewrites",
+	}
+	header := multipart.FileHeader{
+		Size: 500 * 1024,
+	}
+	content := []byte(helper.GenerateRandomString(500 * 1024))
+	bucket := ratelimit.NewBucketWithRate(400*1024, 400*1024)
+	err := NewChunk(ratelimit.Reader(bytes.NewReader(content), bucket), &header, info)
+	return err
+}
+
+// TestWriteChunk checks that writing fails when the chunk file was never
+// allocated (an empty UUID maps to a non-existing file).
+func TestWriteChunk(t *testing.T) {
+	err := writeChunk(nil, &multipart.FileHeader{Size: 10}, ChunkInfo{
+		UUID:               "",
+		TotalFilesizeBytes: 10,
+	})
+	test.IsNotNil(t, err)
+}
+
+// sha1sumFile returns the hex-encoded SHA-1 sum of the given file's
+// content. It panics if the file cannot be read, as it is only used to
+// verify test results.
+func sha1sumFile(filename string) string {
+	content, err := os.ReadFile(filename)
+	if err != nil {
+		panic(err)
+	}
+	digest := sha1.Sum(content)
+	return hex.EncodeToString(digest[:])
+}
diff --git a/internal/storage/cloudstorage/aws/AwsS3.go b/internal/storage/cloudstorage/aws/AwsS3.go
index a03ad1f..8562a24 100644
--- a/internal/storage/cloudstorage/aws/AwsS3.go
+++ b/internal/storage/cloudstorage/aws/AwsS3.go
@@ -67,7 +67,7 @@ func IsValidLogin(config models.AwsConfig) (bool, error) {
}
tempConfig := awsConfig
awsConfig = config
- _, _, err := FileExists(models.File{AwsBucket: awsConfig.Bucket, SHA256: "invalid"})
+ _, _, err := FileExists(models.File{AwsBucket: awsConfig.Bucket, SHA1: "invalid"})
awsConfig = tempConfig
if err != nil {
return false, err
@@ -92,7 +92,7 @@ func Upload(input io.Reader, file models.File) (string, error) {
result, err := uploader.Upload(&s3manager.UploadInput{
Bucket: aws.String(file.AwsBucket),
- Key: aws.String(file.SHA256),
+ Key: aws.String(file.SHA1),
Body: input,
})
if err != nil {
@@ -108,7 +108,7 @@ func Download(writer io.WriterAt, file models.File) (int64, error) {
size, err := downloader.Download(writer, &s3.GetObjectInput{
Bucket: aws.String(file.AwsBucket),
- Key: aws.String(file.SHA256),
+ Key: aws.String(file.SHA1),
})
if err != nil {
return 0, err
@@ -129,7 +129,7 @@ func RedirectToDownload(w http.ResponseWriter, r *http.Request, file models.File
req, _ := s3svc.GetObjectRequest(&s3.GetObjectInput{
Bucket: aws.String(file.AwsBucket),
- Key: aws.String(file.SHA256),
+ Key: aws.String(file.SHA1),
ResponseContentDisposition: aws.String(contentDisposition),
ResponseCacheControl: aws.String("no-store"),
ResponseContentType: aws.String(file.ContentType),
@@ -151,7 +151,7 @@ func FileExists(file models.File) (bool, int64, error) {
info, err := svc.HeadObject(&s3.HeadObjectInput{
Bucket: aws.String(file.AwsBucket),
- Key: aws.String(file.SHA256),
+ Key: aws.String(file.SHA1),
})
if err != nil {
@@ -173,7 +173,7 @@ func DeleteObject(file models.File) (bool, error) {
_, err := svc.DeleteObject(&s3.DeleteObjectInput{
Bucket: aws.String(file.AwsBucket),
- Key: aws.String(file.SHA256),
+ Key: aws.String(file.SHA1),
})
if err != nil {
diff --git a/internal/storage/cloudstorage/aws/AwsS3_mock.go b/internal/storage/cloudstorage/aws/AwsS3_mock.go
index f1a7598..9789b94 100644
--- a/internal/storage/cloudstorage/aws/AwsS3_mock.go
+++ b/internal/storage/cloudstorage/aws/AwsS3_mock.go
@@ -38,7 +38,7 @@ func Init(config models.AwsConfig) bool {
Id: "awsTest1234567890123",
Name: "aws Test File",
Size: "20 MB",
- SHA256: "x341354656543213246465465465432456898794",
+ SHA1: "x341354656543213246465465465432456898794",
AwsBucket: "gokapi-test",
})
return true
@@ -120,7 +120,7 @@ func Download(writer io.WriterAt, file models.File) (int64, error) {
func isUploaded(file models.File) bool {
for _, element := range uploadedFiles {
- if element.SHA256 == file.SHA256 {
+ if element.SHA1 == file.SHA1 {
return true
}
}
@@ -161,7 +161,7 @@ func DeleteObject(file models.File) (bool, error) {
var buffer []models.File
for _, element := range uploadedFiles {
- if element.SHA256 != file.SHA256 {
+ if element.SHA1 != file.SHA1 {
buffer = append(buffer, element)
}
}
diff --git a/internal/storage/cloudstorage/aws/AwsS3_test.go b/internal/storage/cloudstorage/aws/AwsS3_test.go
index fa40958..23c8897 100644
--- a/internal/storage/cloudstorage/aws/AwsS3_test.go
+++ b/internal/storage/cloudstorage/aws/AwsS3_test.go
@@ -17,13 +17,13 @@ var testFile, invalidFile, invalidBucket, invalidAll models.File
func TestMain(m *testing.M) {
testFile.AwsBucket = "gokapi-test"
- testFile.SHA256 = "testfile"
+ testFile.SHA1 = "testfile"
invalidFile.AwsBucket = "gokapi-test"
- invalidFile.SHA256 = "invalid"
+ invalidFile.SHA1 = "invalid"
invalidBucket.AwsBucket = "invalid"
- invalidBucket.SHA256 = "testfile"
+ invalidBucket.SHA1 = "testfile"
invalidAll.AwsBucket = "invalid"
- invalidAll.SHA256 = "invalid"
+ invalidAll.SHA1 = "invalid"
if os.Getenv("REAL_AWS_CREDENTIALS") != "true" {
ts := startMockServer()
os.Setenv("GOKAPI_AWS_ENDPOINT", ts.URL)
diff --git a/internal/test/TestHelper.go b/internal/test/TestHelper.go
index 050ca36..0fa4267 100644
--- a/internal/test/TestHelper.go
+++ b/internal/test/TestHelper.go
@@ -12,6 +12,7 @@ import (
"net/url"
"os"
"path/filepath"
+ "reflect"
"strconv"
"strings"
"time"
@@ -55,6 +56,14 @@ func IsEqualBool(t MockT, got, want bool) {
}
}
+// IsEqualStruct fails test if got and want are not identical
+func IsEqualStruct(t MockT, got, want any) {
+ t.Helper()
+ if !reflect.DeepEqual(got, want) {
+ t.Errorf("Assertion failed, got: %+v, want: %+v.", got, want)
+ }
+}
+
// IsEqualInt fails test if got and want are not identical
func IsEqualInt(t MockT, got, want int) {
t.Helper()
@@ -250,7 +259,7 @@ func checkResponse(t MockT, response *http.Response, config HttpTestConfig) {
t.Helper()
IsEqualBool(t, response != nil, true)
if response.StatusCode != config.ResultCode {
- t.Errorf("Status %d != %d", config.ResultCode, response.StatusCode)
+ t.Errorf("Status Code - Got: %d Want: %d", config.ResultCode, response.StatusCode)
}
content, err := ioutil.ReadAll(response.Body)
@@ -325,23 +334,13 @@ type PostBody struct {
func HttpPostUploadRequest(t MockT, config HttpTestConfig) {
t.Helper()
config.init(t)
- file, err := os.Open(config.UploadFileName)
- IsNil(t, err)
- defer file.Close()
- body := &bytes.Buffer{}
- writer := multipart.NewWriter(body)
- part, err := writer.CreateFormFile(config.UploadFieldName, filepath.Base(file.Name()))
- IsNil(t, err)
-
- io.Copy(part, file)
- writer.Close()
+ body, formcontent := FileToMultipartFormBody(t, config)
request, err := http.NewRequest("POST", config.Url, body)
IsNil(t, err)
-
for _, cookie := range config.Cookies {
request.Header.Set("Cookie", cookie.toString())
}
- request.Header.Add("Content-Type", writer.FormDataContentType())
+ request.Header.Add("Content-Type", formcontent)
client := &http.Client{}
response, err := client.Do(request)
@@ -351,6 +350,25 @@ func HttpPostUploadRequest(t MockT, config HttpTestConfig) {
checkResponse(t, response, config)
}
+func FileToMultipartFormBody(t MockT, config HttpTestConfig) (*bytes.Buffer, string) {
+ file, err := os.Open(config.UploadFileName)
+ IsNil(t, err)
+ defer file.Close()
+ body := &bytes.Buffer{}
+ writer := multipart.NewWriter(body)
+ for _, postValue := range config.PostValues {
+ err = writer.WriteField(postValue.Key, postValue.Value)
+ IsNil(t, err)
+ }
+ part, err := writer.CreateFormFile(config.UploadFieldName, filepath.Base(file.Name()))
+ IsNil(t, err)
+
+ _, err = io.Copy(part, file)
+ IsNil(t, err)
+ defer writer.Close()
+ return body, writer.FormDataContentType()
+}
+
// HttpPostRequest sends a post request
func HttpPostRequest(t MockT, config HttpTestConfig) []*http.Cookie {
t.Helper()
@@ -360,10 +378,22 @@ func HttpPostRequest(t MockT, config HttpTestConfig) []*http.Cookie {
for _, dataField := range config.PostValues {
data.Add(dataField.Key, dataField.Value)
}
-
- response, err := http.PostForm(config.Url, data)
+ r, err := http.NewRequest("POST", config.Url, strings.NewReader(data.Encode()))
IsNil(t, err)
+ for _, cookie := range config.Cookies {
+ r.AddCookie(&http.Cookie{
+ Name: cookie.Name,
+ Value: cookie.Value,
+ Path: "/",
+ })
+ }
+ r.Header.Set("Content-type", "application/x-www-form-urlencoded")
+ client := &http.Client{}
+ response, err := client.Do(r)
+ IsNil(t, err)
+ defer response.Body.Close()
+
checkResponse(t, response, config)
return response.Cookies()
}
diff --git a/internal/test/TestHelper_test.go b/internal/test/TestHelper_test.go
index 0a9477c..ac629d2 100644
--- a/internal/test/TestHelper_test.go
+++ b/internal/test/TestHelper_test.go
@@ -58,6 +58,8 @@ func TestFunctions(t *testing.T) {
mockT.WantNoFail()
IsEqualInt(mockT, 1, 1)
mockT.WantNoFail()
+ IsEqualInt64(mockT, 2, 2)
+ mockT.WantNoFail()
IsNotEmpty(mockT, "notEmpty")
mockT.WantNoFail()
IsEmpty(mockT, "")
@@ -80,6 +82,8 @@ func TestFunctions(t *testing.T) {
mockT.WantFail()
IsEqualInt(mockT, 1, 2)
mockT.WantFail()
+ IsEqualInt64(mockT, 4, 9)
+ mockT.WantFail()
IsNotEmpty(mockT, "")
mockT.WantFail()
IsEmpty(mockT, "notEmpty")
@@ -157,12 +161,13 @@ func TestHttpPageResult(t *testing.T) {
mockT.Check()
}
-func TestHttpPostRequest(t *testing.T) {
+func TestHttpPostUploadRequest(t *testing.T) {
os.WriteFile("testfile", []byte("Testbytes"), 0777)
HttpPostUploadRequest(t, HttpTestConfig{
Url: "http://127.0.0.1:9999/test",
UploadFileName: "testfile",
UploadFieldName: "file",
+ PostValues: []PostBody{{Key: "test", Value: "test2"}},
RequiredContent: []string{"TestContent", "testName", "testValue"},
ExcludedContent: []string{"invalid"},
Cookies: []Cookie{{
@@ -188,6 +193,68 @@ func TestHttpPostRequest(t *testing.T) {
mockT.Check()
os.Remove("testfile")
}
+func TestHttpPageResultJson(t *testing.T) {
+ HttpPageResultJson(t, HttpTestConfig{
+ Url: "http://127.0.0.1:9999/test",
+ UploadFileName: "testfile",
+ UploadFieldName: "file",
+ PostValues: []PostBody{{Key: "test", Value: "test2"}},
+ RequiredContent: []string{"TestContent", "testName", "testValue"},
+ ExcludedContent: []string{"invalid"},
+ Cookies: []Cookie{{
+ Name: "testName",
+ Value: "testValue",
+ }},
+ })
+ mockT := MockTest{reference: t}
+ mockT.WantFail()
+ HttpPageResultJson(mockT, HttpTestConfig{
+ Url: "http://127.0.0.1:9999/test",
+ UploadFileName: "testfile",
+ UploadFieldName: "file",
+ Headers: []Header{{Name: "test", Value: "input"}},
+ ExcludedContent: []string{"TestContent"}},
+ )
+ mockT.WantFail()
+ HttpPageResultJson(mockT, HttpTestConfig{
+ Url: "http://127.0.0.1:9999/test",
+ UploadFileName: "testfile",
+ UploadFieldName: "file",
+ RequiredContent: []string{"invalid"}},
+ )
+ mockT.Check()
+}
+func TestHttpPostRequest(t *testing.T) {
+ HttpPostRequest(t, HttpTestConfig{
+ Url: "http://127.0.0.1:9999/test",
+ UploadFileName: "testfile",
+ UploadFieldName: "file",
+ PostValues: []PostBody{{Key: "test", Value: "test2"}},
+ RequiredContent: []string{"TestContent", "testName", "testValue"},
+ ExcludedContent: []string{"invalid"},
+ Cookies: []Cookie{{
+ Name: "testName",
+ Value: "testValue",
+ }},
+ })
+ mockT := MockTest{reference: t}
+ mockT.WantFail()
+ HttpPostRequest(mockT, HttpTestConfig{
+ Url: "http://127.0.0.1:9999/test",
+ UploadFileName: "testfile",
+ UploadFieldName: "file",
+ Headers: []Header{{Name: "test", Value: "input"}},
+ ExcludedContent: []string{"TestContent"}},
+ )
+ mockT.WantFail()
+ HttpPostRequest(mockT, HttpTestConfig{
+ Url: "http://127.0.0.1:9999/test",
+ UploadFileName: "testfile",
+ UploadFieldName: "file",
+ RequiredContent: []string{"invalid"}},
+ )
+ mockT.Check()
+}
func TestResponseBodyContains(t *testing.T) {
mockT := MockTest{reference: t}
diff --git a/internal/test/testconfiguration/TestConfiguration.go b/internal/test/testconfiguration/TestConfiguration.go
index 412a38c..1e67045 100644
--- a/internal/test/testconfiguration/TestConfiguration.go
+++ b/internal/test/testconfiguration/TestConfiguration.go
@@ -3,7 +3,6 @@
package testconfiguration
import (
- "bytes"
"fmt"
"github.com/forceu/gokapi/internal/configuration/database"
"github.com/forceu/gokapi/internal/helper"
@@ -13,6 +12,7 @@ import (
"github.com/johannesboyne/gofakes3/backend/s3mem"
"net/http/httptest"
"os"
+ "strings"
"time"
)
@@ -61,9 +61,9 @@ func Create(initFiles bool) {
func WriteEncryptedFile() string {
name := helper.GenerateRandomString(10)
database.SaveMetaData(models.File{
- Id: name,
- Name: name,
- SHA256: name,
+ Id: name,
+ Name: name,
+ SHA1: name,
Encryption: models.EncryptionInfo{
IsEncrypted: true,
},
@@ -73,18 +73,6 @@ func WriteEncryptedFile() string {
return name
}
-// WriteUpgradeConfigFileV0 writes a Gokapi v1.1.0 config file
-func WriteUpgradeConfigFileV0() {
- os.Mkdir(dataDir, 0777)
- os.WriteFile(configFile, configUpgradeTestFile, 0777)
-}
-
-// WriteUpgradeConfigFileV8 writes a Gokapi v1.3 config file
-func WriteUpgradeConfigFileV8() {
- os.Mkdir(dataDir, 0777)
- os.WriteFile(configFile, configTestFileV8, 0777)
-}
-
// WriteSslCertificates writes a valid or invalid SSL certificate
func WriteSslCertificates(valid bool) {
os.Mkdir(dataDir, 0777)
@@ -140,7 +128,7 @@ func StartS3TestServer() *httptest.Server {
backend := s3mem.New()
_ = backend.CreateBucket("gokapi")
_ = backend.CreateBucket("gokapi-test")
- _, _ = backend.PutObject("gokapi-test", "x341354656543213246465465465432456898794", nil, bytes.NewReader([]byte{}), 0)
+ _, _ = backend.PutObject("gokapi-test", "x341354656543213246465465465432456898794", nil, strings.NewReader("content"), 7)
faker := gofakes3.New(backend)
server := httptest.NewServer(faker.Server())
os.Setenv("GOKAPI_AWS_ENDPOINT", server.URL)
@@ -204,7 +192,7 @@ func writeTestFiles() {
Id: "Wzol7LyY2QVczXynJtVo",
Name: "smallfile2",
Size: "8 B",
- SHA256: "e017693e4a04a59d0b0f400fe98177fe7ee13cf7",
+ SHA1: "e017693e4a04a59d0b0f400fe98177fe7ee13cf7",
ExpireAt: 2147483646,
ExpireAtString: "2021-05-04 15:19",
DownloadsRemaining: 1,
@@ -214,7 +202,7 @@ func writeTestFiles() {
Id: "e4TjE7CokWK0giiLNxDL",
Name: "smallfile2",
Size: "8 B",
- SHA256: "e017693e4a04a59d0b0f400fe98177fe7ee13cf7",
+ SHA1: "e017693e4a04a59d0b0f400fe98177fe7ee13cf7",
ExpireAt: 2147483645,
ExpireAtString: "2021-05-04 15:19",
DownloadsRemaining: 2,
@@ -224,7 +212,7 @@ func writeTestFiles() {
Id: "wefffewhtrhhtrhtrhtr",
Name: "smallfile3",
Size: "8 B",
- SHA256: "e017693e4a04a59d0b0f400fe98177fe7ee13cf7",
+ SHA1: "e017693e4a04a59d0b0f400fe98177fe7ee13cf7",
ExpireAt: 2147483645,
ExpireAtString: "2021-05-04 15:19",
DownloadsRemaining: 1,
@@ -234,7 +222,7 @@ func writeTestFiles() {
Id: "deletedfile123456789",
Name: "DeletedFile",
Size: "8 B",
- SHA256: "invalid",
+ SHA1: "invalid",
ExpireAt: 2147483645,
ExpireAtString: "2021-05-04 15:19",
DownloadsRemaining: 2,
@@ -244,7 +232,7 @@ func writeTestFiles() {
Id: "jpLXGJKigM4hjtA6T6sN",
Name: "smallfile",
Size: "7 B",
- SHA256: "c4f9375f9834b4e7f0a528cc65c055702bf5f24a",
+ SHA1: "c4f9375f9834b4e7f0a528cc65c055702bf5f24a",
ExpireAt: 2147483646,
ExpireAtString: "2021-05-04 15:18",
DownloadsRemaining: 1,
@@ -255,7 +243,7 @@ func writeTestFiles() {
Id: "jpLXGJKigM4hjtA6T6sN2",
Name: "smallfile",
Size: "7 B",
- SHA256: "c4f9375f9834b4e7f0a528cc65c055702bf5f24a",
+ SHA1: "c4f9375f9834b4e7f0a528cc65c055702bf5f24a",
ExpireAt: 2147483646,
ExpireAtString: "2021-05-04 15:18",
DownloadsRemaining: 1,
@@ -266,7 +254,7 @@ func writeTestFiles() {
Id: "n1tSTAGj8zan9KaT4u6p",
Name: "picture.jpg",
Size: "4 B",
- SHA256: "a8fdc205a9f19cc1c7507a60c4f01b13d11d7fd0",
+ SHA1: "a8fdc205a9f19cc1c7507a60c4f01b13d11d7fd0",
ExpireAt: 2147483646,
ExpireAtString: "2021-05-04 15:19",
DownloadsRemaining: 1,
@@ -277,7 +265,7 @@ func writeTestFiles() {
Id: "cleanuptest123456789",
Name: "cleanup",
Size: "4 B",
- SHA256: "2341354656543213246465465465432456898794",
+ SHA1: "2341354656543213246465465465432456898794",
ExpireAt: 2147483646,
ExpireAtString: "2021-05-04 15:19",
DownloadsRemaining: 0,
@@ -287,7 +275,7 @@ func writeTestFiles() {
Id: "awsTest1234567890123",
Name: "Aws Test File",
Size: "20 MB",
- SHA256: "x341354656543213246465465465432456898794",
+ SHA1: "x341354656543213246465465465432456898794",
ExpireAt: 2147483646,
ExpireAtString: "2021-05-04 15:19",
DownloadsRemaining: 4,
@@ -298,7 +286,7 @@ func writeTestFiles() {
Id: "unlimitedDownload",
Name: "unlimitedDownload",
Size: "8 B",
- SHA256: "unlimtedtest",
+ SHA1: "unlimtedtest",
ExpireAt: 2147483646,
ExpireAtString: "2021-05-04 15:19",
DownloadsRemaining: 0,
@@ -309,7 +297,7 @@ func writeTestFiles() {
Id: "unlimitedTime",
Name: "unlimitedTime",
Size: "8 B",
- SHA256: "unlimtedtest",
+ SHA1: "unlimtedtest",
ExpireAt: 0,
ExpireAtString: "2021-05-04 15:19",
DownloadsRemaining: 1,
@@ -335,209 +323,13 @@ var configTestFile = []byte(`{
"Port":"127.0.0.1:53843",
"ServerUrl": "http://127.0.0.1:53843/",
"RedirectUrl": "https://test.com/",
- "ConfigVersion": 11,
+ "ConfigVersion": 12,
"LengthId": 20,
"DataDir": "test/data",
"MaxMemory": 10,
"UseSsl": false,
"MaxFileSizeMB": 25
}`)
-var configTestFileV8 = []byte(`{
- "Port":"127.0.0.1:53843",
- "AdminName":"test",
- "AdminPassword":"10340aece68aa4fb14507ae45b05506026f276cf",
- "ServerUrl":"http://127.0.0.1:53843/",
- "DefaultDownloads":3,
- "DefaultExpiry":20,
- "DefaultPassword":"123",
- "RedirectUrl":"https://test.com/",
- "Sessions":{
- "validsession":{
- "RenewAt":2147483645,
- "ValidUntil":2147483646
- },
- "logoutsession":{
- "RenewAt":2147483645,
- "ValidUntil":2147483646
- },
- "needsRenewal":{
- "RenewAt":0,
- "ValidUntil":2147483646
- },
- "expiredsession":{
- "RenewAt":0,
- "ValidUntil":0
- }
- },
- "Files":{
- "Wzol7LyY2QVczXynJtVo":{
- "Id":"Wzol7LyY2QVczXynJtVo",
- "Name":"smallfile2",
- "Size":"8 B",
- "SHA256":"e017693e4a04a59d0b0f400fe98177fe7ee13cf7",
- "ExpireAt":2147483646,
- "ExpireAtString":"2021-05-04 15:19",
- "DownloadsRemaining":1,
- "PasswordHash":"",
- "ContentType":"text/html",
- "HotlinkId":""
- },
- "e4TjE7CokWK0giiLNxDL":{
- "Id":"e4TjE7CokWK0giiLNxDL",
- "Name":"smallfile2",
- "Size":"8 B",
- "SHA256":"e017693e4a04a59d0b0f400fe98177fe7ee13cf7",
- "ExpireAt":2147483645,
- "ExpireAtString":"2021-05-04 15:19",
- "DownloadsRemaining":2,
- "PasswordHash":"",
- "ContentType":"text/html",
- "HotlinkId":""
- },
- "wefffewhtrhhtrhtrhtr":{
- "Id":"wefffewhtrhhtrhtrhtr",
- "Name":"smallfile3",
- "Size":"8 B",
- "SHA256":"e017693e4a04a59d0b0f400fe98177fe7ee13cf7",
- "ExpireAt":2147483645,
- "ExpireAtString":"2021-05-04 15:19",
- "DownloadsRemaining":1,
- "PasswordHash":"",
- "ContentType":"text/html",
- "HotlinkId":""
- },
- "deletedfile123456789":{
- "Id":"deletedfile123456789",
- "Name":"DeletedFile",
- "Size":"8 B",
- "SHA256":"invalid",
- "ExpireAt":2147483645,
- "ExpireAtString":"2021-05-04 15:19",
- "DownloadsRemaining":2,
- "PasswordHash":"",
- "ContentType":"text/html",
- "HotlinkId":""
- },
- "jpLXGJKigM4hjtA6T6sN":{
- "Id":"jpLXGJKigM4hjtA6T6sN",
- "Name":"smallfile",
- "Size":"7 B",
- "SHA256":"c4f9375f9834b4e7f0a528cc65c055702bf5f24a",
- "ExpireAt":2147483646,
- "ExpireAtString":"2021-05-04 15:18",
- "DownloadsRemaining":1,
- "ContentType":"text/html",
- "PasswordHash":"7b30508aa9b233ab4b8a11b2af5816bdb58ca3e7",
- "HotlinkId":""
- },
- "jpLXGJKigM4hjtA6T6sN2":{
- "Id":"jpLXGJKigM4hjtA6T6sN2",
- "Name":"smallfile",
- "Size":"7 B",
- "SHA256":"c4f9375f9834b4e7f0a528cc65c055702bf5f24a",
- "ExpireAt":2147483646,
- "ExpireAtString":"2021-05-04 15:18",
- "DownloadsRemaining":1,
- "ContentType":"text/html",
- "PasswordHash":"7b30508aa9b233ab4b8a11b2af5816bdb58ca3e7",
- "HotlinkId":""
- },
- "n1tSTAGj8zan9KaT4u6p":{
- "Id":"n1tSTAGj8zan9KaT4u6p",
- "Name":"picture.jpg",
- "Size":"4 B",
- "SHA256":"a8fdc205a9f19cc1c7507a60c4f01b13d11d7fd0",
- "ExpireAt":2147483646,
- "ExpireAtString":"2021-05-04 15:19",
- "DownloadsRemaining":1,
- "PasswordHash":"",
- "ContentType":"text/html",
- "HotlinkId":"PhSs6mFtf8O5YGlLMfNw9rYXx9XRNkzCnJZpQBi7inunv3Z4A.jpg"
- },
- "cleanuptest123456789":{
- "Id":"cleanuptest123456789",
- "Name":"cleanup",
- "Size":"4 B",
- "SHA256":"2341354656543213246465465465432456898794",
- "ExpireAt":2147483646,
- "ExpireAtString":"2021-05-04 15:19",
- "DownloadsRemaining":0,
- "PasswordHash":"",
- "ContentType":"text/html",
- "HotlinkId":""
- },
- "awsTest1234567890123":{
- "Id":"awsTest1234567890123",
- "Name":"Aws Test File",
- "Size":"20 MB",
- "SHA256":"x341354656543213246465465465432456898794",
- "ExpireAt":2147483646,
- "ExpireAtString":"2021-05-04 15:19",
- "DownloadsRemaining":4,
- "PasswordHash":"",
- "ContentType":"application/octet-stream",
- "AwsBucket":"gokapi-test",
- "HotlinkId":""
- }
- },
- "Hotlinks":{
- "PhSs6mFtf8O5YGlLMfNw9rYXx9XRNkzCnJZpQBi7inunv3Z4A.jpg":{
- "Id":"PhSs6mFtf8O5YGlLMfNw9rYXx9XRNkzCnJZpQBi7inunv3Z4A.jpg",
- "FileId":"n1tSTAGj8zan9KaT4u6p"
- }
- },
- "DownloadStatus":{
- "69JCbLVxx2KxfvB6FYkrDn3oCU7BWT":{
- "Id":"69JCbLVxx2KxfvB6FYkrDn3oCU7BWT",
- "FileId":"cleanuptest123456789",
- "ExpireAt":2147483646
- }
- },
- "ApiKeys":{
- "validkey":{
- "Id":"validkey",
- "FriendlyName":"First Key",
- "LastUsed":0,
- "LastUsedString":""
- },
- "GAh1IhXDvYnqfYLazWBqMB9HSFmNPO":{
- "Id":"GAh1IhXDvYnqfYLazWBqMB9HSFmNPO",
- "FriendlyName":"Second Key",
- "LastUsed":1620671580,
- "LastUsedString":"used"
- },
- "jiREglQJW0bOqJakfjdVfe8T1EM8n8":{
- "Id":"jiREglQJW0bOqJakfjdVfe8T1EM8n8",
- "FriendlyName":"Unnamed Key",
- "LastUsed":0,
- "LastUsedString":""
- },
- "okeCMWqhVMZSpt5c1qpCWhKvJJPifb":{
- "Id":"okeCMWqhVMZSpt5c1qpCWhKvJJPifb",
- "FriendlyName":"Unnamed Key",
- "LastUsed":0,
- "LastUsedString":""
- }
- },
- "ConfigVersion":8,
- "SaltAdmin":"LW6fW4Pjv8GtdWVLSZD66gYEev6NAaXxOVBw7C",
- "SaltFiles":"lL5wMTtnVCn5TPbpRaSe4vAQodWW0hgk00WCZE",
- "LengthId":20,
- "DataDir":"test/data",
- "UseSsl":false,
- "MaxFileSizeMB":25
-}`)
-
-var configUpgradeTestFile = []byte(`{
- "Port":"127.0.0.1:53844",
- "AdminName":"admin",
- "AdminPassword":"7450c2403ab85f0e8d5436818b66b99fdd287ac6",
- "ServerUrl":"https://gokapi.url/",
- "DefaultDownloads":1,
- "DefaultExpiry":14,
- "DefaultPassword":"123",
- "RedirectUrl":"https://github.com/Forceu/Gokapi/"
-}`)
var sslCertValid = []byte(`-----BEGIN CERTIFICATE-----
MIIBVzCB/aADAgECAgEBMAoGCCqGSM49BAMCMBExDzANBgNVBAoTBkdva2FwaTAe
diff --git a/internal/test/testconfiguration/TestConfiguration_test.go b/internal/test/testconfiguration/TestConfiguration_test.go
index ab42cf6..8469f0a 100644
--- a/internal/test/testconfiguration/TestConfiguration_test.go
+++ b/internal/test/testconfiguration/TestConfiguration_test.go
@@ -23,19 +23,6 @@ func TestDelete(t *testing.T) {
test.IsEqualBool(t, helper.FolderExists(dataDir), false)
}
-func TestSetUpgradeConfigFileV0(t *testing.T) {
- os.Remove(configFile)
- WriteUpgradeConfigFileV0()
- test.FileExists(t, configFile)
- TestDelete(t)
-}
-func TestSetUpgradeConfigFileV8(t *testing.T) {
- os.Remove(configFile)
- WriteUpgradeConfigFileV0()
- test.FileExists(t, configFile)
- TestDelete(t)
-}
-
func TestWriteEncryptedFile(t *testing.T) {
database.Init("./test/filestorage.db")
fileId := WriteEncryptedFile()
diff --git a/internal/webserver/Webserver.go b/internal/webserver/Webserver.go
index f588b9a..d2b1bce 100644
--- a/internal/webserver/Webserver.go
+++ b/internal/webserver/Webserver.go
@@ -50,7 +50,8 @@ var templateFolderEmbedded embed.FS
//go:embed web/main.wasm
var wasmFile embed.FS
-const timeOutWebserver = 12 * time.Hour
+const timeOutWebserverRead = 15 * time.Minute
+const timeOutWebserverWrite = 12 * time.Hour
// Variable containing all parsed templates
var templateFolder *template.Template
@@ -93,7 +94,8 @@ func Start() {
mux.HandleFunc("/index", showIndex)
mux.HandleFunc("/login", showLogin)
mux.HandleFunc("/logout", doLogout)
- mux.HandleFunc("/upload", requireLogin(uploadFile, true))
+ mux.HandleFunc("/uploadChunk", requireLogin(uploadChunk, true))
+ mux.HandleFunc("/uploadComplete", requireLogin(uploadComplete, true))
mux.HandleFunc("/error-auth", showErrorAuth)
mux.Handle("/main.wasm", gziphandler.GzipHandler(http.HandlerFunc(serveWasm)))
if configuration.Get().Authentication.Method == authentication.OAuth2 {
@@ -105,8 +107,8 @@ func Start() {
fmt.Println("Binding webserver to " + configuration.Get().Port)
srv = http.Server{
Addr: configuration.Get().Port,
- ReadTimeout: timeOutWebserver,
- WriteTimeout: timeOutWebserver,
+ ReadTimeout: timeOutWebserverRead,
+ WriteTimeout: timeOutWebserverWrite,
Handler: mux,
}
infoMessage := "Webserver can be accessed at " + configuration.Get().ServerUrl + "admin\nPress CTRL+C to stop Gokapi"
@@ -249,7 +251,9 @@ func showLogin(w http.ResponseWriter, r *http.Request) {
redirect(w, "admin")
return
}
- time.Sleep(3 * time.Second)
+ select {
+ case <-time.After(3 * time.Second):
+ }
failedLogin = true
}
err = templateFolder.ExecuteTemplate(w, "login", LoginView{
@@ -300,7 +304,9 @@ func showDownload(w http.ResponseWriter, r *http.Request) {
if configuration.HashPassword(enteredPassword, true) != file.PasswordHash && !isValidPwCookie(r, file) {
if enteredPassword != "" {
view.IsFailedLogin = true
- time.Sleep(1 * time.Second)
+ select {
+ case <-time.After(1 * time.Second):
+ }
}
err := templateFolder.ExecuteTemplate(w, "download_password", view)
helper.Check(err)
@@ -348,7 +354,9 @@ func deleteFile(w http.ResponseWriter, r *http.Request) {
func queryUrl(w http.ResponseWriter, r *http.Request, redirectUrl string) string {
keys, ok := r.URL.Query()["id"]
if !ok || len(keys[0]) < configuration.Get().LengthId {
- time.Sleep(500 * time.Millisecond)
+ select {
+ case <-time.After(500 * time.Millisecond):
+ }
redirect(w, redirectUrl)
return ""
}
@@ -446,20 +454,28 @@ func (u *UploadView) convertGlobalConfig(isMainView bool) *UploadView {
return u
}
-// Handling of /upload
-// If the user is authenticated, this parses the uploaded file from the Multipart Form and
-// adds it to the system.
-func uploadFile(w http.ResponseWriter, r *http.Request) {
+// Handling of /uploadChunk
+// If the user is authenticated, this parses the uploaded chunk and stores it
+func uploadChunk(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=UTF-8")
- err := fileupload.Process(w, r, true, configuration.Get().MaxMemory)
+ err := fileupload.ProcessNewChunk(w, r, false)
+ responseError(w, err)
+}
+
+// Handling of /uploadComplete
+// If the user is authenticated, this completes the chunked upload and adds the assembled file to the system
+func uploadComplete(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Content-Type", "application/json; charset=UTF-8")
+ err := fileupload.CompleteChunk(w, r, false)
responseError(w, err)
}
// Outputs an error in json format
func responseError(w http.ResponseWriter, err error) {
if err != nil {
+ w.WriteHeader(http.StatusBadRequest)
_, _ = io.WriteString(w, "{\"Result\":\"error\",\"ErrorMessage\":\""+err.Error()+"\"}")
- helper.Check(err)
+ log.Println(err)
}
}
@@ -507,14 +523,16 @@ func writeFilePwCookie(w http.ResponseWriter, file models.File) {
}
// Checks if a cookie contains the correct password hash for a password-protected file
-// If incorrect, a 3 second delay is introduced unless the cookie was empty.
+// If incorrect, a 3-second delay is introduced unless the cookie was empty.
func isValidPwCookie(r *http.Request, file models.File) bool {
cookie, err := r.Cookie("p" + file.Id)
if err == nil {
if cookie.Value == file.PasswordHash {
return true
}
- time.Sleep(3 * time.Second)
+ select {
+ case <-time.After(3 * time.Second):
+ }
}
return false
}
diff --git a/internal/webserver/Webserver_test.go b/internal/webserver/Webserver_test.go
index 9a07f93..7fd5817 100644
--- a/internal/webserver/Webserver_test.go
+++ b/internal/webserver/Webserver_test.go
@@ -449,7 +449,13 @@ func TestDeleteFileInvalidKey(t *testing.T) {
func TestPostUploadNoAuth(t *testing.T) {
t.Parallel()
test.HttpPostUploadRequest(t, test.HttpTestConfig{
- Url: "http://127.0.0.1:53843/upload",
+ Url: "http://127.0.0.1:53843/uploadChunk",
+ UploadFileName: "test/fileupload.jpg",
+ UploadFieldName: "file",
+ RequiredContent: []string{"{\"Result\":\"error\",\"ErrorMessage\":\"Not authenticated\"}"},
+ })
+ test.HttpPostUploadRequest(t, test.HttpTestConfig{
+ Url: "http://127.0.0.1:53843/uploadComplete",
UploadFileName: "test/fileupload.jpg",
UploadFieldName: "file",
RequiredContent: []string{"{\"Result\":\"error\",\"ErrorMessage\":\"Not authenticated\"}"},
@@ -458,11 +464,44 @@ func TestPostUploadNoAuth(t *testing.T) {
func TestPostUpload(t *testing.T) {
test.HttpPostUploadRequest(t, test.HttpTestConfig{
- Url: "http://127.0.0.1:53843/upload",
+ Url: "http://127.0.0.1:53843/uploadChunk",
UploadFileName: "test/fileupload.jpg",
UploadFieldName: "file",
- RequiredContent: []string{"{\"Result\":\"OK\"", "\"Name\":\"fileupload.jpg\"", "\"SHA256\":\"a9993e364706816aba3e25717850c26c9cd0d89d\"", "DownloadsRemaining\":3"},
- ExcludedContent: []string{"\"Id\":\"\"", "HotlinkId\":\"\""},
+ PostValues: []test.PostBody{{
+ Key: "dztotalfilesize",
+ Value: "50",
+ }, {
+ Key: "dzchunkbyteoffset",
+ Value: "0",
+ }, {
+ Key: "dzuuid",
+ Value: "eeng4ier3Taen7a",
+ }},
+ RequiredContent: []string{"{\"result\":\"OK\"}"},
+ ExcludedContent: []string{"\"Id\":\"\"", "HotlinkId\":\"\"", "ErrorMessage"},
+ Cookies: []test.Cookie{{
+ Name: "session_token",
+ Value: "validsession",
+ }},
+ })
+
+ test.HttpPostRequest(t, test.HttpTestConfig{
+ Url: "http://127.0.0.1:53843/uploadComplete",
+ PostValues: []test.PostBody{{
+ Key: "chunkid",
+ Value: "eeng4ier3Taen7a",
+ }, {
+ Key: "filename",
+ Value: "fileupload.jpg",
+ }, {
+ Key: "filecontenttype",
+ Value: "test-content",
+ }, {
+ Key: "filesize",
+ Value: "50",
+ }},
+ RequiredContent: []string{"{\"Result\":\"OK\"", "\"Name\":\"fileupload.jpg\"", "DownloadsRemaining\":3"},
+ ExcludedContent: []string{"\"Id\":\"\"", "HotlinkId\":\"\"", "ErrorMessage"},
Cookies: []test.Cookie{{
Name: "session_token",
Value: "validsession",
@@ -645,9 +684,9 @@ func TestDisableLogin(t *testing.T) {
func TestResponseError(t *testing.T) {
w, _ := test.GetRecorder("GET", "/", nil, nil, nil)
- err := errors.New("testerror")
- defer test.ExpectPanic(t)
- responseError(w, err)
+ responseError(w, errors.New("testerror"))
+ test.IsEqualInt(t, w.Result().StatusCode, 400)
+ test.ResponseBodyContains(t, w, "testerror")
}
func TestShowErrorAuth(t *testing.T) {
@@ -658,3 +697,11 @@ func TestShowErrorAuth(t *testing.T) {
IsHtml: true,
})
}
+
+func TestServeWasm(t *testing.T) {
+ t.Parallel()
+ test.HttpPageResult(t, test.HttpTestConfig{
+ Url: "http://localhost:53843/main.wasm",
+ IsHtml: false,
+ })
+}
diff --git a/internal/webserver/api/Api.go b/internal/webserver/api/Api.go
index bce4fb3..7844300 100644
--- a/internal/webserver/api/Api.go
+++ b/internal/webserver/api/Api.go
@@ -26,6 +26,10 @@ func Process(w http.ResponseWriter, r *http.Request, maxMemory int) {
return
}
switch request.requestUrl {
+ case "/chunk/add":
+ chunkAdd(w, request)
+ case "/chunk/complete":
+ chunkComplete(w, request)
case "/files/list":
list(w)
case "/files/add":
@@ -62,31 +66,50 @@ func NewKey() string {
}
func changeFriendlyName(w http.ResponseWriter, request apiRequest) {
- if !IsValidApiKey(request.apiKeyToModify, false) {
+ if !IsValidApiKey(request.apiInfo.apiKeyToModify, false) {
sendError(w, http.StatusBadRequest, "Invalid api key provided.")
return
}
- if request.friendlyName == "" {
- request.friendlyName = "Unnamed key"
+ if request.apiInfo.friendlyName == "" {
+ request.apiInfo.friendlyName = "Unnamed key"
}
- key, ok := database.GetApiKey(request.apiKeyToModify)
+ key, ok := database.GetApiKey(request.apiInfo.apiKeyToModify)
if !ok {
sendError(w, http.StatusInternalServerError, "Could not modify API key")
return
}
- if key.FriendlyName != request.friendlyName {
- key.FriendlyName = request.friendlyName
+ if key.FriendlyName != request.apiInfo.friendlyName {
+ key.FriendlyName = request.apiInfo.friendlyName
database.SaveApiKey(key, false)
}
}
func deleteFile(w http.ResponseWriter, request apiRequest) {
- ok := storage.DeleteFile(request.fileId, true)
+ ok := storage.DeleteFile(request.fileInfo.id, true)
if !ok {
sendError(w, http.StatusBadRequest, "Invalid id provided.")
}
}
+func chunkAdd(w http.ResponseWriter, request apiRequest) {
+ err := fileupload.ProcessNewChunk(w, request.request, true)
+ if err != nil {
+ sendError(w, http.StatusBadRequest, err.Error())
+ }
+}
+func chunkComplete(w http.ResponseWriter, request apiRequest) {
+ err := request.request.ParseForm()
+ if err != nil {
+ sendError(w, http.StatusBadRequest, err.Error())
+ }
+ request.request.Form.Set("chunkid", request.request.Form.Get("uuid"))
+ err = fileupload.CompleteChunk(w, request.request, true)
+ if err != nil {
+ sendError(w, http.StatusBadRequest, err.Error())
+ return
+ }
+}
+
func list(w http.ResponseWriter) {
var validFiles []models.FileApiOutput
timeNow := time.Now().Unix()
@@ -111,23 +134,22 @@ func upload(w http.ResponseWriter, request apiRequest, maxMemory int) {
}
func duplicateFile(w http.ResponseWriter, request apiRequest) {
-
- err := request.request.ParseForm()
+ err := request.parseForm()
if err != nil {
sendError(w, http.StatusBadRequest, err.Error())
return
}
- file, ok := storage.GetFile(request.request.Form.Get("id"))
+ file, ok := storage.GetFile(request.fileInfo.id)
if !ok {
sendError(w, http.StatusBadRequest, "Invalid id provided.")
return
}
- uploadRequest, paramsToChange, filename, err := apiRequestToUploadRequest(request.request)
+ err = request.parseUploadRequest()
if err != nil {
sendError(w, http.StatusBadRequest, err.Error())
return
}
- newFile, err := storage.DuplicateFile(file, paramsToChange, filename, uploadRequest)
+ newFile, err := storage.DuplicateFile(file, request.fileInfo.paramsToChange, request.fileInfo.filename, request.fileInfo.uploadRequest)
if err != nil {
sendError(w, http.StatusBadRequest, err.Error())
return
@@ -139,6 +161,73 @@ func duplicateFile(w http.ResponseWriter, request apiRequest) {
_, _ = w.Write(result)
}
+func isAuthorisedForApi(w http.ResponseWriter, request apiRequest) bool {
+ if IsValidApiKey(request.apiKey, true) || sessionmanager.IsValidSession(w, request.request) {
+ return true
+ }
+ sendError(w, http.StatusUnauthorized, "Unauthorized")
+ return false
+}
+
+func sendError(w http.ResponseWriter, errorInt int, errorMessage string) {
+ w.WriteHeader(errorInt)
+ _, _ = w.Write([]byte("{\"Result\":\"error\",\"ErrorMessage\":\"" + errorMessage + "\"}"))
+}
+
+type apiRequest struct {
+ apiKey string
+ requestUrl string
+ request *http.Request
+ fileInfo fileInfo
+ apiInfo apiInfo
+}
+
+func (a *apiRequest) parseUploadRequest() error {
+ uploadRequest, paramsToChange, filename, err := apiRequestToUploadRequest(a.request)
+ if err != nil {
+ return err
+ }
+ a.fileInfo.uploadRequest = uploadRequest
+ a.fileInfo.paramsToChange = paramsToChange
+ a.fileInfo.filename = filename
+ return nil
+}
+
+func (a *apiRequest) parseForm() error {
+ err := a.request.ParseForm()
+ if err != nil {
+ return err
+ }
+ if a.request.Form.Get("id") != "" {
+ a.fileInfo.id = a.request.Form.Get("id")
+ }
+ return nil
+}
+
+type fileInfo struct {
+ id string // apiRequest.parseForm() needs to be called first if id is encoded in form
+ uploadRequest models.UploadRequest // apiRequest.parseUploadRequest() needs to be called first
+ paramsToChange int // apiRequest.parseUploadRequest() needs to be called first
+ filename string // apiRequest.parseUploadRequest() needs to be called first
+}
+
+type apiInfo struct {
+ friendlyName string
+ apiKeyToModify string
+}
+
+func parseRequest(r *http.Request) apiRequest {
+ return apiRequest{
+ apiKey: r.Header.Get("apikey"),
+ requestUrl: strings.Replace(r.URL.String(), "/api", "", 1),
+ request: r,
+ fileInfo: fileInfo{id: r.Header.Get("id")},
+ apiInfo: apiInfo{
+ friendlyName: r.Header.Get("friendlyName"),
+ apiKeyToModify: r.Header.Get("apiKeyToModify")},
+ }
+}
+
func apiRequestToUploadRequest(request *http.Request) (models.UploadRequest, int, string, error) {
paramsToChange := 0
allowedDownloads := 0
@@ -147,7 +236,11 @@ func apiRequestToUploadRequest(request *http.Request) (models.UploadRequest, int
unlimitedDownloads := false
password := ""
fileName := ""
- var err error
+
+ err := request.ParseForm()
+ if err != nil {
+ return models.UploadRequest{}, 0, "", err
+ }
if request.Form.Get("allowedDownloads") != "" {
paramsToChange = paramsToChange | storage.ParamDownloads
@@ -191,39 +284,6 @@ func apiRequestToUploadRequest(request *http.Request) (models.UploadRequest, int
}, paramsToChange, fileName, nil
}
-func isAuthorisedForApi(w http.ResponseWriter, request apiRequest) bool {
- if IsValidApiKey(request.apiKey, true) || sessionmanager.IsValidSession(w, request.request) {
- return true
- }
- sendError(w, http.StatusUnauthorized, "Unauthorized")
- return false
-}
-
-func sendError(w http.ResponseWriter, errorInt int, errorMessage string) {
- w.WriteHeader(errorInt)
- _, _ = w.Write([]byte("{\"Result\":\"error\",\"ErrorMessage\":\"" + errorMessage + "\"}"))
-}
-
-type apiRequest struct {
- apiKey string
- requestUrl string
- fileId string
- friendlyName string
- apiKeyToModify string
- request *http.Request
-}
-
-func parseRequest(r *http.Request) apiRequest {
- return apiRequest{
- apiKey: r.Header.Get("apikey"),
- fileId: r.Header.Get("id"),
- friendlyName: r.Header.Get("friendlyName"),
- apiKeyToModify: r.Header.Get("apiKeyToModify"),
- requestUrl: strings.Replace(r.URL.String(), "/api", "", 1),
- request: r,
- }
-}
-
// IsValidApiKey checks if the API key provides is valid. If modifyTime is true, it also automatically updates
// the lastUsed timestamp
func IsValidApiKey(key string, modifyTime bool) bool {
diff --git a/internal/webserver/api/Api_test.go b/internal/webserver/api/Api_test.go
index ffe79af..9a2dfaa 100644
--- a/internal/webserver/api/Api_test.go
+++ b/internal/webserver/api/Api_test.go
@@ -3,6 +3,7 @@ package api
import (
"bytes"
"encoding/json"
+ "fmt"
"github.com/forceu/gokapi/internal/configuration"
"github.com/forceu/gokapi/internal/configuration/database"
"github.com/forceu/gokapi/internal/models"
@@ -407,6 +408,97 @@ func TestUploadAndDuplication(t *testing.T) {
test.IsEqualString(t, resultDuplication.Name, "test.test")
}
+func TestChunkUpload(t *testing.T) {
+ err := os.WriteFile("test/tmpupload", []byte("chunktestfile"), 0600)
+ test.IsNil(t, err)
+ body, formcontent := test.FileToMultipartFormBody(t, test.HttpTestConfig{
+ UploadFileName: "test/tmpupload",
+ UploadFieldName: "file",
+ PostValues: []test.PostBody{{
+ Key: "filesize",
+ Value: "13",
+ }, {
+ Key: "offset",
+ Value: "0",
+ }, {
+ Key: "uuid",
+ Value: "tmpupload123",
+ }},
+ })
+ w, r := test.GetRecorder("POST", "/api/chunk/add", nil, []test.Header{{
+ Name: "apikey",
+ Value: "validkey",
+ }}, body)
+ r.Header.Add("Content-Type", formcontent)
+ Process(w, r, maxMemory)
+ test.IsEqualInt(t, w.Code, 200)
+ test.ResponseBodyContains(t, w, "OK")
+
+ body, formcontent = test.FileToMultipartFormBody(t, test.HttpTestConfig{
+ UploadFileName: "test/tmpupload",
+ UploadFieldName: "file",
+ PostValues: []test.PostBody{{
+ Key: "dztotalfilesize",
+ Value: "13",
+ }, {
+ Key: "dzchunkbyteoffset",
+ Value: "0",
+ }, {
+ Key: "dzuuid",
+ Value: "tmpupload123",
+ }},
+ })
+ w, r = test.GetRecorder("POST", "/api/chunk/add", nil, []test.Header{{
+ Name: "apikey",
+ Value: "validkey",
+ }}, body)
+ r.Header.Add("Content-Type", formcontent)
+ Process(w, r, maxMemory)
+ test.IsEqualInt(t, w.Code, 400)
+ test.ResponseBodyContains(t, w, "error")
+}
+
+func TestChunkComplete(t *testing.T) {
+ data := url.Values{}
+ data.Set("uuid", "tmpupload123")
+ data.Set("filename", "test.upload")
+ data.Set("filesize", "13")
+
+ w, r := test.GetRecorder("POST", "/api/chunk/complete", nil, []test.Header{
+ {Name: "apikey", Value: "validkey"},
+ {Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+ strings.NewReader(data.Encode()))
+ Process(w, r, maxMemory)
+ test.IsEqualInt(t, w.Code, 200)
+ result := struct {
+ FileInfo models.FileApiOutput `json:"FileInfo"`
+ }{}
+ response, err := io.ReadAll(w.Result().Body)
+ fmt.Println(string(response))
+ test.IsNil(t, err)
+ err = json.Unmarshal(response, &result)
+ test.IsNil(t, err)
+ test.IsEqualString(t, result.FileInfo.Name, "test.upload")
+
+ data.Set("filesize", "15")
+
+ w, r = test.GetRecorder("POST", "/api/chunk/complete", nil, []test.Header{
+ {Name: "apikey", Value: "validkey"},
+ {Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+ strings.NewReader(data.Encode()))
+ Process(w, r, maxMemory)
+ test.IsEqualInt(t, w.Code, 400)
+ test.ResponseBodyContains(t, w, "error")
+
+ w, r = test.GetRecorder("POST", "/api/chunk/complete", nil, []test.Header{
+ {Name: "apikey", Value: "validkey"},
+ {Name: "Content-type", Value: "application/x-www-form-urlencoded"}},
+ strings.NewReader("invalid&&§$%"))
+ Process(w, r, maxMemory)
+ test.IsEqualInt(t, w.Code, 400)
+ test.ResponseBodyContains(t, w, "error")
+}
+
func TestList(t *testing.T) {
w, r := test.GetRecorder("GET", "/api/files/list", nil, []test.Header{{
Name: "apikey",
diff --git a/internal/webserver/authentication/sessionmanager/SessionManager.go b/internal/webserver/authentication/sessionmanager/SessionManager.go
index 95ae0b4..f225153 100644
--- a/internal/webserver/authentication/sessionmanager/SessionManager.go
+++ b/internal/webserver/authentication/sessionmanager/SessionManager.go
@@ -55,7 +55,7 @@ func useSession(w http.ResponseWriter, id string, session models.Session) bool {
func CreateSession(w http.ResponseWriter) {
sessionString := helper.GenerateRandomString(60)
database.SaveSession(sessionString, models.Session{
- RenewAt: time.Now().Add(time.Hour).Unix(),
+ RenewAt: time.Now().Add(12 * time.Hour).Unix(),
ValidUntil: time.Now().Add(cookieLifeAdmin).Unix(),
}, cookieLifeAdmin)
writeSessionCookie(w, sessionString, time.Now().Add(cookieLifeAdmin))
diff --git a/internal/webserver/downloadstatus/DownloadStatus_test.go b/internal/webserver/downloadstatus/DownloadStatus_test.go
index 051a530..1416605 100644
--- a/internal/webserver/downloadstatus/DownloadStatus_test.go
+++ b/internal/webserver/downloadstatus/DownloadStatus_test.go
@@ -16,7 +16,7 @@ func TestMain(m *testing.M) {
Id: "test",
Name: "testName",
Size: "3 B",
- SHA256: "123456",
+ SHA1: "123456",
ExpireAt: 500,
ExpireAtString: "expire",
DownloadsRemaining: 1,
diff --git a/internal/webserver/fileupload/FileUpload.go b/internal/webserver/fileupload/FileUpload.go
index 0c03c23..aa196d2 100644
--- a/internal/webserver/fileupload/FileUpload.go
+++ b/internal/webserver/fileupload/FileUpload.go
@@ -3,9 +3,9 @@ package fileupload
import (
"github.com/forceu/gokapi/internal/configuration"
"github.com/forceu/gokapi/internal/configuration/database"
- "github.com/forceu/gokapi/internal/helper"
"github.com/forceu/gokapi/internal/models"
"github.com/forceu/gokapi/internal/storage"
+ "github.com/forceu/gokapi/internal/storage/chunking"
"io"
"net/http"
"strconv"
@@ -18,26 +18,64 @@ func Process(w http.ResponseWriter, r *http.Request, isWeb bool, maxMemory int)
if err != nil {
return err
}
- var config models.UploadRequest
- if isWeb {
- config = parseConfig(r.Form, true)
- } else {
- config = parseConfig(r.Form, false)
- }
+ defer r.MultipartForm.RemoveAll()
+ config := parseConfig(r.Form, isWeb)
file, header, err := r.FormFile("file")
if err != nil {
return err
}
result, err := storage.NewFile(file, header, config)
+ defer file.Close()
if err != nil {
return err
}
+ _, _ = io.WriteString(w, result.ToJsonResult(config.ExternalUrl))
+ return nil
+}
+
+// ProcessNewChunk processes a file chunk upload request
+func ProcessNewChunk(w http.ResponseWriter, r *http.Request, isApiCall bool) error {
+ err := r.ParseMultipartForm(int64(configuration.Get().MaxMemory) * 1024 * 1024)
+ if err != nil {
+ return err
+ }
+ defer r.MultipartForm.RemoveAll()
+ chunkInfo, err := chunking.ParseChunkInfo(r, isApiCall)
+ if err != nil {
+ return err
+ }
+ file, header, err := r.FormFile("file")
+ if err != nil {
+ return err
+ }
+
+ err = chunking.NewChunk(file, header, chunkInfo)
defer file.Close()
- _, err = io.WriteString(w, result.ToJsonResult(config.ExternalUrl))
- helper.Check(err)
- err = r.MultipartForm.RemoveAll()
- helper.Check(err)
+ if err != nil {
+ return err
+ }
+ _, _ = io.WriteString(w, "{\"result\":\"OK\"}")
+ return nil
+}
+
+func CompleteChunk(w http.ResponseWriter, r *http.Request, isApiCall bool) error {
+ err := r.ParseForm()
+ if err != nil {
+ return err
+ }
+ chunkId := r.Form.Get("chunkid")
+ config := parseConfig(r.Form, !isApiCall)
+ header, err := chunking.ParseFileHeader(r)
+ if err != nil {
+ return err
+ }
+
+ result, err := storage.NewFileFromChunk(chunkId, header, config)
+ if err != nil {
+ return err
+ }
+ _, _ = io.WriteString(w, result.ToJsonResult(config.ExternalUrl))
return nil
}
@@ -84,7 +122,6 @@ func parseConfig(values formOrHeader, setNewDefaults bool) models.UploadRequest
Password: password,
ExternalUrl: settings.ServerUrl,
MaxMemory: settings.MaxMemory,
- DataDir: settings.DataDir,
UnlimitedTime: unlimitedTime,
UnlimitedDownload: unlimitedDownload,
}
diff --git a/internal/webserver/fileupload/FileUpload_test.go b/internal/webserver/fileupload/FileUpload_test.go
index cbf84e3..b70c03c 100644
--- a/internal/webserver/fileupload/FileUpload_test.go
+++ b/internal/webserver/fileupload/FileUpload_test.go
@@ -12,8 +12,10 @@ import (
"mime/multipart"
"net/http"
"net/http/httptest"
+ "net/url"
"os"
"reflect"
+ "strings"
"testing"
)
@@ -57,9 +59,16 @@ func TestParseConfig(t *testing.T) {
}
func TestProcess(t *testing.T) {
- w := httptest.NewRecorder()
- r := getRecorder()
+ w, r := test.GetRecorder("POST", "/upload", nil, nil, strings.NewReader("invalid§$%&%§"))
err := Process(w, r, false, 20)
+ test.IsNotNil(t, err)
+
+ data := url.Values{}
+ data.Set("file", "invalid")
+
+ w = httptest.NewRecorder()
+ r = getFileUploadRecorder(false)
+ err = Process(w, r, false, 20)
test.IsNil(t, err)
resp := w.Result()
body, _ := io.ReadAll(resp.Body)
@@ -70,23 +79,86 @@ func TestProcess(t *testing.T) {
test.IsEqualString(t, result.Url, "http://127.0.0.1:53843/d?id=")
test.IsEqualString(t, result.HotlinkUrl, "http://127.0.0.1:53843/hotlink/")
test.IsEqualString(t, result.FileInfo.Name, "testFile")
- test.IsEqualString(t, result.FileInfo.SHA256, "17513aad503256b7fdc94d613aeb87b8338c433a")
test.IsEqualString(t, result.FileInfo.Size, "11 B")
test.IsEqualBool(t, result.FileInfo.UnlimitedTime, false)
test.IsEqualBool(t, result.FileInfo.UnlimitedDownloads, false)
}
-func getRecorder() *http.Request {
+func TestProcessNewChunk(t *testing.T) {
+ w, r := test.GetRecorder("POST", "/uploadChunk", nil, nil, strings.NewReader("invalid§$%&%§"))
+ err := ProcessNewChunk(w, r, false)
+ test.IsNotNil(t, err)
+
+ w = httptest.NewRecorder()
+ r = getFileUploadRecorder(false)
+ err = ProcessNewChunk(w, r, false)
+ test.IsNotNil(t, err)
+
+ w = httptest.NewRecorder()
+ r = getFileUploadRecorder(true)
+ err = ProcessNewChunk(w, r, false)
+ test.IsNil(t, err)
+ response, err := io.ReadAll(w.Result().Body)
+ test.IsNil(t, err)
+ test.IsEqualString(t, string(response), "{\"result\":\"OK\"}")
+}
+
+func TestCompleteChunk(t *testing.T) {
+ w, r := test.GetRecorder("POST", "/uploadComplete", nil, nil, strings.NewReader("invalid§$%&%§"))
+ err := CompleteChunk(w, r, false)
+ test.IsNotNil(t, err)
+
+ w = httptest.NewRecorder()
+ r = getFileUploadRecorder(false)
+ err = CompleteChunk(w, r, false)
+ test.IsNotNil(t, err)
+
+ data := url.Values{}
+ data.Set("allowedDownloads", "9")
+ data.Set("expiryDays", "5")
+ data.Set("password", "123")
+ data.Set("chunkid", "randomchunkuuid")
+ data.Set("filename", "random.file")
+ data.Set("filesize", "13")
+ w, r = test.GetRecorder("POST", "/uploadComplete", nil, nil, strings.NewReader(data.Encode()))
+ r.Header.Set("Content-type", "application/x-www-form-urlencoded")
+ err = CompleteChunk(w, r, false)
+ test.IsNil(t, err)
+
+ result := struct {
+ FileInfo models.FileApiOutput `json:"FileInfo"`
+ }{}
+ response, err := io.ReadAll(w.Result().Body)
+ test.IsNil(t, err)
+ err = json.Unmarshal(response, &result)
+ test.IsNil(t, err)
+ test.IsEqualString(t, result.FileInfo.Name, "random.file")
+
+ data.Set("chunkid", "invalid")
+ w, r = test.GetRecorder("POST", "/uploadComplete", nil, nil, strings.NewReader(data.Encode()))
+ r.Header.Set("Content-type", "application/x-www-form-urlencoded")
+ err = CompleteChunk(w, r, false)
+ test.IsNotNil(t, err)
+}
+
+func getFileUploadRecorder(addChunkInfo bool) *http.Request {
var b bytes.Buffer
w := multipart.NewWriter(&b)
+ if addChunkInfo {
+ w.WriteField("dztotalfilesize", "13")
+ w.WriteField("dzchunkbyteoffset", "0")
+ w.WriteField("dzuuid", "randomchunkuuid")
+ }
writer, _ := w.CreateFormFile("file", "testFile")
io.WriteString(writer, "testContent")
w.Close()
r := httptest.NewRequest("POST", "/upload", &b)
r.Header.Set("Content-Type", w.FormDataContentType())
- r.Header.Add("allowedDownloads", "9")
- r.Header.Add("expiryDays", "5")
- r.Header.Add("password", "123")
+ if !addChunkInfo {
+ r.Header.Add("allowedDownloads", "9")
+ r.Header.Add("expiryDays", "5")
+ r.Header.Add("password", "123")
+ }
return r
}
diff --git a/internal/webserver/web/static/apidocumentation/openapi.json b/internal/webserver/web/static/apidocumentation/openapi.json
index 4316736..66e4edc 100644
--- a/internal/webserver/web/static/apidocumentation/openapi.json
+++ b/internal/webserver/web/static/apidocumentation/openapi.json
@@ -56,13 +56,89 @@
}
}
},
+ "/chunk/add": {
+ "post": {
+ "tags": [
+ "files"
+ ],
+ "summary": "Uploads a new chunk",
+ "description": "Uploads a file in chunks, in case a reverse proxy does not support upload of larger files. Parallel uploading is supported. Must call /chunk/complete after all chunks have been uploaded.",
+ "operationId": "chunkadd",
+ "requestBody": {
+ "content": {
+ "multipart/form-data": {
+ "schema": {
+ "$ref": "#/components/schemas/chunking"
+ }
+ }
+ },
+ "required": true
+ },
+ "responses": {
+ "200": {
+ "description": "Operation successful",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/chunkUploadResult"
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "Invalid input"
+ },
+ "401": {
+ "description": "Invalid API key provided or not logged in as admin"
+ }
+ }
+ }
+ },
+ "/chunk/complete": {
+ "post": {
+ "tags": [
+ "files"
+ ],
+ "summary": "Finalises uploaded chunks",
+ "description": "Needs to be called after all chunks have been uploaded. Adds the uploaded file to Gokapi.",
+ "operationId": "chunkcomplete",
+ "requestBody": {
+ "content": {
+ "application/x-www-form-urlencoded": {
+ "schema": {
+ "$ref": "#/components/schemas/chunkingcomplete"
+ }
+ }
+ },
+ "required": true
+ },
+ "responses": {
+ "200": {
+ "description": "Operation successful",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/UploadResult"
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "Invalid input"
+ },
+ "401": {
+ "description": "Invalid API key provided or not logged in as admin"
+ }
+ }
+ }
+ },
"/files/add": {
"post": {
"tags": [
"files"
],
- "summary": "Adds a new file",
- "description": "This API call uploads the submitted file to Gokapi",
+ "summary": "Adds a new file without chunking",
+ "description": "Uploads the submitted file to Gokapi. Please note: This method does not use chunking, therefore if you are behind a reverse proxy or have a provider that limits upload filesizes, this might not work for bigger files (e.g. Cloudflare).",
"operationId": "add",
"requestBody": {
"content": {
@@ -226,9 +302,6 @@
"Size": {
"type": "string"
},
- "SHA256": {
- "type": "string"
- },
"HotlinkId": {
"type": "string"
},
@@ -272,19 +345,32 @@
"description": "File is a struct used for saving information about an uploaded file",
"x-go-package": "Gokapi/internal/models"
},
+ "chunkUploadResult": {
+ "type": "object",
+ "properties": {
+ "Result": {
+ "type": "string"
+ }
+ },
+ "description": "Result after uploading a chunk",
+ "x-go-package": "Gokapi/internal/models"
+ },
"UploadResult": {
"type": "object",
"properties": {
+ "Result": {
+ "type": "string"
+ },
"FileInfo": {
"$ref": "#/components/schemas/File"
},
"HotlinkUrl": {
"type": "string"
},
- "Result": {
+ "Url": {
"type": "string"
},
- "Url": {
+ "GenericHotlinkUrl": {
"type": "string"
}
},
@@ -346,6 +432,65 @@
"description": "Sets a new filename. Filename will be unchanged if empty."
}
}
+ },"chunking": {
+ "required": [
+ "file","uuid","filesize","offset"
+ ],
+ "type": "object",
+ "properties": {
+ "file": {
+ "type": "string",
+ "description": "The file to be uploaded",
+ "format": "binary"
+ },
+ "uuid": {
+ "type": "string",
+          "description": "A unique ID that has to be the same for all chunks of a single file. Needs to be at least 10 characters long."
+ },
+ "filesize": {
+ "type": "integer",
+ "description": "The total filesize of the final file in bytes"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "The chunk's offset starting at the beginning of the file"
+ }
+ }
+ },"chunkingcomplete": {
+ "required": [
+ "uuid","filename","filesize"
+ ],
+ "type": "object",
+ "properties": {
+ "uuid": {
+ "type": "string",
+          "description": "The unique ID that was used for the uploaded chunks"
+ },
+ "filename": {
+ "type": "string",
+          "description": "The filename of the uploaded file"
+ },
+ "filesize": {
+ "type": "integer",
+ "description": "The total filesize of the uploaded file in bytes"
+ },
+ "contenttype": {
+ "type": "string",
+ "description": "The MIME content type. If empty, application/octet-stream will be used."
+ },
+ "allowedDownloads": {
+ "type": "integer",
+ "description": "How many downloads are allowed. Last used value from web interface will be used if empty. Unlimited if 0 is passed."
+ },
+ "expiryDays": {
+ "type": "integer",
+ "description": "How many days the file will be stored. Last used value from web interface will be used if empty. Unlimited if 0 is passed."
+ },
+ "password": {
+ "type": "string",
+ "description": "Password for this file to be set. No password will be used if empty"
+ }
+ }
}
},
"securitySchemes": {
diff --git a/internal/webserver/web/static/js/admin.js b/internal/webserver/web/static/js/admin.js
index cbc416b..8d5ad47 100644
--- a/internal/webserver/web/static/js/admin.js
+++ b/internal/webserver/web/static/js/admin.js
@@ -5,16 +5,14 @@ var dropzoneObject;
Dropzone.options.uploaddropzone = {
paramName: "file",
- maxFilesize: 102400, // 100 GB
- timeout: 7200000,
+ dictDefaultMessage: "Drop files, paste or click here to upload",
createImageThumbnails: false,
- success: function (file, response) {
- addRow(response)
- this.removeFile(file);
+ chunksUploaded: function(file, done) {
+ sendChunkComplete(file, done);
},
- init: function () {
+ init: function() {
dropzoneObject = this;
- this.on("sending", function (file, xhr, formData) {
+ this.on("sending", function(file, xhr, formData) {
formData.append("allowedDownloads", document.getElementById("allowedDownloads").value);
formData.append("expiryDays", document.getElementById("expiryDays").value);
formData.append("password", document.getElementById("password").value);
@@ -24,7 +22,7 @@ Dropzone.options.uploaddropzone = {
},
};
-document.onpaste = function (event) {
+document.onpaste = function(event) {
var items = (event.clipboardData || event.originalEvent.clipboardData).items;
for (index in items) {
var item = items[index];
@@ -32,7 +30,7 @@ document.onpaste = function (event) {
dropzoneObject.addFile(item.getAsFile());
}
if (item.kind === 'string') {
- item.getAsString(function (s) {
+ item.getAsString(function(s) {
// If a picture was copied from a website, the origin information might be submitted, which is filtered with this regex out
const pattern = /
/gi;
if (pattern.test(s) === false) {
@@ -50,6 +48,63 @@ document.onpaste = function (event) {
}
}
+function urlencodeFormData(fd) {
+ let s = '';
+
+ function encode(s) {
+ return encodeURIComponent(s).replace(/%20/g, '+');
+ }
+ for (var pair of fd.entries()) {
+ if (typeof pair[1] == 'string') {
+ s += (s ? '&' : '') + encode(pair[0]) + '=' + encode(pair[1]);
+ }
+ }
+ return s;
+}
+
+function sendChunkComplete(file, done) {
+ var xhr = new XMLHttpRequest();
+ xhr.open("POST", "./uploadComplete", true);
+ xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
+
+ let formData = new FormData();
+ formData.append("allowedDownloads", document.getElementById("allowedDownloads").value);
+ formData.append("expiryDays", document.getElementById("expiryDays").value);
+ formData.append("password", document.getElementById("password").value);
+ formData.append("isUnlimitedDownload", !document.getElementById("enableDownloadLimit").checked);
+ formData.append("isUnlimitedTime", !document.getElementById("enableTimeLimit").checked);
+ formData.append("chunkid", file.upload.uuid);
+ formData.append("filesize", file.size);
+ formData.append("filename", file.name);
+ formData.append("filecontenttype", file.type);
+
+ xhr.onreadystatechange = function() {
+ if (this.readyState == 4) {
+ if (this.status == 200) {
+ Dropzone.instances[0].removeFile(file);
+ addRow(xhr.response);
+ done();
+ } else {
+ file.accepted = false;
+ Dropzone.instances[0]._errorProcessing([file], getErrorMessage(xhr.responseText));
+ }
+ }
+ };
+
+
+ xhr.send(urlencodeFormData(formData));
+}
+
+function getErrorMessage(response) {
+ let result;
+ try {
+ result = JSON.parse(response);
+ } catch (e) {
+ return "Unknown error: Server could not process file";
+ }
+ return "Error processing file: " + result.ErrorMessage;
+}
+
function checkBoxChanged(checkBox, correspondingInput) {
let disable = !checkBox.checked;
@@ -65,11 +120,15 @@ function checkBoxChanged(checkBox, correspondingInput) {
}
function parseData(data) {
- if (!data) return {"Result": "error"};
+ if (!data) return {
+ "Result": "error"
+ };
if (typeof data === 'object') return data;
if (typeof data === 'string') return JSON.parse(data);
- return {"Result": "error"};
+ return {
+ "Result": "error"
+ };
}
function addRow(jsonText) {
diff --git a/internal/webserver/web/templates/html_admin.tmpl b/internal/webserver/web/templates/html_admin.tmpl
index 208b5fe..9d75ec5 100644
--- a/internal/webserver/web/templates/html_admin.tmpl
+++ b/internal/webserver/web/templates/html_admin.tmpl
@@ -7,7 +7,7 @@
Upload
-
@@ -98,7 +98,13 @@
diff --git a/internal/webserver/web/templates/string_constants.tmpl b/internal/webserver/web/templates/string_constants.tmpl
index 7b1406a..7d88d2d 100644
--- a/internal/webserver/web/templates/string_constants.tmpl
+++ b/internal/webserver/web/templates/string_constants.tmpl
@@ -1,2 +1,2 @@
{{define "app_name"}}Gokapi{{end}}
-{{define "version"}}1.5.2{{end}}
+{{define "version"}}1.6.0{{end}}
diff --git a/openapi.json b/openapi.json
index 4316736..66e4edc 100644
--- a/openapi.json
+++ b/openapi.json
@@ -56,13 +56,89 @@
}
}
},
+ "/chunk/add": {
+ "post": {
+ "tags": [
+ "files"
+ ],
+ "summary": "Uploads a new chunk",
+ "description": "Uploads a file in chunks, in case a reverse proxy does not support upload of larger files. Parallel uploading is supported. Must call /chunk/complete after all chunks have been uploaded.",
+ "operationId": "chunkadd",
+ "requestBody": {
+ "content": {
+ "multipart/form-data": {
+ "schema": {
+ "$ref": "#/components/schemas/chunking"
+ }
+ }
+ },
+ "required": true
+ },
+ "responses": {
+ "200": {
+ "description": "Operation successful",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/chunkUploadResult"
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "Invalid input"
+ },
+ "401": {
+ "description": "Invalid API key provided or not logged in as admin"
+ }
+ }
+ }
+ },
+ "/chunk/complete": {
+ "post": {
+ "tags": [
+ "files"
+ ],
+ "summary": "Finalises uploaded chunks",
+ "description": "Needs to be called after all chunks have been uploaded. Adds the uploaded file to Gokapi.",
+ "operationId": "chunkcomplete",
+ "requestBody": {
+ "content": {
+ "application/x-www-form-urlencoded": {
+ "schema": {
+ "$ref": "#/components/schemas/chunkingcomplete"
+ }
+ }
+ },
+ "required": true
+ },
+ "responses": {
+ "200": {
+ "description": "Operation successful",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/UploadResult"
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "Invalid input"
+ },
+ "401": {
+ "description": "Invalid API key provided or not logged in as admin"
+ }
+ }
+ }
+ },
"/files/add": {
"post": {
"tags": [
"files"
],
- "summary": "Adds a new file",
- "description": "This API call uploads the submitted file to Gokapi",
+ "summary": "Adds a new file without chunking",
+ "description": "Uploads the submitted file to Gokapi. Please note: This method does not use chunking, therefore if you are behind a reverse proxy or have a provider that limits upload filesizes, this might not work for bigger files (e.g. Cloudflare).",
"operationId": "add",
"requestBody": {
"content": {
@@ -226,9 +302,6 @@
"Size": {
"type": "string"
},
- "SHA256": {
- "type": "string"
- },
"HotlinkId": {
"type": "string"
},
@@ -272,19 +345,32 @@
"description": "File is a struct used for saving information about an uploaded file",
"x-go-package": "Gokapi/internal/models"
},
+ "chunkUploadResult": {
+ "type": "object",
+ "properties": {
+ "Result": {
+ "type": "string"
+ }
+ },
+ "description": "Result after uploading a chunk",
+ "x-go-package": "Gokapi/internal/models"
+ },
"UploadResult": {
"type": "object",
"properties": {
+ "Result": {
+ "type": "string"
+ },
"FileInfo": {
"$ref": "#/components/schemas/File"
},
"HotlinkUrl": {
"type": "string"
},
- "Result": {
+ "Url": {
"type": "string"
},
- "Url": {
+ "GenericHotlinkUrl": {
"type": "string"
}
},
@@ -346,6 +432,65 @@
"description": "Sets a new filename. Filename will be unchanged if empty."
}
}
+ },"chunking": {
+ "required": [
+ "file","uuid","filesize","offset"
+ ],
+ "type": "object",
+ "properties": {
+ "file": {
+ "type": "string",
+ "description": "The file to be uploaded",
+ "format": "binary"
+ },
+ "uuid": {
+ "type": "string",
+ "description": "A unique ID that has to be the same for all chunks of a single file. Needs to be at least 10 characters long.",
+ },
+ "filesize": {
+ "type": "integer",
+ "description": "The total filesize of the final file in bytes"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "The chunk's offset starting at the beginning of the file"
+ }
+ }
+ },"chunkingcomplete": {
+ "required": [
+ "uuid","filename","filesize"
+ ],
+ "type": "object",
+ "properties": {
+ "uuid": {
+ "type": "string",
+ "description": "The unique ID that was used for the uploaded chunks",
+ },
+ "filename": {
+ "type": "string",
+ "description": "The filename of the uploaded file",
+ },
+ "filesize": {
+ "type": "integer",
+ "description": "The total filesize of the uploaded file in bytes"
+ },
+ "contenttype": {
+ "type": "string",
+ "description": "The MIME content type. If empty, application/octet-stream will be used."
+ },
+ "allowedDownloads": {
+ "type": "integer",
+ "description": "How many downloads are allowed. Last used value from web interface will be used if empty. Unlimited if 0 is passed."
+ },
+ "expiryDays": {
+ "type": "integer",
+ "description": "How many days the file will be stored. Last used value from web interface will be used if empty. Unlimited if 0 is passed."
+ },
+ "password": {
+ "type": "string",
+ "description": "Password for this file to be set. No password will be used if empty"
+ }
+ }
}
},
"securitySchemes": {