diff --git a/build/go-generate/minifyStaticContent.go b/build/go-generate/minifyStaticContent.go index d6b5021..960cfba 100644 --- a/build/go-generate/minifyStaticContent.go +++ b/build/go-generate/minifyStaticContent.go @@ -4,7 +4,7 @@ package main import ( "fmt" - "github.com/tdewolff/minify/v2" + minify "github.com/tdewolff/minify/v2" "github.com/tdewolff/minify/v2/css" "github.com/tdewolff/minify/v2/js" "os" diff --git a/build/go-generate/updateProtectedUrls.go b/build/go-generate/updateProtectedUrls.go index da74cfe..a5fe33b 100644 --- a/build/go-generate/updateProtectedUrls.go +++ b/build/go-generate/updateProtectedUrls.go @@ -8,7 +8,7 @@ import ( "regexp" "strings" - "golang.org/x/exp/slices" + slices "golang.org/x/exp/slices" ) const fileSetup = "../../internal/webserver/Webserver.go" diff --git a/build/go.mod b/build/go.mod index ba2e29e..aa83579 100644 --- a/build/go.mod +++ b/build/go.mod @@ -3,10 +3,11 @@ module github.com/forceu/gokapi go 1.22 require ( - github.com/DATA-DOG/go-sqlmock v1.5.2 github.com/NYTimes/gziphandler v1.1.1 - github.com/aws/aws-sdk-go v1.53.19 + github.com/alicebob/miniredis/v2 v2.33.0 + github.com/aws/aws-sdk-go v1.54.11 github.com/caarlos0/env/v6 v6.10.1 + github.com/gomodule/redigo v1.9.2 github.com/jinzhu/copier v0.4.0 github.com/johannesboyne/gofakes3 v0.0.0-20240513200200-99de01ee122d github.com/juju/ratelimit v1.0.2 @@ -17,12 +18,11 @@ require ( golang.org/x/sync v0.7.0 golang.org/x/term v0.21.0 gopkg.in/yaml.v3 v3.0.1 - modernc.org/sqlite v1.30.0 - github.com/tdewolff/minify/v2 v2.20.7 - github.com/tdewolff/parse/v2 v2.7.5 + modernc.org/sqlite v1.30.1 ) require ( + github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/go-jose/go-jose/v4 v4.0.2 // indirect github.com/google/uuid v1.6.0 // indirect @@ -33,10 +33,13 @@ require ( github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46 // indirect github.com/shabbyrobe/gocovmerge v0.0.0-20230507112040-c3350d9342df // indirect + github.com/tdewolff/minify/v2 v2.20.34 // indirect + github.com/tdewolff/parse/v2 v2.7.15 // indirect + github.com/yuin/gopher-lua v1.1.1 // indirect golang.org/x/tools v0.22.0 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect modernc.org/gc/v3 v3.0.0-20240304020402-f0dba7c97c2b // indirect - modernc.org/libc v1.52.1 // indirect + modernc.org/libc v1.53.4 // indirect modernc.org/mathutil v1.6.0 // indirect modernc.org/memory v1.8.0 // indirect modernc.org/strutil v1.2.0 // indirect diff --git a/build/go.sum b/build/go.sum index 84f8f91..2d27e44 100644 --- a/build/go.sum +++ b/build/go.sum @@ -42,11 +42,14 @@ git.mills.io/prologic/bitcask v1.0.2/go.mod h1:ppXpR3haeYrijyJDleAkSGH3p90w6sIHx git.sr.ht/~shabbyrobe/gocovmerge v0.0.0-20180507124511-f6ea450bfb63/go.mod h1:7YhY1ru/6vTScuHp4NpcCVCUIyfTdPK7+h4NaJohCCk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU= github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/abcum/lcp v0.0.0-20201209214815-7a3f3840be81/go.mod 
h1:6ZvnjTZX1LNo1oLpfaJK8h+MXqHxcBFBIwkgsv+xlv0= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= +github.com/alicebob/miniredis/v2 v2.33.0/go.mod h1:MhP4a3EU7aENRi9aO+tHfTBZicLqQevyi/DJpoj6mi0= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= @@ -62,6 +65,7 @@ github.com/aws/aws-sdk-go v1.49.22/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3Tj github.com/aws/aws-sdk-go v1.51.7/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= github.com/aws/aws-sdk-go v1.51.25/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= github.com/aws/aws-sdk-go v1.53.19/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= +github.com/aws/aws-sdk-go v1.54.11/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= @@ -157,6 +161,7 @@ github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/gomodule/redigo v1.9.2/go.mod h1:KsU3hiK/Ay8U42qpaJk+kuNa3C+spxapWpM+ywhcgtw= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -245,6 +250,7 @@ github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:C github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCyc/Ib3bDTKd379tNMpB/7/H5TjM2Y9QJ5THLbE= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= @@ -260,6 +266,7 @@ github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNx github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-sqlite3 v1.14.18/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/mediocregopher/radix/v3 
v3.8.1/go.mod h1:8FL3F6UQRXHXIBSPUs5h0RybMF8i4n7wVopoX3x7Bv8= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= @@ -350,9 +357,11 @@ github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69 github.com/tdewolff/minify/v2 v2.12.5 h1:s2KDBt/D/3ayE3gcqQF8VIgTmYgkx+btuLvVAeePzZM= github.com/tdewolff/minify/v2 v2.12.5/go.mod h1:i8QXtVyL7Ddwc4I5gqzvgBqKlTMgMNTbiXaPO4Iqg+A= github.com/tdewolff/minify/v2 v2.20.7/go.mod h1:bj2NpP3zoUhsPzE4oM4JYwuUyVCU/uMaCYZ6/riEjIo= +github.com/tdewolff/minify/v2 v2.20.34/go.mod h1:L1VYef/jwKw6Wwyk5A+T0mBjjn3mMPgmjjA688RNsxU= github.com/tdewolff/parse/v2 v2.6.5 h1:lYvWBk55GkqKl0JJenGpmrgu/cPHQQ6/Mm1hBGswoGQ= github.com/tdewolff/parse/v2 v2.6.5/go.mod h1:woz0cgbLwFdtbjJu8PIKxhW05KplTFQkOdX78o+Jgrs= github.com/tdewolff/parse/v2 v2.7.5/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W1aghka0soA= +github.com/tdewolff/parse/v2 v2.7.15/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W1aghka0soA= github.com/tdewolff/test v1.0.7 h1:8Vs0142DmPFW/bQeHRP3MV19m1gvndjUb1sn8yy74LM= github.com/tdewolff/test v1.0.7/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE= github.com/tdewolff/test v1.0.11-0.20231101010635-f1265d231d52/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE= @@ -369,6 +378,7 @@ github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= @@ -856,6 +866,7 @@ modernc.org/libc v1.40.5/go.mod h1:YAXkAZ8ktnkCKaN9sw/UDeUVkGYJ/YquGO4FTi5nmHE= modernc.org/libc v1.47.0/go.mod h1:gzCncw0a74aCiVqHeWAYHHaW//fkSHHS/3S/gfhLlCI= modernc.org/libc v1.49.3/go.mod h1:yMZuGkn7pXbKfoT/M35gFJOAEdSKdxL0q64sF7KqCDo= modernc.org/libc v1.52.1/go.mod h1:HR4nVzFDSDizP620zcMCgjb1/8xk2lg5p/8yjfGv1IQ= +modernc.org/libc v1.53.4/go.mod h1:aGsLofnkcct8lTJnKQnCqJO37ERAXSHamSuWLFoF2Cw= modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo= modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E= modernc.org/memory v1.8.0/go.mod h1:XPZ936zp5OMKGWPqbD3JShgd/ZoQ7899TUuQqxY+peU= @@ -866,6 +877,7 @@ modernc.org/sqlite v1.28.0/go.mod h1:Qxpazz0zH8Z1xCFyi5GSL3FzbtZ3fvbjmywNogldEW0 modernc.org/sqlite v1.29.5/go.mod h1:S02dvcmm7TnTRvGhv8IGYyLnIt7AS2KPaB1F/71p75U= modernc.org/sqlite v1.29.8/go.mod h1:lQPm27iqa4UNZpmr4Aor0MH0HkCLbt1huYDfWylLZFk= modernc.org/sqlite v1.30.0/go.mod h1:cgkTARJ9ugeXSNaLBPK3CqbOe7Ec7ZhWPoMFGldEYEw= +modernc.org/sqlite v1.30.1/go.mod h1:DUmsiWQDaAvU4abhc/N+djlom/L2o8f7gZ95RCvyoLU= modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0= modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= diff --git a/cmd/gokapi/Main.go b/cmd/gokapi/Main.go index 1652aa9..0b8f3eb 
100644 --- a/cmd/gokapi/Main.go +++ b/cmd/gokapi/Main.go @@ -8,6 +8,7 @@ Main routine import ( "fmt" + "github.com/forceu/gokapi/internal/configuration/database/migration" "github.com/forceu/gokapi/internal/helper/systemd" "os" "os/signal" @@ -45,14 +46,17 @@ const versionGokapi = "1.8.4" func main() { passedFlags := flagparser.ParseFlags() handleServiceInstall(passedFlags) + handleDbMigration(passedFlags) showVersion(passedFlags) fmt.Println(logo) fmt.Println("Gokapi v" + versionGokapi + " starting") setup.RunIfFirstStart() configuration.Load() + if !reconfigureServer(passedFlags) { + configuration.ConnectDatabase() + } setDeploymentPassword(passedFlags) - reconfigureServer(passedFlags) encryption.Init(*configuration.Get()) authentication.Init(configuration.Get().Authentication) createSsl(passedFlags) @@ -148,10 +152,12 @@ func initCloudConfig(passedFlags flagparser.MainFlags) { } // Checks for command line arguments that have to be parsed after loading the configuration -func reconfigureServer(passedFlags flagparser.MainFlags) { +func reconfigureServer(passedFlags flagparser.MainFlags) bool { if passedFlags.Reconfigure { setup.RunConfigModification() + return true } + return false } func createSsl(passedFlags flagparser.MainFlags) { @@ -160,6 +166,14 @@ func createSsl(passedFlags flagparser.MainFlags) { } } +func handleDbMigration(passedFlags flagparser.MainFlags) { + if !passedFlags.Migration.DoMigration { + return + } + migration.Do(passedFlags.Migration) + osExit(0) +} + func handleServiceInstall(passedFlags flagparser.MainFlags) { if passedFlags.InstallService && passedFlags.UninstallService { fmt.Println("Error: Both install and uninstall flags are set.") diff --git a/docs/advanced.rst b/docs/advanced.rst index ba2ed91..a01c574 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -57,41 +57,45 @@ Available environment variables ================================== -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| Name | Action | Persistent [*]_ | Default | -+=============================+=====================================================================================+=================+=============================+ -| GOKAPI_CHUNK_SIZE_MB | Sets the size of chunks that are uploaded in MB | Yes | 45 | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| GOKAPI_CONFIG_DIR | Sets the directory for the config file | No | config | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| GOKAPI_CONFIG_FILE | Sets the name of the config file | No | config.json | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| GOKAPI_DATA_DIR | Sets the directory for the data | Yes | data | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| GOKAPI_DB_NAME | Sets the name for the database file | No | gokapi.sqlite | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| GOKAPI_LENGTH_ID | Sets the length of 
the download IDs. Value needs to be 5 or more | Yes | 15 | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| GOKAPI_MAX_FILESIZE | Sets the maximum allowed file size in MB | Yes | 102400 (100GB) | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| GOKAPI_MAX_MEMORY_UPLOAD | Sets the amount of RAM in MB that can be allocated for an upload chunk or file | Yes | 50 | -| | | | | -| | Any chunk or file with a size greater than that will be written to a temporary file | | | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| GOKAPI_MAX_PARALLEL_UPLOADS | Set the amount of chunks that are uploaded in parallel for a single file | Yes | 4 | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| GOKAPI_PORT | Sets the webserver port | Yes | 53842 | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| GOKAPI_DISABLE_CORS_CHECK | Disables the CORS check on startup and during setup, if set to “true” | No | false | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| GOKAPI_LOG_STDOUT | Also outputs all log file entries to the console output | No | false | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| DOCKER_NONROOT | Docker only: Runs the binary in the container as a non-root user, if set to “true” | No | false | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ -| TMPDIR | Sets the path which contains temporary files | No | Non-Docker: Default OS path | -| | | | | -| | | | Docker: [DATA_DIR] | -+-----------------------------+-------------------------------------------------------------------------------------+-----------------+-----------------------------+ ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| Name | Action | Persistent [*]_ | Default | ++===============================+=====================================================================================+=================+======================================+ +| GOKAPI_CHUNK_SIZE_MB | Sets the size of chunks that are uploaded in MB | Yes | 45 | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| GOKAPI_CONFIG_DIR | Sets the directory for the config file | No | config | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| GOKAPI_CONFIG_FILE | Sets the name of the config file | No | config.json | 
++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| GOKAPI_DATA_DIR | Sets the directory for the data | Yes | data | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| GOKAPI_DATABASE_URL | Sets the type and location of the database. See :ref:`Databases` | Yes | sqlite://[data folder]/gokapi.sqlite | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| GOKAPI_LENGTH_ID | Sets the length of the download IDs. Value needs to be 5 or more | Yes | 15 | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| GOKAPI_MAX_FILESIZE | Sets the maximum allowed file size in MB | Yes | 102400 (100GB) | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| GOKAPI_MAX_MEMORY_UPLOAD | Sets the amount of RAM in MB that can be allocated for an upload chunk or file | Yes | 50 | +| | | | | +| | Any chunk or file with a size greater than that will be written to a temporary file | | | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| GOKAPI_MAX_PARALLEL_UPLOADS | Set the amount of chunks that are uploaded in parallel for a single file | Yes | 4 | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| GOKAPI_PORT | Sets the webserver port | Yes | 53842 | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| GOKAPI_DISABLE_CORS_CHECK | Disables the CORS check on startup and during setup, if set to "true" | No | false | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| GOKAPI_LOG_STDOUT | Also outputs all log file entries to the console output | No | false | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| DOCKER_NONROOT | Docker only: Runs the binary in the container as a non-root user, if set to "true" | No | false | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| TMPDIR | Sets the path which contains temporary files | No | Non-Docker: Default OS path | +| | | | | +| | | | Docker: [DATA_DIR] | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ +| GOKAPI_DB_NAME *(deprecated)* | Sets the name for the database file. | No | gokapi.sqlite | +| | | | | +| | *Deprecated: Only used during update. 
Will be removed with v1.10.0* | | | ++-------------------------------+-------------------------------------------------------------------------------------+-----------------+--------------------------------------+ .. [*] Variables that are persistent must be submitted during the first start when Gokapi creates a new config file. They can be omitted afterwards. Non-persistent variables need to be set on every start. @@ -123,6 +127,85 @@ All values that are described in :ref:`cloudstorage` can be passed as environmen +---------------------------+-----------------------------------------+-----------------------------+ + +.. _databases: + + +******************************** +Databases +******************************** + +By default, Gokapi uses an SQLite database for data storage, which should suffice for most use cases. Additionally, Redis is available as an experimental option. + + + +Migrating to a different database +================================= + +To switch to a different database, Gokapi provides a migration tool. By running: + +:: + + gokapi --migrate [old Database URL] [new Database URL] + +all existing data, except for user sessions, will be transferred to the new database. After the migration, you will need to rerun the setup and specify the new database location. For details on the correct database URL format, refer to the section :ref:`databaseUrl`. + +For Docker users, the command is: +:: + + docker run --rm -v gokapi-data:/app/data f0rc3/gokapi:latest /app/run.sh [old Database URL] [new Database URL] + + +.. _databaseUrl: + +Database URL format +--------------------------------- + +Database URLs must start with either ``sqlite://`` or ``redis://``. + +For SQLite, the path to the database follows the prefix. No additional options are allowed. + +For Redis, the URL can include authentication credentials (username and password), an optional prefix for keys, and parameter to use SSL. + + +Redis URL Format +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +A Redis URL has the following structure: +:: + + redis://[username:password@]host[:port][?options] + +* username: (optional) The username for authentication. +* password: (optional) The password for authentication. +* host: (required) The address of the Redis server. +* port: (optional) The port of the Redis server (default is 6379). +* options: (optional) Additional options such as SSL (``ssl=true``) and key prefix (``prefix=``). + + +Examples +--------------------------------- + +Migrating SQLite (``/app/data/gokapi.sqlite``) to Redis (``127.0.0.1:6379``): + + +:: + + gokapi --migrate sqlite:///app/data/gokapi.sqlite redis://127.0.0.1:6379 + +Migrating SQLite (``/app/data/gokapi.sqlite``) to SQLite (``./data/gokapi.sqlite``): + +:: + + gokapi --migrate sqlite:///app/data/gokapi.sqlite sqlite://./data/gokapi.sqlite + +Migrating Redis (``127.0.0.1:6379, User: test, Password: 1234, Prefix: gokapi_, using SSL``) to SQLite (``./data/gokapi.sqlite``): + + +:: + + gokapi --migrate "redis://test:1234@127.0.0.1:6379?prefix=gokapi_&ssl=true" sqlite://./data/gokapi.sqlite + .. _api: diff --git a/docs/setup.rst b/docs/setup.rst index 661ba43..0e69e9c 100644 --- a/docs/setup.rst +++ b/docs/setup.rst @@ -86,6 +86,23 @@ During the first start, a new configuration file will be created and you will be +Database +"""""""""""""" +By default, Gokapi stores its data in a database located in the ``data`` directory. You can specify a different database location in this menu. If no changes are needed, you can proceed as is. Experimental Redis support is also available. 
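To illustrate the database URL format documented above, the following is a minimal, self-contained Go sketch of how a ``sqlite://`` or ``redis://`` URL can be split into its components with the standard ``net/url`` package. It is not Gokapi's actual ``ParseUrl`` implementation (that lives in ``internal/configuration/database/Database.go`` further down in this patch), and the ``parseDbUrl`` and ``connInfo`` names are hypothetical, chosen only for this example:

::

    package main

    import (
        "fmt"
        "net/url"
        "strings"
    )

    // connInfo is a hypothetical struct holding the parsed components
    // of a database URL in the format documented above.
    type connInfo struct {
        Type     string // "sqlite" or "redis"
        Host     string // file path for SQLite, host:port for Redis
        Username string
        Password string
        Prefix   string // optional Redis key prefix
        UseSsl   bool   // true if "ssl=true" was passed
    }

    // parseDbUrl is an illustrative sketch that accepts URLs of the form
    // sqlite://<path> or redis://[user:pass@]host[:port][?ssl=true&prefix=x]
    func parseDbUrl(raw string) (connInfo, error) {
        u, err := url.Parse(raw)
        if err != nil {
            return connInfo{}, err
        }
        info := connInfo{Type: strings.ToLower(u.Scheme)}
        switch info.Type {
        case "sqlite":
            // For SQLite, everything after the prefix is treated as a file path
            info.Host = strings.TrimPrefix(raw, "sqlite://")
        case "redis":
            info.Host = u.Host
            info.Username = u.User.Username()
            info.Password, _ = u.User.Password()
            q := u.Query()
            info.Prefix = q.Get("prefix")
            info.UseSsl = q.Get("ssl") == "true"
        default:
            return connInfo{}, fmt.Errorf("unsupported database type: %s", raw)
        }
        return info, nil
    }

    func main() {
        info, err := parseDbUrl("redis://test:1234@127.0.0.1:6379?prefix=gokapi_&ssl=true")
        if err != nil {
            panic(err)
        }
        fmt.Printf("%+v\n", info)
        // Prints:
        // {Type:redis Host:127.0.0.1:6379 Username:test Password:1234 Prefix:gokapi_ UseSsl:true}
    }

The same components correspond to the individual prompts shown during setup (database type, location or host, key prefix, credentials and SSL), which are described next.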
+ +You can configure the following settings: + +- **Type of database** Choose either SQLite or Redis. +- **Database location** Specify the path to the SQLite database. +- **Database host** Provide the host and port number for the Redis database. +- **Key prefix (optional)** This prefix will be added to all keys to prevent conflicts if the database is shared with other applications. +- **Username (optional)** Enter the username for database connection. +- **Password (optional)** Enter the password for database connection. +- **Use SSL** Select this option to establish an SSL connection. + +.. warning:: + The Redis password will be stored in plain text and can be viewed when re-running the setup. + Webserver """""""""""""" diff --git a/go.mod b/go.mod index ba2e29e..aa83579 100644 --- a/go.mod +++ b/go.mod @@ -3,10 +3,11 @@ module github.com/forceu/gokapi go 1.22 require ( - github.com/DATA-DOG/go-sqlmock v1.5.2 github.com/NYTimes/gziphandler v1.1.1 - github.com/aws/aws-sdk-go v1.53.19 + github.com/alicebob/miniredis/v2 v2.33.0 + github.com/aws/aws-sdk-go v1.54.11 github.com/caarlos0/env/v6 v6.10.1 + github.com/gomodule/redigo v1.9.2 github.com/jinzhu/copier v0.4.0 github.com/johannesboyne/gofakes3 v0.0.0-20240513200200-99de01ee122d github.com/juju/ratelimit v1.0.2 @@ -17,12 +18,11 @@ require ( golang.org/x/sync v0.7.0 golang.org/x/term v0.21.0 gopkg.in/yaml.v3 v3.0.1 - modernc.org/sqlite v1.30.0 - github.com/tdewolff/minify/v2 v2.20.7 - github.com/tdewolff/parse/v2 v2.7.5 + modernc.org/sqlite v1.30.1 ) require ( + github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/go-jose/go-jose/v4 v4.0.2 // indirect github.com/google/uuid v1.6.0 // indirect @@ -33,10 +33,13 @@ require ( github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46 // indirect github.com/shabbyrobe/gocovmerge v0.0.0-20230507112040-c3350d9342df // indirect + github.com/tdewolff/minify/v2 v2.20.34 // indirect + github.com/tdewolff/parse/v2 v2.7.15 // indirect + github.com/yuin/gopher-lua v1.1.1 // indirect golang.org/x/tools v0.22.0 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect modernc.org/gc/v3 v3.0.0-20240304020402-f0dba7c97c2b // indirect - modernc.org/libc v1.52.1 // indirect + modernc.org/libc v1.53.4 // indirect modernc.org/mathutil v1.6.0 // indirect modernc.org/memory v1.8.0 // indirect modernc.org/strutil v1.2.0 // indirect diff --git a/go.sum b/go.sum index 1d5baef..b2f2c1d 100644 --- a/go.sum +++ b/go.sum @@ -1,10 +1,12 @@ -github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU= -github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU= github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I= github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c= +github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk= +github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= +github.com/alicebob/miniredis/v2 v2.33.0 h1:uvTF0EDeu9RLnUEG27Db5I68ESoIxTiXbNUiji6lZrA= +github.com/alicebob/miniredis/v2 v2.33.0/go.mod h1:MhP4a3EU7aENRi9aO+tHfTBZicLqQevyi/DJpoj6mi0= github.com/aws/aws-sdk-go v1.44.256/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= -github.com/aws/aws-sdk-go 
v1.53.19 h1:WEuWc918RXlIaPCyU11F7hH9H1ItK+8m2c/uoQNRUok= -github.com/aws/aws-sdk-go v1.53.19/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= +github.com/aws/aws-sdk-go v1.54.11 h1:Zxuv/R+IVS0B66yz4uezhxH9FN9/G2nbxejYqAMFjxk= +github.com/aws/aws-sdk-go v1.54.11/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= github.com/caarlos0/env/v6 v6.10.1 h1:t1mPSxNpei6M5yAeu1qtRdPAK29Nbcf/n3G7x+b3/II= github.com/caarlos0/env/v6 v6.10.1/go.mod h1:hvp/ryKXKipEkcuYjs9mI4bBCg+UI0Yhgm5Zu0ddvwc= github.com/coreos/go-oidc/v3 v3.10.0 h1:tDnXHnLyiTVyT/2zLDGj09pFPkhND8Gl8lnTRhoEaJU= @@ -16,6 +18,8 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/go-jose/go-jose/v4 v4.0.2 h1:R3l3kkBds16bO7ZFAEEcofK0MkrAJt3jlJznWZG0nvk= github.com/go-jose/go-jose/v4 v4.0.2/go.mod h1:WVf9LFMHh/QVrmqrOfqun0C45tMe3RoiKJMPvgWwLfY= +github.com/gomodule/redigo v1.9.2 h1:HrutZBLhSIU8abiSfW8pj8mPhOyMYjZT/wcA4/L9L9s= +github.com/gomodule/redigo v1.9.2/go.mod h1:KsU3hiK/Ay8U42qpaJk+kuNa3C+spxapWpM+ywhcgtw= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd h1:gbpYu9NMq8jhDVbvlGkMFWCjLFlqqEZjEmObmhUy6Vo= @@ -34,7 +38,6 @@ github.com/johannesboyne/gofakes3 v0.0.0-20240513200200-99de01ee122d h1:9dIJ/sx3 github.com/johannesboyne/gofakes3 v0.0.0-20240513200200-99de01ee122d/go.mod h1:AxgWC4DDX54O2WDoQO1Ceabtn6IbktjU/7bigor+66g= github.com/juju/ratelimit v1.0.2 h1:sRxmtRiajbvrcLQT7S+JbqU0ntsb9W2yhSdNN8tWfaI= github.com/juju/ratelimit v1.0.2/go.mod h1:qapgC/Gy+xNh9UxzV13HGGl/6UXNN+ct+vwSgWNm/qk= -github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCyc/Ib3bDTKd379tNMpB/7/H5TjM2Y9QJ5THLbE= github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= @@ -60,20 +63,24 @@ github.com/spf13/afero v1.2.1/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTd github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/tdewolff/minify/v2 v2.20.7 h1:NUkuzJ9dvQUNJjSdmmrfELa/ZpnMdyMR/ZKU2bw7N/E= -github.com/tdewolff/minify/v2 v2.20.7/go.mod h1:bj2NpP3zoUhsPzE4oM4JYwuUyVCU/uMaCYZ6/riEjIo= -github.com/tdewolff/parse/v2 v2.7.5 h1:RdcN3Ja6zAMSvnxxO047xRoWexX3RrXKi3H6EQHzXto= -github.com/tdewolff/parse/v2 v2.7.5/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W1aghka0soA= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/tdewolff/minify/v2 v2.20.34 h1:XueI6sQtgS7du45fyBCNkNfPQ9SINaYavMFNOxp37SA= +github.com/tdewolff/minify/v2 v2.20.34/go.mod h1:L1VYef/jwKw6Wwyk5A+T0mBjjn3mMPgmjjA688RNsxU= +github.com/tdewolff/parse/v2 v2.7.15 h1:hysDXtdGZIRF5UZXwpfn3ZWRbm+ru4l53/ajBRGpCTw= +github.com/tdewolff/parse/v2 v2.7.15/go.mod 
h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W1aghka0soA= github.com/tdewolff/test v1.0.11-0.20231101010635-f1265d231d52/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M= +github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI= golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= +golang.org/x/exp v0.0.0-20231108232855-2478ac86f678 h1:mchzmB1XO2pMaKFRqk/+MV3mgGG96aqaPXaMifQU47w= +golang.org/x/exp v0.0.0-20231108232855-2478ac86f678/go.mod h1:zk2irFbV9DP96SEBUUAy67IdHUaZuSnrz1n472HUCLE= golang.org/x/exp v0.0.0-20240604190554-fc45aab8b7f8 h1:LoYXNGAShUG3m/ehNk4iFctuhGX/+R1ZpfJ4/ia80JM= golang.org/x/exp v0.0.0-20240604190554-fc45aab8b7f8/go.mod h1:jj3sYF3dwk5D+ghuXyeI3r5MFf+NT2An6/9dOA95KSI= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= @@ -88,8 +95,6 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= -golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= -golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= golang.org/x/oauth2 v0.21.0 h1:tsimM75w1tF/uws5rbeHzIWxEqElMehnc+iW793zsZs= golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -124,8 +129,6 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= -golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190829051458-42f498d34c4d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -144,18 +147,18 @@ gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -modernc.org/cc/v4 v4.21.2 h1:dycHFB/jDc3IyacKipCNSDrjIC0Lm1hyoWOZTRR20Lk= -modernc.org/cc/v4 v4.21.2/go.mod 
h1:HM7VJTZbUCR3rV8EYBi9wxnJ0ZBRiGE5OeGXNA0IsLQ= -modernc.org/ccgo/v4 v4.17.10 h1:6wrtRozgrhCxieCeJh85QsxkX/2FFrT9hdaWPlbn4Zo= -modernc.org/ccgo/v4 v4.17.10/go.mod h1:0NBHgsqTTpm9cA5z2ccErvGZmtntSM9qD2kFAs6pjXM= +modernc.org/cc/v4 v4.21.3 h1:2mhBdWKtivdFlLR1ecKXTljPG1mfvbByX7QKztAIJl8= +modernc.org/cc/v4 v4.21.3/go.mod h1:HM7VJTZbUCR3rV8EYBi9wxnJ0ZBRiGE5OeGXNA0IsLQ= +modernc.org/ccgo/v4 v4.18.2 h1:PUQPShG4HwghpOekNujL0sFavdkRvmxzTbI4rGJ5mg0= +modernc.org/ccgo/v4 v4.18.2/go.mod h1:ao1fAxf9a2KEOL15WY8+yP3wnpaOpP/QuyFOZ9HJolM= modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE= modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ= modernc.org/gc/v2 v2.4.1 h1:9cNzOqPyMJBvrUipmynX0ZohMhcxPtMccYgGOJdOiBw= modernc.org/gc/v2 v2.4.1/go.mod h1:wzN5dK1AzVGoH6XOzc3YZ+ey/jPgYHLuVckd62P0GYU= modernc.org/gc/v3 v3.0.0-20240304020402-f0dba7c97c2b h1:BnN1t+pb1cy61zbvSUV7SeI0PwosMhlAEi/vBY4qxp8= modernc.org/gc/v3 v3.0.0-20240304020402-f0dba7c97c2b/go.mod h1:Qz0X07sNOR1jWYCrJMEnbW/X55x206Q7Vt4mz6/wHp4= -modernc.org/libc v1.52.1 h1:uau0VoiT5hnR+SpoWekCKbLqm7v6dhRL3hI+NQhgN3M= -modernc.org/libc v1.52.1/go.mod h1:HR4nVzFDSDizP620zcMCgjb1/8xk2lg5p/8yjfGv1IQ= +modernc.org/libc v1.53.4 h1:YAgFS7tGIFBfqje2UOqiXtIwuDUCF8AUonYw0seup34= +modernc.org/libc v1.53.4/go.mod h1:aGsLofnkcct8lTJnKQnCqJO37ERAXSHamSuWLFoF2Cw= modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4= modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo= modernc.org/memory v1.8.0 h1:IqGTL6eFMaDZZhEWwcREgeMXYwmW83LYW8cROZYkg+E= @@ -164,8 +167,8 @@ modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4= modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= modernc.org/sortutil v1.2.0 h1:jQiD3PfS2REGJNzNCMMaLSp/wdMNieTbKX920Cqdgqc= modernc.org/sortutil v1.2.0/go.mod h1:TKU2s7kJMf1AE84OoiGppNHJwvB753OYfNl2WRb++Ss= -modernc.org/sqlite v1.30.0 h1:8YhPUs/HTnlEgErn/jSYQTwHN/ex8CjHHjg+K9iG7LM= -modernc.org/sqlite v1.30.0/go.mod h1:cgkTARJ9ugeXSNaLBPK3CqbOe7Ec7ZhWPoMFGldEYEw= +modernc.org/sqlite v1.30.1 h1:YFhPVfu2iIgUf9kuA1CR7iiHdcEEsI2i+yjRYHscyxk= +modernc.org/sqlite v1.30.1/go.mod h1:DUmsiWQDaAvU4abhc/N+djlom/L2o8f7gZ95RCvyoLU= modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA= modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0= modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y= diff --git a/internal/configuration/Configuration.go b/internal/configuration/Configuration.go index 62e1b30..8d62e60 100644 --- a/internal/configuration/Configuration.go +++ b/internal/configuration/Configuration.go @@ -77,7 +77,8 @@ func Load() { settings, err := loadFromFile(Environment.ConfigPath) helper.Check(err) serverSettings = settings - database.Init(serverSettings.DataDir, Environment.DatabaseName) + usesHttps = strings.HasPrefix(strings.ToLower(serverSettings.ServerUrl), "https://") + if configupgrade.DoUpgrade(&serverSettings, &Environment) { save() } @@ -93,7 +94,14 @@ func Load() { helper.CreateDir(serverSettings.DataDir) filesystem.Init(serverSettings.DataDir) log.Init(Environment.DataDir) - usesHttps = strings.HasPrefix(strings.ToLower(serverSettings.ServerUrl), "https://") +} + +// ConnectDatabase loads the database that is defined in the configuration +func ConnectDatabase() { + dbConfig, err := database.ParseUrl(serverSettings.DatabaseUrl, false) + helper.Check(err) + database.Connect(dbConfig) + database.Upgrade() } // UsesHttps returns true if 
Gokapi URL is set to a secure URL @@ -127,10 +135,6 @@ func save() { func LoadFromSetup(config models.Configuration, cloudConfig *cloudconfig.CloudConfig, isInitialSetup bool) { Environment = environment.New() helper.CreateDir(Environment.ConfigDir) - if !isInitialSetup { - Load() - database.DeleteAllSessions() - } serverSettings = config if cloudConfig != nil { @@ -148,6 +152,8 @@ func LoadFromSetup(config models.Configuration, cloudConfig *cloudconfig.CloudCo } save() Load() + ConnectDatabase() + database.DeleteAllSessions() } // SetDeploymentPassword sets a new password. This should only be used for non-interactive deployment, but is not enforced diff --git a/internal/configuration/Configuration_test.go b/internal/configuration/Configuration_test.go index be44124..35598a9 100644 --- a/internal/configuration/Configuration_test.go +++ b/internal/configuration/Configuration_test.go @@ -60,6 +60,7 @@ func TestLoadFromSetup(t *testing.T) { MaxMemory: 10, UseSsl: true, MaxFileSizeMB: 199, + DatabaseUrl: "sqlite://./test/gokapi.sqlite", } newCloudConfig := cloudconfig.CloudConfig{Aws: models.AwsConfig{ Bucket: "bucket", diff --git a/internal/configuration/configupgrade/Upgrade.go b/internal/configuration/configupgrade/Upgrade.go index aca6ddc..b6480a4 100644 --- a/internal/configuration/configupgrade/Upgrade.go +++ b/internal/configuration/configupgrade/Upgrade.go @@ -2,15 +2,13 @@ package configupgrade import ( "fmt" - "github.com/forceu/gokapi/internal/configuration/database" "github.com/forceu/gokapi/internal/environment" - "github.com/forceu/gokapi/internal/helper" "github.com/forceu/gokapi/internal/models" "os" ) // CurrentConfigVersion is the version of the configuration structure. Used for upgrading -const CurrentConfigVersion = 20 +const CurrentConfigVersion = 21 // DoUpgrade checks if an old version is present and updates it to the current version if required func DoUpgrade(settings *models.Configuration, env *environment.Environment) bool { @@ -47,15 +45,9 @@ func updateConfig(settings *models.Configuration, env *environment.Environment) settings.ChunkSize = env.ChunkSizeMB settings.MaxParallelUploads = env.MaxParallelUploads } - // < v1.8.5 - if settings.ConfigVersion < 20 { - err := database.RawSqlite(`DROP TABLE UploadStatus; CREATE TABLE "UploadStatus" ( - "ChunkId" TEXT NOT NULL UNIQUE, - "CurrentStatus" INTEGER NOT NULL, - "CreationDate" INTEGER NOT NULL, - PRIMARY KEY("ChunkId") -) WITHOUT ROWID;`) - helper.Check(err) + // < v1.9.0 + if settings.ConfigVersion < 21 { + settings.DatabaseUrl = "sqlite://" + env.DataDir + "/" + env.DatabaseName } } diff --git a/internal/configuration/configupgrade/Upgrade_test.go b/internal/configuration/configupgrade/Upgrade_test.go index 2124380..637f381 100644 --- a/internal/configuration/configupgrade/Upgrade_test.go +++ b/internal/configuration/configupgrade/Upgrade_test.go @@ -1,7 +1,6 @@ package configupgrade import ( - "github.com/forceu/gokapi/internal/configuration/database" "github.com/forceu/gokapi/internal/environment" "github.com/forceu/gokapi/internal/models" "github.com/forceu/gokapi/internal/test" @@ -34,9 +33,7 @@ func TestUpgradeDb(t *testing.T) { upgradeDone := DoUpgrade(&oldConfigFile, &env) test.IsEqualBool(t, upgradeDone, true) test.IsEqualInt(t, exitCode, 1) - database.Close() - database.Init("./test", "gokapi.sqlite") exitCode = 0 oldConfigFile.ConfigVersion = 17 upgradeDone = DoUpgrade(&oldConfigFile, &env) diff --git a/internal/configuration/database/Database.go b/internal/configuration/database/Database.go index 
6e35e1c..d2ae4f8 100644 --- a/internal/configuration/database/Database.go +++ b/internal/configuration/database/Database.go @@ -1,155 +1,263 @@ package database import ( - "database/sql" + "errors" "fmt" + "github.com/forceu/gokapi/internal/configuration/database/dbabstraction" "github.com/forceu/gokapi/internal/helper" - "log" - // Required for sqlite driver - _ "modernc.org/sqlite" - "os" - "path/filepath" + "github.com/forceu/gokapi/internal/models" + "net/url" + "strings" ) -var sqliteDb *sql.DB +var db dbabstraction.Database -// Init creates the database files and connects to it -func Init(dataDir, dbName string) { - if sqliteDb == nil { - dataDir = filepath.Clean(dataDir) - var err error - if !helper.FolderExists(dataDir) { - err = os.MkdirAll(dataDir, 0700) - helper.Check(err) - } - dbFullPath := dataDir + "/" + dbName - sqliteDb, err = sql.Open("sqlite", dbFullPath+"?_pragma=busy_timeout=10000&_pragma=journal_mode=WAL") - if err != nil { - log.Fatal(err) - } - sqliteDb.SetMaxOpenConns(10000) - sqliteDb.SetMaxIdleConns(10000) +var currentDbVersion = 2 - if !helper.FileExists(dbFullPath) { - createNewDatabase() +// Connect establishes a connection to the database and creates the table structure, if necessary +func Connect(config models.DbConnection) { + var err error + db, err = dbabstraction.GetNew(config) + if err != nil { + panic(err) + } +} + +// ParseUrl converts a database URL to a models.DbConnection struct +func ParseUrl(dbUrl string, mustExist bool) (models.DbConnection, error) { + if dbUrl == "" { + return models.DbConnection{}, errors.New("dbUrl is empty") + } + u, err := url.Parse(dbUrl) + if err != nil { + return models.DbConnection{}, fmt.Errorf("unsupported database URL - expected format is: type://username:password@server: %v", err) + } + result := models.DbConnection{} + switch strings.ToLower(u.Scheme) { + case "sqlite": + result.Type = dbabstraction.TypeSqlite + result.HostUrl = strings.TrimPrefix(dbUrl, "sqlite://") + if mustExist && !helper.FileExists(result.HostUrl) { + return models.DbConnection{}, fmt.Errorf("file %s does not exist\n", result.HostUrl) } + case "redis": + result.Type = dbabstraction.TypeRedis + result.HostUrl = u.Host + default: + return models.DbConnection{}, fmt.Errorf("unsupported database type: %s\n", dbUrl) + } + + query := u.Query() + + result.Username = u.User.Username() + result.Password, _ = u.User.Password() + result.RedisUseSsl = query.Has("ssl") + result.RedisPrefix = query.Get("prefix") + + return result, nil +} + +// Migrate copies a database to a new location +func Migrate(configOld, configNew models.DbConnection) { + dbOld, err := dbabstraction.GetNew(configOld) + helper.Check(err) + dbNew, err := dbabstraction.GetNew(configNew) + helper.Check(err) + + apiKeys := dbOld.GetAllApiKeys() + for _, apiKey := range apiKeys { + dbNew.SaveApiKey(apiKey) + } + dbNew.SaveEnd2EndInfo(dbOld.GetEnd2EndInfo()) + files := dbOld.GetAllMetadata() + for _, file := range files { + dbNew.SaveMetaData(file) + if file.HotlinkId != "" { + dbNew.SaveHotlink(file) + } + } + defaults, ok := dbOld.GetUploadDefaults() + if ok { + dbNew.SaveUploadDefaults(defaults) + } + dbOld.Close() + dbNew.Close() +} + +// RunGarbageCollection runs the databases GC +func RunGarbageCollection() { + db.RunGarbageCollection() +} + +// Upgrade migrates the DB to a new Gokapi version, if required +func Upgrade() { + dbVersion := db.GetDbVersion() + if dbVersion < currentDbVersion { + db.Upgrade(currentDbVersion) + db.SetDbVersion(currentDbVersion) } } // Close the database 
connection func Close() { - if sqliteDb != nil { - err := sqliteDb.Close() - if err != nil { - fmt.Println(err) - } + db.Close() +} + +// Api Key Section + +// GetAllApiKeys returns a map with all API keys +func GetAllApiKeys() map[string]models.ApiKey { + return db.GetAllApiKeys() +} + +// GetApiKey returns a models.ApiKey if valid or false if the ID is not valid +func GetApiKey(id string) (models.ApiKey, bool) { + return db.GetApiKey(id) +} + +// SaveApiKey saves the API key to the database +func SaveApiKey(apikey models.ApiKey) { + db.SaveApiKey(apikey) +} + +// UpdateTimeApiKey writes the content of LastUsage to the database +func UpdateTimeApiKey(apikey models.ApiKey) { + db.UpdateTimeApiKey(apikey) +} + +// DeleteApiKey deletes an API key with the given ID +func DeleteApiKey(id string) { + db.DeleteApiKey(id) +} + +// E2E Section + +// SaveEnd2EndInfo stores the encrypted e2e info +func SaveEnd2EndInfo(info models.E2EInfoEncrypted) { + info.AvailableFiles = nil + db.SaveEnd2EndInfo(info) +} + +// GetEnd2EndInfo retrieves the encrypted e2e info +func GetEnd2EndInfo() models.E2EInfoEncrypted { + info := db.GetEnd2EndInfo() + info.AvailableFiles = GetAllMetaDataIds() + return info +} + +// DeleteEnd2EndInfo resets the encrypted e2e info +func DeleteEnd2EndInfo() { + db.DeleteEnd2EndInfo() +} + +// Hotlink Section + +// GetHotlink returns the id of the file associated or false if not found +func GetHotlink(id string) (string, bool) { + return db.GetHotlink(id) +} + +// GetAllHotlinks returns an array with all hotlink ids +func GetAllHotlinks() []string { + return db.GetAllHotlinks() +} + +// SaveHotlink stores the hotlink associated with the file in the database +func SaveHotlink(file models.File) { + db.SaveHotlink(file) +} + +// DeleteHotlink deletes a hotlink with the given hotlink ID +func DeleteHotlink(id string) { + db.DeleteHotlink(id) +} + +// Metadata Section + +// GetAllMetadata returns a map of all available files +func GetAllMetadata() map[string]models.File { + return db.GetAllMetadata() +} + +// GetAllMetaDataIds returns all Ids that contain metadata +func GetAllMetaDataIds() []string { + return db.GetAllMetaDataIds() +} + +// GetMetaDataById returns a models.File from the ID passed or false if the id is not valid +func GetMetaDataById(id string) (models.File, bool) { + return db.GetMetaDataById(id) +} + +// SaveMetaData stores the metadata of a file to the disk +func SaveMetaData(file models.File) { + db.SaveMetaData(file) +} + +// DeleteMetaData deletes information about a file +func DeleteMetaData(id string) { + db.DeleteMetaData(id) +} + +// Session Section + +// GetSession returns the session with the given ID or false if not a valid ID +func GetSession(id string) (models.Session, bool) { + return db.GetSession(id) +} + +// SaveSession stores the given session. 
After the expiry passed, it will be deleted automatically +func SaveSession(id string, session models.Session) { + db.SaveSession(id, session) +} + +// DeleteSession deletes a session with the given ID +func DeleteSession(id string) { + db.DeleteSession(id) +} + +// DeleteAllSessions logs all users out +func DeleteAllSessions() { + db.DeleteAllSessions() +} + +// Upload Defaults Section + +// GetUploadDefaults returns the last used setting for amount of downloads allowed, last expiry in days and +// a password for the file +func GetUploadDefaults() models.LastUploadValues { + values, ok := db.GetUploadDefaults() + if ok { + return values } - sqliteDb = nil -} - -// RunGarbageCollection runs the databases GC -func RunGarbageCollection() { - cleanExpiredSessions() - cleanUploadStatus() -} - -// RawSqlite runs a raw SQL statement. Should only be used for upgrading -func RawSqlite(statement string) error { - _, err := sqliteDb.Exec(statement) - return err -} - -type schemaPragma struct { - Cid string - Name string - Type string - NotNull int - DefaultVal sql.NullString - Pk int -} - -// ColumnExists returns true if a column with the name columnName exists in table tableName -// Should only be used for upgrading -func ColumnExists(tableName, columnName string) (bool, error) { - rows, err := sqliteDb.Query("PRAGMA table_info(" + tableName + ")") - if err != nil { - return false, err + defaultValues := models.LastUploadValues{ + Downloads: 1, + TimeExpiry: 14, + Password: "", + UnlimitedDownload: false, + UnlimitedTime: false, } - defer rows.Close() - for rows.Next() { - var pragmaInfo schemaPragma - err = rows.Scan(&pragmaInfo.Cid, &pragmaInfo.Name, &pragmaInfo.Type, &pragmaInfo.NotNull, &pragmaInfo.DefaultVal, &pragmaInfo.Pk) - if err != nil { - return false, err - } - if pragmaInfo.Name == columnName { - return true, nil - } - } - return false, nil + return defaultValues } -func createNewDatabase() { - sqlStmt := ` - CREATE TABLE "ApiKeys" ( - "Id" TEXT NOT NULL UNIQUE, - "FriendlyName" TEXT NOT NULL, - "LastUsed" INTEGER NOT NULL, - "LastUsedString" TEXT NOT NULL, - "Permissions" INTEGER NOT NULL DEFAULT 0, - PRIMARY KEY("Id") - ) WITHOUT ROWID; - CREATE TABLE "E2EConfig" ( - "id" INTEGER NOT NULL UNIQUE, - "Config" BLOB NOT NULL, - PRIMARY KEY("id" AUTOINCREMENT) - ); - CREATE TABLE "FileMetaData" ( - "Id" TEXT NOT NULL UNIQUE, - "Name" TEXT NOT NULL, - "Size" TEXT NOT NULL, - "SHA1" TEXT NOT NULL, - "ExpireAt" INTEGER NOT NULL, - "SizeBytes" INTEGER NOT NULL, - "ExpireAtString" TEXT NOT NULL, - "DownloadsRemaining" INTEGER NOT NULL, - "DownloadCount" INTEGER NOT NULL, - "PasswordHash" TEXT NOT NULL, - "HotlinkId" TEXT NOT NULL, - "ContentType" TEXT NOT NULL, - "AwsBucket" TEXT NOT NULL, - "Encryption" BLOB NOT NULL, - "UnlimitedDownloads" INTEGER NOT NULL, - "UnlimitedTime" INTEGER NOT NULL, - PRIMARY KEY("Id") - ); - CREATE TABLE "Hotlinks" ( - "Id" TEXT NOT NULL UNIQUE, - "FileId" TEXT NOT NULL UNIQUE, - PRIMARY KEY("Id") - ) WITHOUT ROWID; - CREATE TABLE "Sessions" ( - "Id" TEXT NOT NULL UNIQUE, - "RenewAt" INTEGER NOT NULL, - "ValidUntil" INTEGER NOT NULL, - PRIMARY KEY("Id") - ) WITHOUT ROWID; - CREATE TABLE "UploadConfig" ( - "id" INTEGER NOT NULL UNIQUE, - "Downloads" INTEGER, - "TimeExpiry" INTEGER, - "Password" TEXT, - "UnlimitedDownloads" INTEGER, - "UnlimitedTime" INTEGER, - PRIMARY KEY("id") - ); - CREATE TABLE "UploadStatus" ( - "ChunkId" TEXT NOT NULL UNIQUE, - "CurrentStatus" INTEGER NOT NULL, - "CreationDate" INTEGER NOT NULL, - PRIMARY KEY("ChunkId") - ) WITHOUT ROWID; -` - 
err := RawSqlite(sqlStmt) - helper.Check(err) +// SaveUploadDefaults saves the last used setting for an upload +func SaveUploadDefaults(values models.LastUploadValues) { + db.SaveUploadDefaults(values) +} + +// Upload Status Section + +// GetAllUploadStatus returns all UploadStatus values from the past 24 hours +func GetAllUploadStatus() []models.UploadStatus { + return db.GetAllUploadStatus() +} + +// GetUploadStatus returns a models.UploadStatus from the ID passed or false if the id is not valid +func GetUploadStatus(id string) (models.UploadStatus, bool) { + return db.GetUploadStatus(id) +} + +// SaveUploadStatus stores the upload status of a new file for 24 hours +func SaveUploadStatus(status models.UploadStatus) { + db.SaveUploadStatus(status) } diff --git a/internal/configuration/database/Database_test.go b/internal/configuration/database/Database_test.go index 56f65d5..bdfb02c 100644 --- a/internal/configuration/database/Database_test.go +++ b/internal/configuration/database/Database_test.go @@ -1,524 +1,310 @@ -//go:build test - package database import ( - "database/sql" - "errors" - "github.com/DATA-DOG/go-sqlmock" - "github.com/forceu/gokapi/internal/helper" + "fmt" + "github.com/alicebob/miniredis/v2" + "github.com/forceu/gokapi/internal/configuration/database/dbabstraction" "github.com/forceu/gokapi/internal/models" "github.com/forceu/gokapi/internal/test" - "golang.org/x/exp/slices" - "math" + "log" "os" - "regexp" - "sync" "testing" "time" ) +var configSqlite = models.DbConnection{ + HostUrl: "./test/gokapi.sqlite", + Type: 0, // dbabstraction.TypeSqlite +} + +var configRedis = models.DbConnection{ + RedisPrefix: "test_", + HostUrl: "127.0.0.1:26379", + Type: 1, // dbabstraction.TypeRedis +} + +var mRedis *miniredis.Miniredis + +var availableDatabases []dbabstraction.Database + func TestMain(m *testing.M) { - os.Setenv("GOKAPI_CONFIG_DIR", "test") - os.Setenv("GOKAPI_DATA_DIR", "test") - os.Mkdir("test", 0777) + + mRedis = miniredis.NewMiniRedis() + err := mRedis.StartAddr("127.0.0.1:26379") + if err != nil { + log.Fatal("Could not start miniredis") + } exitVal := m.Run() - os.RemoveAll("test") + mRedis.Close() + os.RemoveAll("./test/") os.Exit(exitVal) } func TestInit(t *testing.T) { - test.IsEqualBool(t, sqliteDb == nil, true) - Init("./test/newfolder", "gokapi.sqlite") - test.IsEqualBool(t, sqliteDb != nil, true) - test.FolderExists(t, "./test/newfolder") - Close() - test.IsEqualBool(t, sqliteDb == nil, true) - err := os.WriteFile("./test/newfolder/gokapi2.sqlite", []byte("invalid"), 0700) - test.IsNil(t, err) - Init("./test/newfolder", "gokapi2.sqlite") + availableDatabases = make([]dbabstraction.Database, 0) + Connect(configRedis) + availableDatabases = append(availableDatabases, db) + Connect(configSqlite) + availableDatabases = append(availableDatabases, db) + defer test.ExpectPanic(t) + Connect(models.DbConnection{Type: 2}) } -func TestClose(t *testing.T) { - test.IsEqualBool(t, sqliteDb != nil, true) - Close() - test.IsEqualBool(t, sqliteDb == nil, true) - mock := setMockDb(t) - mock.ExpectClose().WillReturnError(errors.New("test")) - Close() - restoreDb() - Init("./test", "gokapi.sqlite") +func TestApiKeys(t *testing.T) { + runAllTypesCompareOutput(t, func() any { return GetAllApiKeys() }, map[string]models.ApiKey{}) + newApiKey := models.ApiKey{ + Id: "test", + FriendlyName: "testKey", + LastUsed: 1000, + Permissions: 10, + } + runAllTypesNoOutput(t, func() { SaveApiKey(newApiKey) }) + runAllTypesCompareTwoOutputs(t, func() (any, any) { + return GetApiKey("test") + 
}, newApiKey, true) + newApiKey.LastUsed = 2000 + runAllTypesNoOutput(t, func() { UpdateTimeApiKey(newApiKey) }) + runAllTypesCompareOutput(t, func() any { return GetAllApiKeys() }, map[string]models.ApiKey{"test": newApiKey}) + runAllTypesNoOutput(t, func() { DeleteApiKey("test") }) + runAllTypesCompareTwoOutputs(t, func() (any, any) { + return GetApiKey("test") + }, models.ApiKey{}, false) } -func TestMetaData(t *testing.T) { - files := GetAllMetadata() - test.IsEqualInt(t, len(files), 0) - - SaveMetaData(models.File{Id: "testfile", Name: "test.txt", ExpireAt: time.Now().Add(time.Hour).Unix()}) - files = GetAllMetadata() - test.IsEqualInt(t, len(files), 1) - test.IsEqualString(t, files["testfile"].Name, "test.txt") - - file, ok := GetMetaDataById("testfile") - test.IsEqualBool(t, ok, true) - test.IsEqualString(t, file.Id, "testfile") - _, ok = GetMetaDataById("invalid") - test.IsEqualBool(t, ok, false) - - test.IsEqualInt(t, len(GetAllMetadata()), 1) - DeleteMetaData("invalid") - test.IsEqualInt(t, len(GetAllMetadata()), 1) - DeleteMetaData("testfile") - test.IsEqualInt(t, len(GetAllMetadata()), 0) +func TestE2E(t *testing.T) { + input := models.E2EInfoEncrypted{ + Version: 1, + Nonce: []byte("test"), + Content: []byte("test2"), + AvailableFiles: []string{"should", "not", "be", "saved"}, + } + runAllTypesNoOutput(t, func() { SaveEnd2EndInfo(input) }) + input.AvailableFiles = []string{} + runAllTypesCompareOutput(t, func() any { return GetEnd2EndInfo() }, input) + runAllTypesNoOutput(t, func() { DeleteEnd2EndInfo() }) + runAllTypesCompareOutput(t, func() any { return GetEnd2EndInfo() }, models.E2EInfoEncrypted{AvailableFiles: []string{}}) } -func TestHotlink(t *testing.T) { - SaveHotlink(models.File{Id: "testfile", Name: "test.txt", HotlinkId: "testlink", ExpireAt: time.Now().Add(time.Hour).Unix()}) - - hotlink, ok := GetHotlink("testlink") - test.IsEqualBool(t, ok, true) - test.IsEqualString(t, hotlink, "testfile") - _, ok = GetHotlink("invalid") - test.IsEqualBool(t, ok, false) - - DeleteHotlink("invalid") - _, ok = GetHotlink("testlink") - test.IsEqualBool(t, ok, true) - DeleteHotlink("testlink") - _, ok = GetHotlink("testlink") - test.IsEqualBool(t, ok, false) - - SaveHotlink(models.File{Id: "testfile", Name: "test.txt", HotlinkId: "testlink", ExpireAt: 0, UnlimitedTime: true}) - hotlink, ok = GetHotlink("testlink") - test.IsEqualBool(t, ok, true) - test.IsEqualString(t, hotlink, "testfile") - - SaveHotlink(models.File{Id: "file2", Name: "file2.txt", HotlinkId: "link2", ExpireAt: time.Now().Add(time.Hour).Unix()}) - SaveHotlink(models.File{Id: "file3", Name: "file3.txt", HotlinkId: "link3", ExpireAt: time.Now().Add(time.Hour).Unix()}) - - hotlinks := GetAllHotlinks() - test.IsEqualInt(t, len(hotlinks), 3) - test.IsEqualBool(t, slices.Contains(hotlinks, "testlink"), true) - test.IsEqualBool(t, slices.Contains(hotlinks, "link2"), true) - test.IsEqualBool(t, slices.Contains(hotlinks, "link3"), true) - DeleteHotlink("") - hotlinks = GetAllHotlinks() - test.IsEqualInt(t, len(hotlinks), 3) - -} - -func TestApiKey(t *testing.T) { - SaveApiKey(models.ApiKey{ - Id: "newkey", - FriendlyName: "New Key", - LastUsedString: "LastUsed", - LastUsed: 100, - Permissions: 20, - }) - SaveApiKey(models.ApiKey{ - Id: "newkey2", - FriendlyName: "New Key2", - LastUsedString: "LastUsed2", - LastUsed: 200, - Permissions: 40, - }) - - keys := GetAllApiKeys() - test.IsEqualInt(t, len(keys), 2) - test.IsEqualString(t, keys["newkey"].FriendlyName, "New Key") - test.IsEqualString(t, keys["newkey"].Id, "newkey") - 
test.IsEqualString(t, keys["newkey"].LastUsedString, "LastUsed") - test.IsEqualInt64(t, keys["newkey"].LastUsed, 100) - test.IsEqualBool(t, keys["newkey"].Permissions == 20, true) - - test.IsEqualInt(t, len(GetAllApiKeys()), 2) - DeleteApiKey("newkey2") - test.IsEqualInt(t, len(GetAllApiKeys()), 1) - - key, ok := GetApiKey("newkey") - test.IsEqualBool(t, ok, true) - test.IsEqualString(t, key.FriendlyName, "New Key") - _, ok = GetApiKey("newkey2") - test.IsEqualBool(t, ok, false) - - SaveApiKey(models.ApiKey{ - Id: "newkey", - FriendlyName: "Old Key", - LastUsed: 100, - LastUsedString: "LastUsed", - }) - key, ok = GetApiKey("newkey") - test.IsEqualBool(t, ok, true) - test.IsEqualString(t, key.FriendlyName, "Old Key") -} - -func TestSession(t *testing.T) { - renewAt := time.Now().Add(1 * time.Hour).Unix() - SaveSession("newsession", models.Session{ - RenewAt: renewAt, - ValidUntil: time.Now().Add(2 * time.Hour).Unix(), - }) - - session, ok := GetSession("newsession") - test.IsEqualBool(t, ok, true) - test.IsEqualBool(t, session.RenewAt == renewAt, true) - - DeleteSession("newsession") - _, ok = GetSession("newsession") - test.IsEqualBool(t, ok, false) - - SaveSession("newsession", models.Session{ - RenewAt: renewAt, - ValidUntil: time.Now().Add(2 * time.Hour).Unix(), - }) - - SaveSession("anothersession", models.Session{ - RenewAt: renewAt, - ValidUntil: time.Now().Add(2 * time.Hour).Unix(), - }) - _, ok = GetSession("newsession") - test.IsEqualBool(t, ok, true) - _, ok = GetSession("anothersession") - test.IsEqualBool(t, ok, true) - - DeleteAllSessions() - _, ok = GetSession("newsession") - test.IsEqualBool(t, ok, false) - _, ok = GetSession("anothersession") - test.IsEqualBool(t, ok, false) +func TestSessions(t *testing.T) { + runAllTypesCompareTwoOutputs(t, func() (any, any) { return GetSession("newsession") }, models.Session{}, false) + input := models.Session{ + RenewAt: time.Now().Add(10 * time.Second).Unix(), + ValidUntil: time.Now().Add(20 * time.Second).Unix(), + } + runAllTypesNoOutput(t, func() { SaveSession("newsession", input) }) + runAllTypesCompareTwoOutputs(t, func() (any, any) { return GetSession("newsession") }, input, true) + runAllTypesNoOutput(t, func() { DeleteSession("newsession") }) + runAllTypesCompareTwoOutputs(t, func() (any, any) { return GetSession("newsession") }, models.Session{}, false) + runAllTypesNoOutput(t, func() { SaveSession("newsession", input) }) + runAllTypesCompareTwoOutputs(t, func() (any, any) { return GetSession("newsession") }, input, true) + runAllTypesNoOutput(t, func() { DeleteAllSessions() }) + runAllTypesCompareTwoOutputs(t, func() (any, any) { return GetSession("newsession") }, models.Session{}, false) } func TestUploadDefaults(t *testing.T) { - defaults := GetUploadDefaults() - test.IsEqualInt(t, defaults.Downloads, 1) - test.IsEqualInt(t, defaults.TimeExpiry, 14) - test.IsEqualString(t, defaults.Password, "") - test.IsEqualBool(t, defaults.UnlimitedDownload, false) - test.IsEqualBool(t, defaults.UnlimitedTime, false) - - SaveUploadDefaults(models.LastUploadValues{ - Downloads: 20, - TimeExpiry: 30, - Password: "abcd", + defaultValues := models.LastUploadValues{ + Downloads: 1, + TimeExpiry: 14, + Password: "", + UnlimitedDownload: false, + UnlimitedTime: false, + } + runAllTypesCompareOutput(t, func() any { return GetUploadDefaults() }, defaultValues) + newValues := models.LastUploadValues{ + Downloads: 5, + TimeExpiry: 20, + Password: "123", UnlimitedDownload: true, UnlimitedTime: true, - }) - defaults = GetUploadDefaults() - 
test.IsEqualInt(t, defaults.Downloads, 20) - test.IsEqualInt(t, defaults.TimeExpiry, 30) - test.IsEqualString(t, defaults.Password, "abcd") - test.IsEqualBool(t, defaults.UnlimitedDownload, true) - test.IsEqualBool(t, defaults.UnlimitedTime, true) -} - -func TestColumnExists(t *testing.T) { - exists, err := ColumnExists("invalid", "invalid") - test.IsEqualBool(t, exists, false) - test.IsNil(t, err) - exists, err = ColumnExists("FileMetaData", "invalid") - test.IsEqualBool(t, exists, false) - test.IsNil(t, err) - exists, err = ColumnExists("FileMetaData", "ExpireAt") - test.IsEqualBool(t, exists, true) - test.IsNil(t, err) - setMockDb(t).ExpectQuery(regexp.QuoteMeta("PRAGMA table_info(error)")).WillReturnError(errors.New("error")) - exists, err = ColumnExists("error", "error") - test.IsEqualBool(t, exists, false) - test.IsNotNil(t, err) - restoreDb() - mock := setMockDb(t) - - rows := mock.NewRows([]string{"invalid"}). - AddRow(0). - AddRow(1) - mock.ExpectQuery(regexp.QuoteMeta("PRAGMA table_info(error)")).WillReturnRows(rows) - exists, err = ColumnExists("error", "error") - test.IsEqualBool(t, exists, false) - test.IsNotNil(t, err) - restoreDb() -} - -func TestGarbageCollectionUploads(t *testing.T) { - orgiginalFunc := currentTime - currentTime = func() time.Time { - return time.Now().Add(-25 * time.Hour) } - SaveUploadStatus(models.UploadStatus{ - ChunkId: "ctodelete1", - CurrentStatus: 0, - }) - SaveUploadStatus(models.UploadStatus{ - ChunkId: "ctodelete2", - CurrentStatus: 1, - }) - SaveUploadStatus(models.UploadStatus{ - ChunkId: "ctodelete3", - CurrentStatus: 0, - }) - SaveUploadStatus(models.UploadStatus{ - ChunkId: "ctodelete4", - CurrentStatus: 0, - }) - SaveUploadStatus(models.UploadStatus{ - ChunkId: "ctodelete5", - CurrentStatus: 1, - }) - currentTime = orgiginalFunc - - SaveUploadStatus(models.UploadStatus{ - ChunkId: "ctokeep1", - CurrentStatus: 0, - }) - SaveUploadStatus(models.UploadStatus{ - ChunkId: "ctokeep2", - CurrentStatus: 1, - }) - SaveUploadStatus(models.UploadStatus{ - ChunkId: "ctokeep3", - CurrentStatus: 0, - }) - SaveUploadStatus(models.UploadStatus{ - ChunkId: "ctokeep4", - CurrentStatus: 0, - }) - SaveUploadStatus(models.UploadStatus{ - ChunkId: "ctokeep5", - CurrentStatus: 1, - }) - for _, item := range []string{"ctodelete1", "ctodelete2", "ctodelete3", "ctodelete4", "ctokeep1", "ctokeep2", "ctokeep3", "ctokeep4"} { - _, result := GetUploadStatus(item) - test.IsEqualBool(t, result, true) - } - RunGarbageCollection() - for _, item := range []string{"ctodelete1", "ctodelete2", "ctodelete3", "ctodelete4"} { - _, result := GetUploadStatus(item) - test.IsEqualBool(t, result, false) - } - for _, item := range []string{"ctokeep1", "ctokeep2", "ctokeep3", "ctokeep4"} { - _, result := GetUploadStatus(item) - test.IsEqualBool(t, result, true) - } -} - -func TestGarbageCollectionSessions(t *testing.T) { - SaveSession("todelete1", models.Session{ - RenewAt: time.Now().Add(-10 * time.Second).Unix(), - ValidUntil: time.Now().Add(-10 * time.Second).Unix(), - }) - SaveSession("todelete2", models.Session{ - RenewAt: time.Now().Add(10 * time.Second).Unix(), - ValidUntil: time.Now().Add(-10 * time.Second).Unix(), - }) - SaveSession("tokeep1", models.Session{ - RenewAt: time.Now().Add(-10 * time.Second).Unix(), - ValidUntil: time.Now().Add(10 * time.Second).Unix(), - }) - SaveSession("tokeep2", models.Session{ - RenewAt: time.Now().Add(10 * time.Second).Unix(), - ValidUntil: time.Now().Add(10 * time.Second).Unix(), - }) - for _, item := range []string{"todelete1", "todelete2", 
"tokeep1", "tokeep2"} { - _, result := GetSession(item) - test.IsEqualBool(t, result, true) - } - RunGarbageCollection() - for _, item := range []string{"todelete1", "todelete2"} { - _, result := GetSession(item) - test.IsEqualBool(t, result, false) - } - for _, item := range []string{"tokeep1", "tokeep2"} { - _, result := GetSession(item) - test.IsEqualBool(t, result, true) - } -} - -func TestEnd2EndInfo(t *testing.T) { - info := GetEnd2EndInfo() - test.IsEqualInt(t, info.Version, 0) - test.IsEqualBool(t, info.HasBeenSetUp(), false) - - SaveEnd2EndInfo(models.E2EInfoEncrypted{ - Version: 1, - Nonce: []byte("testNonce1"), - Content: []byte("testContent1"), - AvailableFiles: []string{"file1_0", "file1_1"}, - }) - - info = GetEnd2EndInfo() - test.IsEqualInt(t, info.Version, 1) - test.IsEqualBool(t, info.HasBeenSetUp(), true) - test.IsEqualByteSlice(t, info.Nonce, []byte("testNonce1")) - test.IsEqualByteSlice(t, info.Content, []byte("testContent1")) - test.IsEqualBool(t, len(info.AvailableFiles) == 0, true) - - SaveEnd2EndInfo(models.E2EInfoEncrypted{ - Version: 2, - Nonce: []byte("testNonce2"), - Content: []byte("testContent2"), - AvailableFiles: []string{"file2_0", "file2_1"}, - }) - - info = GetEnd2EndInfo() - test.IsEqualInt(t, info.Version, 2) - test.IsEqualBool(t, info.HasBeenSetUp(), true) - test.IsEqualByteSlice(t, info.Nonce, []byte("testNonce2")) - test.IsEqualByteSlice(t, info.Content, []byte("testContent2")) - test.IsEqualBool(t, len(info.AvailableFiles) == 0, true) - - DeleteEnd2EndInfo() - info = GetEnd2EndInfo() - test.IsEqualInt(t, info.Version, 0) - test.IsEqualBool(t, info.HasBeenSetUp(), false) -} - -func TestUpdateTimeApiKey(t *testing.T) { - - retrievedKey, ok := GetApiKey("key1") - test.IsEqualBool(t, ok, false) - test.IsEqualString(t, retrievedKey.Id, "") - - key := models.ApiKey{ - Id: "key1", - FriendlyName: "key1", - LastUsed: 100, - LastUsedString: "last1", - } - SaveApiKey(key) - key = models.ApiKey{ - Id: "key2", - FriendlyName: "key2", - LastUsed: 200, - LastUsedString: "last2", - } - SaveApiKey(key) - - retrievedKey, ok = GetApiKey("key1") - test.IsEqualBool(t, ok, true) - test.IsEqualString(t, retrievedKey.Id, "key1") - test.IsEqualInt64(t, retrievedKey.LastUsed, 100) - test.IsEqualString(t, retrievedKey.LastUsedString, "last1") - retrievedKey, ok = GetApiKey("key2") - test.IsEqualBool(t, ok, true) - test.IsEqualString(t, retrievedKey.Id, "key2") - test.IsEqualInt64(t, retrievedKey.LastUsed, 200) - test.IsEqualString(t, retrievedKey.LastUsedString, "last2") - - key.LastUsed = 300 - key.LastUsedString = "last2_1" - UpdateTimeApiKey(key) - - retrievedKey, ok = GetApiKey("key1") - test.IsEqualBool(t, ok, true) - test.IsEqualString(t, retrievedKey.Id, "key1") - test.IsEqualInt64(t, retrievedKey.LastUsed, 100) - test.IsEqualString(t, retrievedKey.LastUsedString, "last1") - retrievedKey, ok = GetApiKey("key2") - test.IsEqualBool(t, ok, true) - test.IsEqualString(t, retrievedKey.Id, "key2") - test.IsEqualInt64(t, retrievedKey.LastUsed, 300) - test.IsEqualString(t, retrievedKey.LastUsedString, "last2_1") -} - -func TestParallelConnectionsWritingAndReading(t *testing.T) { - var wg sync.WaitGroup - - simulatedConnection := func(t *testing.T) { - file := models.File{ - Id: helper.GenerateRandomString(10), - Name: helper.GenerateRandomString(10), - Size: "10B", - SHA1: "1289423794287598237489", - ExpireAt: math.MaxInt, - SizeBytes: 10, - ExpireAtString: "Never", - DownloadsRemaining: 10, - DownloadCount: 10, - PasswordHash: "", - HotlinkId: "", - ContentType: "", - 
AwsBucket: "", - Encryption: models.EncryptionInfo{}, - UnlimitedDownloads: false, - UnlimitedTime: false, - } - SaveMetaData(file) - retrievedFile, ok := GetMetaDataById(file.Id) - test.IsEqualBool(t, ok, true) - test.IsEqualString(t, retrievedFile.Name, file.Name) - DeleteMetaData(file.Id) - _, ok = GetMetaDataById(file.Id) - test.IsEqualBool(t, ok, false) - } - - for i := 1; i <= 100; i++ { - wg.Add(1) - go func() { - defer wg.Done() - simulatedConnection(t) - }() - } - wg.Wait() -} - -func TestParallelConnectionsReading(t *testing.T) { - var wg sync.WaitGroup - - SaveApiKey(models.ApiKey{ - Id: "readtest", - FriendlyName: "readtest", - LastUsed: 40000, - LastUsedString: "readtest", - }) - simulatedConnection := func(t *testing.T) { - _, ok := GetApiKey("readtest") - test.IsEqualBool(t, ok, true) - } - - for i := 1; i <= 1000; i++ { - wg.Add(1) - go func() { - defer wg.Done() - simulatedConnection(t) - }() - } - wg.Wait() + runAllTypesNoOutput(t, func() { SaveUploadDefaults(newValues) }) + runAllTypesCompareOutput(t, func() any { return GetUploadDefaults() }, newValues) } func TestUploadStatus(t *testing.T) { - allStatus := GetAllUploadStatus() - found := false - test.IsEqualInt(t, len(allStatus), 5) - for _, status := range allStatus { - if status.ChunkId == "ctokeep5" { - found = true - } - } - test.IsEqualBool(t, found, true) + runAllTypesCompareTwoOutputs(t, func() (any, any) { return GetUploadStatus("newstatus") }, models.UploadStatus{}, false) + runAllTypesCompareOutput(t, func() any { return GetAllUploadStatus() }, []models.UploadStatus{}) newStatus := models.UploadStatus{ - ChunkId: "testid", + ChunkId: "newstatus", CurrentStatus: 1, } - retrievedStatus, ok := GetUploadStatus("testid") - test.IsEqualBool(t, ok, false) - test.IsEqualBool(t, retrievedStatus == models.UploadStatus{}, true) - SaveUploadStatus(newStatus) - retrievedStatus, ok = GetUploadStatus("testid") - test.IsEqualBool(t, ok, true) - test.IsEqualString(t, retrievedStatus.ChunkId, "testid") - test.IsEqualInt(t, retrievedStatus.CurrentStatus, 1) - allStatus = GetAllUploadStatus() - test.IsEqualInt(t, len(allStatus), 6) + runAllTypesNoOutput(t, func() { SaveUploadStatus(newStatus) }) + runAllTypesCompareTwoOutputs(t, func() (any, any) { return GetUploadStatus("newstatus") }, newStatus, true) + runAllTypesCompareOutput(t, func() any { return GetAllUploadStatus() }, []models.UploadStatus{newStatus}) } -var originalDb *sql.DB +func TestHotlinks(t *testing.T) { + runAllTypesCompareTwoOutputs(t, func() (any, any) { return GetHotlink("newhotlink") }, "", false) + newFile := models.File{Id: "testfile", + HotlinkId: "newhotlink"} + runAllTypesNoOutput(t, func() { SaveHotlink(newFile) }) + runAllTypesCompareTwoOutputs(t, func() (any, any) { return GetHotlink("newhotlink") }, "testfile", true) + runAllTypesCompareOutput(t, func() any { return GetAllHotlinks() }, []string{"newhotlink"}) + runAllTypesNoOutput(t, func() { DeleteHotlink("newhotlink") }) + runAllTypesCompareOutput(t, func() any { return GetAllHotlinks() }, []string{}) +} -func setMockDb(t *testing.T) sqlmock.Sqlmock { - originalDb = sqliteDb - db, mock, err := sqlmock.New() +func TestMetaData(t *testing.T) { + runAllTypesCompareOutput(t, func() any { return GetAllMetaDataIds() }, []string{}) + runAllTypesCompareOutput(t, func() any { return GetAllMetadata() }, map[string]models.File{}) + runAllTypesCompareTwoOutputs(t, func() (any, any) { return GetMetaDataById("testid") }, models.File{}, false) + file := models.File{ + Id: "testid", + Name: "Testname", + Size: 
"3Kb", + SHA1: "12345556", + PasswordHash: "sfffwefwe", + HotlinkId: "hotlink", + ContentType: "none", + AwsBucket: "aws1", + ExpireAtString: "In 10 seconds", + ExpireAt: time.Now().Add(10 * time.Second).Unix(), + SizeBytes: 3 * 1024, + DownloadsRemaining: 2, + DownloadCount: 5, + Encryption: models.EncryptionInfo{ + IsEncrypted: true, + IsEndToEndEncrypted: true, + DecryptionKey: []byte("dekey"), + Nonce: []byte("nonce"), + }, + UnlimitedDownloads: true, + UnlimitedTime: true, + } + runAllTypesNoOutput(t, func() { SaveMetaData(file) }) + runAllTypesCompareTwoOutputs(t, func() (any, any) { return GetMetaDataById("testid") }, file, true) + runAllTypesCompareOutput(t, func() any { return GetAllMetaDataIds() }, []string{"testid"}) + runAllTypesCompareOutput(t, func() any { return GetAllMetadata() }, map[string]models.File{"testid": file}) + runAllTypesNoOutput(t, func() { DeleteMetaData("testid") }) + runAllTypesCompareOutput(t, func() any { return GetAllMetaDataIds() }, []string{}) + runAllTypesCompareOutput(t, func() any { return GetAllMetadata() }, map[string]models.File{}) + runAllTypesCompareTwoOutputs(t, func() (any, any) { return GetMetaDataById("testid") }, models.File{}, false) +} + +func TestUpgrade(t *testing.T) { + actualDbVersion := currentDbVersion + currentDbVersion = 99 + runAllTypesNoOutput(t, func() { db.SetDbVersion(1) }) + runAllTypesNoOutput(t, func() { Upgrade() }) + runAllTypesNoOutput(t, func() { test.IsEqualInt(t, db.GetDbVersion(), 99) }) + currentDbVersion = actualDbVersion +} + +func TestRunGarbageCollection(t *testing.T) { + runAllTypesNoOutput(t, func() { RunGarbageCollection() }) +} + +func TestClose(t *testing.T) { + runAllTypesNoOutput(t, func() { Close() }) +} + +func runAllTypesNoOutput(t *testing.T, functionToRun func()) { + t.Helper() + for _, database := range availableDatabases { + db = database + functionToRun() + } +} + +func runAllTypesCompareOutput(t *testing.T, functionToRun func() any, expectedOutput any) { + t.Helper() + for _, database := range availableDatabases { + db = database + output := functionToRun() + test.IsEqual(t, output, expectedOutput) + } +} + +func runAllTypesCompareTwoOutputs(t *testing.T, functionToRun func() (any, any), expectedOutput1, expectedOutput2 any) { + t.Helper() + for _, database := range availableDatabases { + db = database + output1, output2 := functionToRun() + test.IsEqual(t, output1, expectedOutput1) + test.IsEqual(t, output2, expectedOutput2) + } +} + +func TestParseUrl(t *testing.T) { + expectedOutput := models.DbConnection{} + output, err := ParseUrl("invalid", false) + test.IsNotNil(t, err) + test.IsEqual(t, output, expectedOutput) + + _, err = ParseUrl("", false) + test.IsNotNil(t, err) + _, err = ParseUrl("inv\r\nalid", false) + test.IsNotNil(t, err) + _, err = ParseUrl("", false) + test.IsNotNil(t, err) + + expectedOutput = models.DbConnection{ + HostUrl: "./test", + Type: dbabstraction.TypeSqlite, + } + output, err = ParseUrl("sqlite://./test", false) test.IsNil(t, err) - sqliteDb = db - return mock + test.IsEqual(t, output, expectedOutput) + + _, err = ParseUrl("sqlite:///invalid", true) + test.IsNotNil(t, err) + output, err = ParseUrl("sqlite:///invalid", false) + test.IsNil(t, err) + test.IsEqualString(t, output.HostUrl, "/invalid") + + expectedOutput = models.DbConnection{ + HostUrl: "127.0.0.1:1234", + RedisPrefix: "", + Username: "", + Password: "", + RedisUseSsl: false, + Type: dbabstraction.TypeRedis, + } + output, err = ParseUrl("redis://127.0.0.1:1234", false) + test.IsNil(t, err) + 
test.IsEqual(t, output, expectedOutput) + + expectedOutput = models.DbConnection{ + HostUrl: "127.0.0.1:1234", + RedisPrefix: "tpref", + Username: "tuser", + Password: "tpw", + RedisUseSsl: true, + Type: dbabstraction.TypeRedis, + } + output, err = ParseUrl("redis://tuser:tpw@127.0.0.1:1234/?ssl=true&prefix=tpref", false) + test.IsNil(t, err) + test.IsEqual(t, output, expectedOutput) } -func restoreDb() { - sqliteDb = originalDb + +func TestMigration(t *testing.T) { + configNew := models.DbConnection{ + RedisPrefix: "testmigrate_", + HostUrl: "127.0.0.1:26379", + Type: 1, // dbabstraction.TypeRedis + } + dbOld, err := dbabstraction.GetNew(configSqlite) + test.IsNil(t, err) + testFile := models.File{Id: "file1234", HotlinkId: "hotlink123"} + dbOld.SaveMetaData(testFile) + dbOld.SaveHotlink(testFile) + dbOld.SaveApiKey(models.ApiKey{Id: "api123"}) + dbOld.SaveUploadDefaults(models.LastUploadValues{Password: "pw123"}) + dbOld.SaveHotlink(testFile) + dbOld.Close() + + Migrate(configSqlite, configNew) + + dbNew, err := dbabstraction.GetNew(configNew) + test.IsNil(t, err) + _, ok := dbNew.GetHotlink("hotlink123") + test.IsEqualBool(t, ok, true) + _, ok = dbNew.GetApiKey("api123") + test.IsEqualBool(t, ok, true) + defaults, ok := dbNew.GetUploadDefaults() + test.IsEqualBool(t, ok, true) + fmt.Printf("defaults: %+v\n", defaults) + test.IsEqualString(t, defaults.Password, "pw123") + _, ok = dbNew.GetMetaDataById("file1234") + test.IsEqualBool(t, ok, true) } diff --git a/internal/configuration/database/apikeys.go b/internal/configuration/database/apikeys.go deleted file mode 100644 index aa7385f..0000000 --- a/internal/configuration/database/apikeys.go +++ /dev/null @@ -1,82 +0,0 @@ -package database - -import ( - "database/sql" - "errors" - "github.com/forceu/gokapi/internal/helper" - "github.com/forceu/gokapi/internal/models" -) - -type schemaApiKeys struct { - Id string - FriendlyName string - LastUsed int64 - LastUsedString string - Permissions int -} - -// GetAllApiKeys returns a map with all API keys -func GetAllApiKeys() map[string]models.ApiKey { - result := make(map[string]models.ApiKey) - - rows, err := sqliteDb.Query("SELECT * FROM ApiKeys") - helper.Check(err) - defer rows.Close() - for rows.Next() { - rowData := schemaApiKeys{} - err = rows.Scan(&rowData.Id, &rowData.FriendlyName, &rowData.LastUsed, &rowData.LastUsedString, &rowData.Permissions) - helper.Check(err) - result[rowData.Id] = models.ApiKey{ - Id: rowData.Id, - FriendlyName: rowData.FriendlyName, - LastUsed: rowData.LastUsed, - LastUsedString: rowData.LastUsedString, - Permissions: uint8(rowData.Permissions), - } - } - return result -} - -// GetApiKey returns a models.ApiKey if valid or false if the ID is not valid -func GetApiKey(id string) (models.ApiKey, bool) { - var rowResult schemaApiKeys - row := sqliteDb.QueryRow("SELECT * FROM ApiKeys WHERE Id = ?", id) - err := row.Scan(&rowResult.Id, &rowResult.FriendlyName, &rowResult.LastUsed, &rowResult.LastUsedString, &rowResult.Permissions) - if err != nil { - if errors.Is(err, sql.ErrNoRows) { - return models.ApiKey{}, false - } - helper.Check(err) - return models.ApiKey{}, false - } - - result := models.ApiKey{ - Id: rowResult.Id, - FriendlyName: rowResult.FriendlyName, - LastUsed: rowResult.LastUsed, - LastUsedString: rowResult.LastUsedString, - Permissions: uint8(rowResult.Permissions), - } - - return result, true -} - -// SaveApiKey saves the API key to the database -func SaveApiKey(apikey models.ApiKey) { - _, err := sqliteDb.Exec("INSERT OR REPLACE INTO ApiKeys (Id, 
FriendlyName, LastUsed, LastUsedString, Permissions) VALUES (?, ?, ?, ?, ?)", - apikey.Id, apikey.FriendlyName, apikey.LastUsed, apikey.LastUsedString, apikey.Permissions) - helper.Check(err) -} - -// UpdateTimeApiKey writes the content of LastUsage to the database -func UpdateTimeApiKey(apikey models.ApiKey) { - _, err := sqliteDb.Exec("UPDATE ApiKeys SET LastUsed = ?, LastUsedString = ? WHERE Id = ?", - apikey.LastUsed, apikey.LastUsedString, apikey.Id) - helper.Check(err) -} - -// DeleteApiKey deletes an API key with the given ID -func DeleteApiKey(id string) { - _, err := sqliteDb.Exec("DELETE FROM ApiKeys WHERE Id = ?", id) - helper.Check(err) -} diff --git a/internal/configuration/database/dbabstraction/DbAbstraction.go b/internal/configuration/database/dbabstraction/DbAbstraction.go new file mode 100644 index 0000000..514c543 --- /dev/null +++ b/internal/configuration/database/dbabstraction/DbAbstraction.go @@ -0,0 +1,104 @@ +package dbabstraction + +import ( + "fmt" + "github.com/forceu/gokapi/internal/configuration/database/provider/redis" + "github.com/forceu/gokapi/internal/configuration/database/provider/sqlite" + "github.com/forceu/gokapi/internal/models" +) + +const ( + // TypeSqlite specifies to use an SQLite database + TypeSqlite = iota + // TypeRedis specifies to use a Redis database + TypeRedis +) + +// Database declares the required functions for a database connection +type Database interface { + // GetType returns identifier of the underlying interface + GetType() int + + // Upgrade migrates the DB to a new Gokapi version, if required + Upgrade(currentDbVersion int) + // RunGarbageCollection runs the databases GC + RunGarbageCollection() + // Close the database connection + Close() + + // GetDbVersion gets the version number of the database + GetDbVersion() int + // SetDbVersion sets the version number of the database + SetDbVersion(newVersion int) + + // GetAllApiKeys returns a map with all API keys + GetAllApiKeys() map[string]models.ApiKey + // GetApiKey returns a models.ApiKey if valid or false if the ID is not valid + GetApiKey(id string) (models.ApiKey, bool) + // SaveApiKey saves the API key to the database + SaveApiKey(apikey models.ApiKey) + // UpdateTimeApiKey writes the content of LastUsage to the database + UpdateTimeApiKey(apikey models.ApiKey) + // DeleteApiKey deletes an API key with the given ID + DeleteApiKey(id string) + + // SaveEnd2EndInfo stores the encrypted e2e info + SaveEnd2EndInfo(info models.E2EInfoEncrypted) + // GetEnd2EndInfo retrieves the encrypted e2e info + GetEnd2EndInfo() models.E2EInfoEncrypted + // DeleteEnd2EndInfo resets the encrypted e2e info + DeleteEnd2EndInfo() + + // GetHotlink returns the id of the file associated or false if not found + GetHotlink(id string) (string, bool) + // GetAllHotlinks returns an array with all hotlink ids + GetAllHotlinks() []string + // SaveHotlink stores the hotlink associated with the file in the database + SaveHotlink(file models.File) + // DeleteHotlink deletes a hotlink with the given hotlink ID + DeleteHotlink(id string) + + // GetAllMetadata returns a map of all available files + GetAllMetadata() map[string]models.File + // GetAllMetaDataIds returns all Ids that contain metadata + GetAllMetaDataIds() []string + // GetMetaDataById returns a models.File from the ID passed or false if the id is not valid + GetMetaDataById(id string) (models.File, bool) + // SaveMetaData stores the metadata of a file to the disk + SaveMetaData(file models.File) + // DeleteMetaData deletes information about a file 
+ DeleteMetaData(id string) + + // GetSession returns the session with the given ID or false if not a valid ID + GetSession(id string) (models.Session, bool) + // SaveSession stores the given session. After the expiry passed, it will be deleted automatically + SaveSession(id string, session models.Session) + // DeleteSession deletes a session with the given ID + DeleteSession(id string) + // DeleteAllSessions logs all users out + DeleteAllSessions() + + // GetUploadDefaults returns the last used setting for amount of downloads allowed, last expiry in days and + // a password for the file + GetUploadDefaults() (models.LastUploadValues, bool) + // SaveUploadDefaults saves the last used setting for an upload + SaveUploadDefaults(values models.LastUploadValues) + // GetUploadStatus returns a models.UploadStatus from the ID passed or false if the id is not valid + GetUploadStatus(id string) (models.UploadStatus, bool) + // GetAllUploadStatus returns all UploadStatus values from the past 24 hours + GetAllUploadStatus() []models.UploadStatus + // SaveUploadStatus stores the upload status of a new file for 24 hours + SaveUploadStatus(status models.UploadStatus) +} + +// GetNew connects to the given database and initialises it +func GetNew(config models.DbConnection) (Database, error) { + switch config.Type { + case TypeSqlite: + return sqlite.New(config) + case TypeRedis: + return redis.New(config) + default: + return nil, fmt.Errorf("unsupported database: type %v", config.Type) + } +} diff --git a/internal/configuration/database/dbabstraction/DbAbstraction_test.go b/internal/configuration/database/dbabstraction/DbAbstraction_test.go new file mode 100644 index 0000000..b7ac20c --- /dev/null +++ b/internal/configuration/database/dbabstraction/DbAbstraction_test.go @@ -0,0 +1,27 @@ +package dbabstraction + +import ( + "github.com/forceu/gokapi/internal/models" + "github.com/forceu/gokapi/internal/test" + "testing" +) + +var configSqlite = models.DbConnection{ + Type: 0, // dbabstraction.TypeSqlite +} + +var configRedis = models.DbConnection{ + Type: 1, // dbabstraction.TypeRedis +} + +func TestGetNew(t *testing.T) { + result, err := GetNew(configSqlite) + test.IsNotNil(t, err) + test.IsEqualInt(t, result.GetType(), 0) + result, err = GetNew(configRedis) + test.IsNotNil(t, err) + test.IsEqualInt(t, result.GetType(), 1) + + _, err = GetNew(models.DbConnection{Type: 2}) + test.IsNotNil(t, err) +} diff --git a/internal/configuration/database/migration/Migration.go b/internal/configuration/database/migration/Migration.go new file mode 100644 index 0000000..463680d --- /dev/null +++ b/internal/configuration/database/migration/Migration.go @@ -0,0 +1,40 @@ +package migration + +import ( + "fmt" + "github.com/forceu/gokapi/internal/configuration/database" + "github.com/forceu/gokapi/internal/configuration/database/dbabstraction" + "github.com/forceu/gokapi/internal/environment/flagparser" + "os" +) + +// Do checks the passed flags for a migration and then executes it +func Do(flags flagparser.MigrateFlags) { + oldDb, err := database.ParseUrl(flags.Source, true) + if err != nil { + fmt.Println("Error: " + err.Error()) + osExit(1) + return + } + newDb, err := database.ParseUrl(flags.Destination, false) + if err != nil { + fmt.Println(err.Error()) + osExit(2) + return + } + fmt.Printf("Migrating %s database %s to %s database %s\n", getType(oldDb.Type), oldDb.HostUrl, getType(newDb.Type), newDb.HostUrl) + database.Migrate(oldDb, newDb) +} + +func getType(input int) string { + switch input { + case 
dbabstraction.TypeSqlite: + return "SQLite" + case dbabstraction.TypeRedis: + return "Redis" + } + return "Invalid" +} + +// Declared for testing +var osExit = os.Exit diff --git a/internal/configuration/database/migration/Migration_test.go b/internal/configuration/database/migration/Migration_test.go new file mode 100644 index 0000000..a6a949f --- /dev/null +++ b/internal/configuration/database/migration/Migration_test.go @@ -0,0 +1,75 @@ +package migration + +import ( + "github.com/forceu/gokapi/internal/configuration" + "github.com/forceu/gokapi/internal/configuration/database" + "github.com/forceu/gokapi/internal/configuration/database/dbabstraction" + "github.com/forceu/gokapi/internal/environment/flagparser" + "github.com/forceu/gokapi/internal/test" + "github.com/forceu/gokapi/internal/test/testconfiguration" + "os" + "testing" +) + +func TestMain(m *testing.M) { + testconfiguration.Create(false) + exitVal := m.Run() + testconfiguration.Delete() + os.Exit(exitVal) +} + +func TestGetType(t *testing.T) { + test.IsEqualString(t, getType(dbabstraction.TypeSqlite), "SQLite") + test.IsEqualString(t, getType(dbabstraction.TypeRedis), "Redis") + test.IsEqualString(t, getType(2), "Invalid") +} + +var exitCode int + +func TestMigration(t *testing.T) { + osExit = func(code int) { exitCode = code } + Do(flagparser.MigrateFlags{ + Source: "", + Destination: "sqlite://ignore", + }) + test.IsEqualInt(t, exitCode, 1) + exitCode = 0 + + Do(flagparser.MigrateFlags{ + Source: "sqlite://./tempfile", + Destination: "", + }) + test.IsEqualInt(t, exitCode, 1) + exitCode = 0 + + err := os.WriteFile("tempfile", []byte("ignore"), 777) + test.IsNil(t, err) + Do(flagparser.MigrateFlags{ + Source: "sqlite://./tempfile", + Destination: "", + }) + test.IsEqualInt(t, exitCode, 2) + exitCode = 0 + + err = os.Remove("tempfile") + test.IsNil(t, err) + + dbUrl := testconfiguration.GetSqliteUrl() + dbUrlNew := dbUrl + "2" + Do(flagparser.MigrateFlags{ + Source: dbUrl, + Destination: dbUrlNew, + }) + err = os.Setenv("GOKAPI_DATABASE_URL", dbUrlNew) + test.IsNil(t, err) + configuration.Load() + configuration.ConnectDatabase() + _, ok := database.GetHotlink("PhSs6mFtf8O5YGlLMfNw9rYXx9XRNkzCnJZpQBi7inunv3Z4A.jpg") + test.IsEqualBool(t, ok, true) + _, ok = database.GetApiKey("validkey") + test.IsEqualBool(t, ok, true) + defaults := database.GetUploadDefaults() + test.IsEqualString(t, defaults.Password, "123") + _, ok = database.GetMetaDataById("Wzol7LyY2QVczXynJtVo") + test.IsEqualBool(t, ok, true) +} diff --git a/internal/configuration/database/provider/redis/Redis.go b/internal/configuration/database/provider/redis/Redis.go new file mode 100644 index 0000000..3479a73 --- /dev/null +++ b/internal/configuration/database/provider/redis/Redis.go @@ -0,0 +1,257 @@ +package redis + +import ( + "errors" + "fmt" + "github.com/forceu/gokapi/internal/helper" + "github.com/forceu/gokapi/internal/models" + redigo "github.com/gomodule/redigo/redis" + "strconv" + "strings" + "time" +) + +// DatabaseProvider contains the database instance +type DatabaseProvider struct { + pool *redigo.Pool + dbPrefix string +} + +// New returns an instance +func New(dbConfig models.DbConnection) (DatabaseProvider, error) { + return DatabaseProvider{}.init(dbConfig) +} + +// GetType returns 1, for being a Redis interface +func (p DatabaseProvider) GetType() int { + return 1 // dbabstraction.Redis +} + +// Init connects to the database and creates the table structure, if necessary +// IMPORTANT: The function returns itself, as Go does not allow this 
function to be pointer-based +// The resulting new reference must then be used. +func (p DatabaseProvider) init(config models.DbConnection) (DatabaseProvider, error) { + if config.HostUrl == "" { + return DatabaseProvider{}, errors.New("empty database url was provided") + } + p.dbPrefix = config.RedisPrefix + p.pool = newPool(config) + conn := p.pool.Get() + defer conn.Close() + _, err := redigo.String(conn.Do("PING")) + return p, err +} + +func getDialOptions(config models.DbConnection) []redigo.DialOption { + dialOptions := []redigo.DialOption{redigo.DialClientName("gokapi")} + if config.Username != "" { + dialOptions = append(dialOptions, redigo.DialUsername(config.Username)) + } + if config.Password != "" { + dialOptions = append(dialOptions, redigo.DialPassword(config.Password)) + } + if config.RedisUseSsl { + dialOptions = append(dialOptions, redigo.DialUseTLS(true)) + } + return dialOptions +} + +func newPool(config models.DbConnection) *redigo.Pool { + + newRedisPool := &redigo.Pool{ + MaxIdle: 10, + IdleTimeout: 2 * time.Minute, + + Dial: func() (redigo.Conn, error) { + c, err := redigo.Dial("tcp", config.HostUrl, getDialOptions(config)...) + if err != nil { + fmt.Println("Error connecting to redis") + } + helper.Check(err) + return c, err + }, + + TestOnBorrow: func(c redigo.Conn, t time.Time) error { + _, err := c.Do("PING") + return err + }, + } + return newRedisPool +} + +// Upgrade migrates the DB to a new Gokapi version, if required +func (p DatabaseProvider) Upgrade(currentDbVersion int) { + // Currently no upgrade necessary + return +} + +const keyDbVersion = "dbversion" + +// GetDbVersion gets the version number of the database +func (p DatabaseProvider) GetDbVersion() int { + key, _ := p.getKeyInt(keyDbVersion) + return key +} + +// SetDbVersion sets the version number of the database +func (p DatabaseProvider) SetDbVersion(currentVersion int) { + p.setKey(keyDbVersion, currentVersion) +} + +// Close the database connection +func (p DatabaseProvider) Close() { + err := p.pool.Close() + if err != nil { + fmt.Println(err) + } +} + +// RunGarbageCollection runs the databases GC +func (p DatabaseProvider) RunGarbageCollection() { + // No cleanup required +} + +// Function to get all hashmaps with a given prefix +func (p DatabaseProvider) getAllValuesWithPrefix(prefix string) map[string]any { + result := make(map[string]any) + allKeys := p.getAllKeysWithPrefix(prefix) + for _, key := range allKeys { + value, err := p.getKeyRaw(key) + if errors.Is(err, redigo.ErrNil) { + continue + } + helper.Check(err) + result[key] = value + } + return result +} + +// Function to get all hashmaps with a given prefix +func (p DatabaseProvider) getAllHashesWithPrefix(prefix string) map[string][]any { + result := make(map[string][]any) + allKeys := p.getAllKeysWithPrefix(prefix) + for _, key := range allKeys { + hashMap, ok := p.getHashMap(key) + if !ok { + continue + } + result[key] = hashMap + } + return result +} + +func (p DatabaseProvider) getAllKeysWithPrefix(prefix string) []string { + var result []string + conn := p.pool.Get() + defer conn.Close() + fullPrefix := p.dbPrefix + prefix + cursor := 0 + for { + reply, err := redigo.Values(conn.Do("SCAN", cursor, "MATCH", fullPrefix+"*", "COUNT", 100)) + helper.Check(err) + + cursor, _ = redigo.Int(reply[0], nil) + keys, _ := redigo.Strings(reply[1], nil) + for _, key := range keys { + result = append(result, strings.Replace(key, p.dbPrefix, "", 1)) + } + if cursor == 0 { + break + } + } + return result +} + +func (p DatabaseProvider) 
setKey(id string, content any) { + conn := p.pool.Get() + defer conn.Close() + _, err := conn.Do("SET", p.dbPrefix+id, content) + helper.Check(err) +} + +func (p DatabaseProvider) getKeyRaw(id string) (any, error) { + conn := p.pool.Get() + defer conn.Close() + return conn.Do("GET", p.dbPrefix+id) +} + +func (p DatabaseProvider) getKeyString(id string) (string, bool) { + result, err := redigo.String(p.getKeyRaw(id)) + if result == "" { + return "", false + } + helper.Check(err) + return result, true +} + +func (p DatabaseProvider) getKeyInt(id string) (int, bool) { + result, err := p.getKeyRaw(id) + if result == nil { + return 0, false + } + resultInt, err2 := redigo.Int(result, err) + helper.Check(err2) + return resultInt, true +} +func (p DatabaseProvider) getKeyBytes(id string) ([]byte, bool) { + result, err := p.getKeyRaw(id) + if result == nil { + return nil, false + } + resultInt, err2 := redigo.Bytes(result, err) + helper.Check(err2) + return resultInt, true +} + +func (p DatabaseProvider) getHashMap(id string) ([]any, bool) { + conn := p.pool.Get() + defer conn.Close() + result, err := redigo.Values(conn.Do("HGETALL", p.dbPrefix+id)) + helper.Check(err) + if len(result) == 0 { + return nil, false + } + return result, true +} + +func (p DatabaseProvider) buildArgs(id string) redigo.Args { + return redigo.Args{}.Add(p.dbPrefix + id) +} + +func (p DatabaseProvider) setHashMap(content redigo.Args) { + conn := p.pool.Get() + defer conn.Close() + _, err := conn.Do("HMSET", content...) + helper.Check(err) +} + +func (p DatabaseProvider) setExpiryAt(id string, expiry int64) { + conn := p.pool.Get() + defer conn.Close() + _, err := conn.Do("EXPIREAT", p.dbPrefix+id, strconv.FormatInt(expiry, 10)) + helper.Check(err) +} +func (p DatabaseProvider) setExpiryInSeconds(id string, expiry int64) { + conn := p.pool.Get() + defer conn.Close() + _, err := conn.Do("EXPIRE", p.dbPrefix+id, strconv.FormatInt(expiry, 10)) + helper.Check(err) +} + +func (p DatabaseProvider) deleteKey(id string) { + conn := p.pool.Get() + defer conn.Close() + _, err := conn.Do("DEL", p.dbPrefix+id) + helper.Check(err) +} + +func (p DatabaseProvider) runEval(cmd string) { + conn := p.pool.Get() + defer conn.Close() + _, err := conn.Do("EVAL", cmd, "0") + helper.Check(err) +} + +func (p DatabaseProvider) deleteAllWithPrefix(prefix string) { + p.runEval("for _,k in ipairs(redis.call('keys','" + p.dbPrefix + prefix + "*')) do redis.call('del',k) end") +} diff --git a/internal/configuration/database/provider/redis/Redis_test.go b/internal/configuration/database/provider/redis/Redis_test.go new file mode 100644 index 0000000..61cb67f --- /dev/null +++ b/internal/configuration/database/provider/redis/Redis_test.go @@ -0,0 +1,431 @@ +package redis + +import ( + "github.com/alicebob/miniredis/v2" + "github.com/forceu/gokapi/internal/models" + "github.com/forceu/gokapi/internal/test" + redigo "github.com/gomodule/redigo/redis" + "log" + "os" + "slices" + "testing" + "time" +) + +var config = models.DbConnection{ + RedisPrefix: "test_", + HostUrl: "127.0.0.1:16379", + Type: 1, // dbabstraction.TypeRedis +} + +var mRedis *miniredis.Miniredis + +func TestMain(m *testing.M) { + + mRedis = miniredis.NewMiniRedis() + err := mRedis.StartAddr("127.0.0.1:16379") + if err != nil { + log.Fatal("Could not start miniredis") + } + defer mRedis.Close() + exitVal := m.Run() + os.Exit(exitVal) +} + +var dbInstance DatabaseProvider + +func TestDatabaseProvider_Init(t *testing.T) { + instance, err := New(config) + test.IsNil(t, err) + instance.Close() 
+ _, err = New(models.DbConnection{}) + test.IsNotNil(t, err) + defer test.ExpectPanic(t) + _, err = New(models.DbConnection{ + RedisPrefix: "test_", + HostUrl: "invalid:11", + Type: 1, // dbabstraction.TypeRedis + }) + test.IsNotNil(t, err) +} + +func TestDatabaseProvider_GetType(t *testing.T) { + test.IsEqualInt(t, dbInstance.GetType(), 1) +} + +func TestDatabaseProvider_Upgrade(t *testing.T) { + var err error + dbInstance, err = New(config) + test.IsNil(t, err) + dbInstance.Upgrade(19) +} + +func TestDatabaseProvider_GetDbVersion(t *testing.T) { + version := dbInstance.GetDbVersion() + test.IsEqualInt(t, version, 0) + dbInstance.SetDbVersion(99) + test.IsEqualInt(t, dbInstance.GetDbVersion(), 99) + dbInstance.SetDbVersion(0) +} + +func TestDatabaseProvider_RunGarbageCollection(t *testing.T) { + dbInstance.RunGarbageCollection() +} + +func TestGetDialOptions(t *testing.T) { + result := getDialOptions(config) + test.IsEqualInt(t, len(result), 1) + newConfig := config + newConfig.Username = "123" + newConfig.Password = "456" + newConfig.RedisUseSsl = true + result = getDialOptions(newConfig) + test.IsEqualInt(t, len(result), 4) +} + +func TestGetKey(t *testing.T) { + key, ok := dbInstance.getKeyString("test1") + test.IsEqualString(t, key, "") + test.IsEqualBool(t, ok, false) + dbInstance.setKey("test1", "content") + key, ok = dbInstance.getKeyString("test1") + test.IsEqualString(t, key, "content") + test.IsEqualBool(t, ok, true) + dbInstance.deleteKey("test1") + key, ok = dbInstance.getKeyString("test1") + test.IsEqualString(t, key, "") + + keyInt, ok := dbInstance.getKeyInt("test2") + test.IsEqualInt(t, keyInt, 0) + test.IsEqualBool(t, ok, false) + dbInstance.setKey("test2", 2) + keyInt, ok = dbInstance.getKeyInt("test2") + test.IsEqualInt(t, keyInt, 2) + test.IsEqualBool(t, ok, true) + dbInstance.setKey("test2", 0) + keyInt, ok = dbInstance.getKeyInt("test2") + test.IsEqualInt(t, keyInt, 0) + test.IsEqualBool(t, ok, true) + + bytes, ok := dbInstance.getKeyBytes("test3") + test.IsEqualInt(t, len(bytes), 0) + test.IsEqualBool(t, ok, false) + dbInstance.setKey("test3", []byte("test")) + bytes, ok = dbInstance.getKeyBytes("test3") + test.IsEqualString(t, string(bytes), "test") + test.IsEqualBool(t, ok, true) +} + +func TestExpiration(t *testing.T) { + dbInstance.setKey("expTest", "test") + dbInstance.setKey("expTest2", "test2") + _, ok := dbInstance.getKeyString("expTest") + test.IsEqualBool(t, ok, true) + _, ok = dbInstance.getKeyString("expTest2") + test.IsEqualBool(t, ok, true) + dbInstance.setExpiryInSeconds("expTest", 1) + dbInstance.setExpiryAt("expTest2", time.Now().Add(1*time.Second).Unix()) + _, ok = dbInstance.getKeyString("expTest") + test.IsEqualBool(t, ok, true) + _, ok = dbInstance.getKeyString("expTest2") + test.IsEqualBool(t, ok, true) + mRedis.FastForward(2 * time.Second) + _, ok = dbInstance.getKeyString("expTest") + test.IsEqualBool(t, ok, false) + _, ok = dbInstance.getKeyString("expTest2") + test.IsEqualBool(t, ok, false) +} + +func TestDeleteAll(t *testing.T) { + dbInstance.setKey("delTest", "test") + dbInstance.setKey("delTest2", "test2") + dbInstance.setKey("delTest3", "test2") + + keys := dbInstance.getAllKeysWithPrefix("delTest") + test.IsEqualInt(t, len(keys), 3) + dbInstance.deleteAllWithPrefix("delTest") + keys = dbInstance.getAllKeysWithPrefix("delTest") + test.IsEqualInt(t, len(keys), 0) +} + +func TestGetAllValuesWithPrefix(t *testing.T) { + content := make(map[string]string) + content["alTest"] = "test" + content["alTest2"] = "test2" + content["alTest3"] = 
"test3" + content["alTest4"] = "test4" + for k, v := range content { + dbInstance.setKey(k, v) + } + keys := dbInstance.getAllValuesWithPrefix("alTest") + test.IsEqualInt(t, len(keys), 4) + for k, v := range keys { + result, err := redigo.String(v, nil) + test.IsNil(t, err) + test.IsEqualString(t, result, content[k]) + } +} + +func TestGetHashmap(t *testing.T) { + hmap, ok := dbInstance.getHashMap("newmap") + test.IsEqualBool(t, hmap == nil, true) + test.IsEqualBool(t, ok, false) + + content := make(map[string]string) + content["alTest1"] = "test" + content["alTest2"] = "test2" + content["alTest3"] = "test3" + content["alTest4"] = "test4" + dbInstance.setHashMap(dbInstance.buildArgs("newmap").AddFlat(content)) + hmap, ok = dbInstance.getHashMap("newmap") + test.IsEqualBool(t, ok, true) + hmapString, err := redigo.StringMap(hmap, nil) + test.IsNil(t, err) + for k, v := range content { + test.IsEqualString(t, hmapString[k], v) + } + + content2 := make(map[string]string) + content2["alTest4"] = "test4" + content2["alTest5"] = "test5" + content2["alTest6"] = "test6" + content2["alTest7"] = "test7" + dbInstance.setHashMap(dbInstance.buildArgs("newmap2").AddFlat(content2)) + + maps := dbInstance.getAllHashesWithPrefix("newmap") + test.IsEqualInt(t, len(maps), 2) +} + +func TestApiKeys(t *testing.T) { + keys := dbInstance.GetAllApiKeys() + test.IsEqualInt(t, len(keys), 0) + _, ok := dbInstance.GetApiKey("newkey") + test.IsEqualBool(t, ok, false) + + newKey := models.ApiKey{ + Id: "newkey", + FriendlyName: "New Key", + LastUsed: 1234, + Permissions: 1, + } + dbInstance.SaveApiKey(newKey) + retrievedKey, ok := dbInstance.GetApiKey("newkey") + test.IsEqualBool(t, ok, true) + test.IsEqualBool(t, retrievedKey.Id == newKey.Id, true) + test.IsEqualBool(t, retrievedKey.FriendlyName == newKey.FriendlyName, true) + test.IsEqualBool(t, retrievedKey.LastUsed == newKey.LastUsed, true) + test.IsEqualBool(t, retrievedKey.Permissions == newKey.Permissions, true) + + dbInstance.SaveApiKey(models.ApiKey{ + Id: "123", + FriendlyName: "34", + LastUsed: 0, + Permissions: 0, + }) + + keys = dbInstance.GetAllApiKeys() + test.IsEqualInt(t, len(keys), 2) + + dbInstance.DeleteApiKey("newkey") + _, ok = dbInstance.GetApiKey("newkey") + test.IsEqualBool(t, ok, false) + + newKey.LastUsed = 10 + dbInstance.UpdateTimeApiKey(newKey) + key, ok := dbInstance.GetApiKey("newkey") + test.IsEqualBool(t, ok, true) + test.IsEqualBool(t, key.LastUsed == 10, true) +} + +func TestE2EConfig(t *testing.T) { + e2econfig := models.E2EInfoEncrypted{ + Version: 1, + Nonce: []byte("testnonce"), + Content: []byte("testcontent"), + AvailableFiles: nil, + } + dbInstance.SaveEnd2EndInfo(e2econfig) + retrieved := dbInstance.GetEnd2EndInfo() + test.IsEqualInt(t, retrieved.Version, 1) + test.IsEqualString(t, string(retrieved.Nonce), "testnonce") + test.IsEqualString(t, string(retrieved.Content), "testcontent") + dbInstance.DeleteEnd2EndInfo() + retrieved = dbInstance.GetEnd2EndInfo() + test.IsEqualInt(t, retrieved.Version, 0) +} + +func TestHotlink(t *testing.T) { + instance, err := New(config) + test.IsNil(t, err) + dbInstance = instance + dbInstance.SaveHotlink(models.File{Id: "testfile", Name: "test.txt", HotlinkId: "testlink", ExpireAt: time.Now().Add(time.Hour).Unix()}) + + hotlink, ok := dbInstance.GetHotlink("testlink") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, hotlink, "testfile") + _, ok = dbInstance.GetHotlink("invalid") + test.IsEqualBool(t, ok, false) + + dbInstance.DeleteHotlink("invalid") + _, ok = 
dbInstance.GetHotlink("testlink") + test.IsEqualBool(t, ok, true) + dbInstance.DeleteHotlink("testlink") + _, ok = dbInstance.GetHotlink("testlink") + test.IsEqualBool(t, ok, false) + + dbInstance.SaveHotlink(models.File{Id: "testfile", Name: "test.txt", HotlinkId: "testlink", ExpireAt: 0, UnlimitedTime: true}) + hotlink, ok = dbInstance.GetHotlink("testlink") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, hotlink, "testfile") + + dbInstance.SaveHotlink(models.File{Id: "file2", Name: "file2.txt", HotlinkId: "link2", ExpireAt: time.Now().Add(time.Hour).Unix()}) + dbInstance.SaveHotlink(models.File{Id: "file3", Name: "file3.txt", HotlinkId: "link3", ExpireAt: time.Now().Add(time.Hour).Unix()}) + + hotlinks := dbInstance.GetAllHotlinks() + test.IsEqualInt(t, len(hotlinks), 3) + test.IsEqualBool(t, slices.Contains(hotlinks, "testlink"), true) + test.IsEqualBool(t, slices.Contains(hotlinks, "link2"), true) + test.IsEqualBool(t, slices.Contains(hotlinks, "link3"), true) + dbInstance.DeleteHotlink("") + hotlinks = dbInstance.GetAllHotlinks() + test.IsEqualInt(t, len(hotlinks), 3) +} + +func TestSession(t *testing.T) { + renewAt := time.Now().Add(1 * time.Hour).Unix() + dbInstance.SaveSession("newsession", models.Session{ + RenewAt: renewAt, + ValidUntil: time.Now().Add(2 * time.Hour).Unix(), + }) + + session, ok := dbInstance.GetSession("newsession") + test.IsEqualBool(t, ok, true) + test.IsEqualBool(t, session.RenewAt == renewAt, true) + + dbInstance.DeleteSession("newsession") + _, ok = dbInstance.GetSession("newsession") + test.IsEqualBool(t, ok, false) + + dbInstance.SaveSession("newsession", models.Session{ + RenewAt: renewAt, + ValidUntil: time.Now().Add(2 * time.Hour).Unix(), + }) + + dbInstance.SaveSession("anothersession", models.Session{ + RenewAt: renewAt, + ValidUntil: time.Now().Add(2 * time.Hour).Unix(), + }) + _, ok = dbInstance.GetSession("newsession") + test.IsEqualBool(t, ok, true) + _, ok = dbInstance.GetSession("anothersession") + test.IsEqualBool(t, ok, true) + + dbInstance.DeleteAllSessions() + _, ok = dbInstance.GetSession("newsession") + test.IsEqualBool(t, ok, false) + _, ok = dbInstance.GetSession("anothersession") + test.IsEqualBool(t, ok, false) +} + +func TestUploadDefaults(t *testing.T) { + defaults, ok := dbInstance.GetUploadDefaults() + test.IsEqualBool(t, ok, false) + dbInstance.SaveUploadDefaults(models.LastUploadValues{ + Downloads: 20, + TimeExpiry: 30, + Password: "abcd", + UnlimitedDownload: true, + UnlimitedTime: true, + }) + defaults, ok = dbInstance.GetUploadDefaults() + test.IsEqualBool(t, ok, true) + test.IsEqualInt(t, defaults.Downloads, 20) + test.IsEqualInt(t, defaults.TimeExpiry, 30) + test.IsEqualString(t, defaults.Password, "abcd") + test.IsEqualBool(t, defaults.UnlimitedDownload, true) + test.IsEqualBool(t, defaults.UnlimitedTime, true) +} + +func TestUploadStatus(t *testing.T) { + allStatus := dbInstance.GetAllUploadStatus() + test.IsEqualInt(t, len(allStatus), 0) + newStatus := models.UploadStatus{ + ChunkId: "testid", + CurrentStatus: 1, + } + retrievedStatus, ok := dbInstance.GetUploadStatus("testid") + test.IsEqualBool(t, ok, false) + test.IsEqualBool(t, retrievedStatus == models.UploadStatus{}, true) + dbInstance.SaveUploadStatus(newStatus) + retrievedStatus, ok = dbInstance.GetUploadStatus("testid") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, retrievedStatus.ChunkId, "testid") + test.IsEqualInt(t, retrievedStatus.CurrentStatus, 1) + allStatus = dbInstance.GetAllUploadStatus() + test.IsEqualInt(t, len(allStatus), 1) +} + 
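+// Editorial note, illustrative only: the tests above exercise the hash-based
+// storage used by this provider. Structs such as models.Session or
+// models.ApiKey are flattened into a Redis hash on save and scanned back on
+// read, roughly like this sketch (names taken from Redis.go/sessions.go):
+//
+//	p.setHashMap(p.buildArgs(prefixSessions + id).AddFlat(session)) // HMSET se:<id> field value ...
+//	entry, ok := p.getHashMap(prefixSessions + id)                  // HGETALL se:<id>
+//	var s models.Session
+//	if ok {
+//		_ = redigo.ScanStruct(entry, &s) // fields back into the struct
+//	}
+//
+// Keys with a lifetime (sessions, upload status) additionally receive
+// EXPIREAT/EXPIRE via setExpiryAt/setExpiryInSeconds, which is why
+// RunGarbageCollection is a no-op for the Redis provider.
+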
+func TestMetaData(t *testing.T) { + files := dbInstance.GetAllMetadata() + test.IsEqualInt(t, len(files), 0) + + dbInstance.SaveMetaData(models.File{Id: "testfile", Name: "test.txt", ExpireAt: time.Now().Add(time.Hour).Unix()}) + files = dbInstance.GetAllMetadata() + test.IsEqualInt(t, len(files), 1) + test.IsEqualString(t, files["testfile"].Name, "test.txt") + + file, ok := dbInstance.GetMetaDataById("testfile") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, file.Id, "testfile") + _, ok = dbInstance.GetMetaDataById("invalid") + test.IsEqualBool(t, ok, false) + + test.IsEqualInt(t, len(dbInstance.GetAllMetadata()), 1) + dbInstance.DeleteMetaData("invalid") + test.IsEqualInt(t, len(dbInstance.GetAllMetadata()), 1) + + test.IsEqualBool(t, file.UnlimitedDownloads, false) + test.IsEqualBool(t, file.UnlimitedTime, false) + + dbInstance.DeleteMetaData("testfile") + test.IsEqualInt(t, len(dbInstance.GetAllMetadata()), 0) + + dbInstance.SaveMetaData(models.File{ + Id: "test2", + Name: "test2", + UnlimitedDownloads: true, + UnlimitedTime: false, + }) + + file, ok = dbInstance.GetMetaDataById("test2") + test.IsEqualBool(t, ok, true) + test.IsEqualBool(t, file.UnlimitedDownloads, true) + test.IsEqualBool(t, file.UnlimitedTime, false) + + dbInstance.SaveMetaData(models.File{ + Id: "test3", + Name: "test3", + UnlimitedDownloads: false, + UnlimitedTime: true, + }) + file, ok = dbInstance.GetMetaDataById("test3") + test.IsEqualBool(t, ok, true) + test.IsEqualBool(t, file.UnlimitedDownloads, false) + test.IsEqualBool(t, file.UnlimitedTime, true) + dbInstance.Close() + defer test.ExpectPanic(t) + _ = dbInstance.GetAllMetadata() +} + +func TestGetAllMetaDataIds(t *testing.T) { + instance, err := New(config) + test.IsNil(t, err) + + ids := instance.GetAllMetaDataIds() + test.IsEqualString(t, ids[0], "test2") + test.IsEqualString(t, ids[1], "test3") + + instance.Close() + defer test.ExpectPanic(t) + _ = instance.GetAllMetaDataIds() +} diff --git a/internal/configuration/database/provider/redis/apikeys.go b/internal/configuration/database/provider/redis/apikeys.go new file mode 100644 index 0000000..01a4c4a --- /dev/null +++ b/internal/configuration/database/provider/redis/apikeys.go @@ -0,0 +1,57 @@ +package redis + +import ( + "github.com/forceu/gokapi/internal/helper" + "github.com/forceu/gokapi/internal/models" + redigo "github.com/gomodule/redigo/redis" + "strings" +) + +const ( + prefixApiKeys = "apikey:" +) + +func dbToApiKey(id string, input []any) (models.ApiKey, error) { + var result models.ApiKey + err := redigo.ScanStruct(input, &result) + result.Id = strings.Replace(id, prefixApiKeys, "", 1) + return result, err +} + +// GetAllApiKeys returns a map with all API keys +func (p DatabaseProvider) GetAllApiKeys() map[string]models.ApiKey { + result := make(map[string]models.ApiKey) + maps := p.getAllHashesWithPrefix(prefixApiKeys) + for k, v := range maps { + apiKey, err := dbToApiKey(k, v) + helper.Check(err) + result[apiKey.Id] = apiKey + } + return result +} + +// GetApiKey returns a models.ApiKey if valid or false if the ID is not valid +func (p DatabaseProvider) GetApiKey(id string) (models.ApiKey, bool) { + result, ok := p.getHashMap(prefixApiKeys + id) + if !ok { + return models.ApiKey{}, false + } + apikey, err := dbToApiKey(id, result) + helper.Check(err) + return apikey, true +} + +// SaveApiKey saves the API key to the database +func (p DatabaseProvider) SaveApiKey(apikey models.ApiKey) { + p.setHashMap(p.buildArgs(prefixApiKeys + apikey.Id).AddFlat(apikey)) +} + +// 
UpdateTimeApiKey writes the content of LastUsage to the database +func (p DatabaseProvider) UpdateTimeApiKey(apikey models.ApiKey) { + p.SaveApiKey(apikey) +} + +// DeleteApiKey deletes an API key with the given ID +func (p DatabaseProvider) DeleteApiKey(id string) { + p.deleteKey(prefixApiKeys + id) +} diff --git a/internal/configuration/database/provider/redis/e2econfig.go b/internal/configuration/database/provider/redis/e2econfig.go new file mode 100644 index 0000000..ebda303 --- /dev/null +++ b/internal/configuration/database/provider/redis/e2econfig.go @@ -0,0 +1,31 @@ +package redis + +import ( + "github.com/forceu/gokapi/internal/helper" + "github.com/forceu/gokapi/internal/models" + redigo "github.com/gomodule/redigo/redis" +) + +const idE2EInfo = "e2einfo" + +// SaveEnd2EndInfo stores the encrypted e2e info +func (p DatabaseProvider) SaveEnd2EndInfo(info models.E2EInfoEncrypted) { + p.setHashMap(p.buildArgs(idE2EInfo).AddFlat(info)) +} + +// GetEnd2EndInfo retrieves the encrypted e2e info +func (p DatabaseProvider) GetEnd2EndInfo() models.E2EInfoEncrypted { + result := models.E2EInfoEncrypted{} + value, ok := p.getHashMap(idE2EInfo) + if !ok { + return models.E2EInfoEncrypted{} + } + err := redigo.ScanStruct(value, &result) + helper.Check(err) + return result +} + +// DeleteEnd2EndInfo resets the encrypted e2e info +func (p DatabaseProvider) DeleteEnd2EndInfo() { + p.deleteKey(idE2EInfo) +} diff --git a/internal/configuration/database/provider/redis/hotlinks.go b/internal/configuration/database/provider/redis/hotlinks.go new file mode 100644 index 0000000..5e01957 --- /dev/null +++ b/internal/configuration/database/provider/redis/hotlinks.go @@ -0,0 +1,34 @@ +package redis + +import ( + "github.com/forceu/gokapi/internal/models" + "strings" +) + +const ( + prefixHotlinks = "hl:" +) + +// GetHotlink returns the id of the file associated or false if not found +func (p DatabaseProvider) GetHotlink(id string) (string, bool) { + return p.getKeyString(prefixHotlinks + id) +} + +// GetAllHotlinks returns an array with all hotlink ids +func (p DatabaseProvider) GetAllHotlinks() []string { + result := make([]string, 0) + for _, key := range p.getAllKeysWithPrefix(prefixHotlinks) { + result = append(result, strings.Replace(key, prefixHotlinks, "", 1)) + } + return result +} + +// SaveHotlink stores the hotlink associated with the file in the database +func (p DatabaseProvider) SaveHotlink(file models.File) { + p.setKey(prefixHotlinks+file.HotlinkId, file.Id) +} + +// DeleteHotlink deletes a hotlink with the given hotlink ID +func (p DatabaseProvider) DeleteHotlink(id string) { + p.deleteKey(prefixHotlinks + id) +} diff --git a/internal/configuration/database/provider/redis/metadata.go b/internal/configuration/database/provider/redis/metadata.go new file mode 100644 index 0000000..d3b804a --- /dev/null +++ b/internal/configuration/database/provider/redis/metadata.go @@ -0,0 +1,68 @@ +package redis + +import ( + "bytes" + "encoding/gob" + "github.com/forceu/gokapi/internal/helper" + "github.com/forceu/gokapi/internal/models" + redigo "github.com/gomodule/redigo/redis" + "strings" +) + +const ( + prefixMetaData = "fmeta:" +) + +func dbToMetaData(input []byte) models.File { + var result models.File + buf := bytes.NewBuffer(input) + dec := gob.NewDecoder(buf) + err := dec.Decode(&result) + helper.Check(err) + return result +} + +// GetAllMetadata returns a map of all available files +func (p DatabaseProvider) GetAllMetadata() map[string]models.File { + result := make(map[string]models.File) + 
allMetaData := p.getAllValuesWithPrefix(prefixMetaData) + for _, metaData := range allMetaData { + content, err := redigo.Bytes(metaData, nil) + helper.Check(err) + file := dbToMetaData(content) + result[file.Id] = file + } + return result +} + +// GetAllMetaDataIds returns all Ids that contain metadata +func (p DatabaseProvider) GetAllMetaDataIds() []string { + result := make([]string, 0) + for _, key := range p.getAllKeysWithPrefix(prefixMetaData) { + result = append(result, strings.Replace(key, prefixMetaData, "", 1)) + } + return result +} + +// GetMetaDataById returns a models.File from the ID passed or false if the id is not valid +func (p DatabaseProvider) GetMetaDataById(id string) (models.File, bool) { + input, ok := p.getKeyBytes(prefixMetaData + id) + if !ok { + return models.File{}, false + } + return dbToMetaData(input), true +} + +// SaveMetaData stores the metadata of a file to the disk +func (p DatabaseProvider) SaveMetaData(file models.File) { + var buf bytes.Buffer + enc := gob.NewEncoder(&buf) + err := enc.Encode(file) + helper.Check(err) + p.setKey(prefixMetaData+file.Id, buf.Bytes()) +} + +// DeleteMetaData deletes information about a file +func (p DatabaseProvider) DeleteMetaData(id string) { + p.deleteKey(prefixMetaData + id) +} diff --git a/internal/configuration/database/provider/redis/sessions.go b/internal/configuration/database/provider/redis/sessions.go new file mode 100644 index 0000000..e6383a0 --- /dev/null +++ b/internal/configuration/database/provider/redis/sessions.go @@ -0,0 +1,39 @@ +package redis + +import ( + "github.com/forceu/gokapi/internal/helper" + "github.com/forceu/gokapi/internal/models" + redigo "github.com/gomodule/redigo/redis" +) + +const ( + prefixSessions = "se:" +) + +// GetSession returns the session with the given ID or false if not a valid ID +func (p DatabaseProvider) GetSession(id string) (models.Session, bool) { + hashmapEntry, ok := p.getHashMap(prefixSessions + id) + if !ok { + return models.Session{}, false + } + var result models.Session + err := redigo.ScanStruct(hashmapEntry, &result) + helper.Check(err) + return result, true +} + +// SaveSession stores the given session. 
After the expiry passed, it will be deleted automatically +func (p DatabaseProvider) SaveSession(id string, session models.Session) { + p.setHashMap(p.buildArgs(prefixSessions + id).AddFlat(session)) + p.setExpiryAt(prefixSessions+id, session.ValidUntil) +} + +// DeleteSession deletes a session with the given ID +func (p DatabaseProvider) DeleteSession(id string) { + p.deleteKey(prefixSessions + id) +} + +// DeleteAllSessions logs all users out +func (p DatabaseProvider) DeleteAllSessions() { + p.deleteAllWithPrefix(prefixSessions) +} diff --git a/internal/configuration/database/provider/redis/uploaddefaults.go b/internal/configuration/database/provider/redis/uploaddefaults.go new file mode 100644 index 0000000..0bbd4e1 --- /dev/null +++ b/internal/configuration/database/provider/redis/uploaddefaults.go @@ -0,0 +1,30 @@ +package redis + +import ( + "github.com/forceu/gokapi/internal/helper" + "github.com/forceu/gokapi/internal/models" + redigo "github.com/gomodule/redigo/redis" +) + +const ( + idUploadDefaults = "uploadDefaults" +) + +// GetUploadDefaults returns the last used setting for amount of downloads allowed, last expiry in days and +// a password for the file +func (p DatabaseProvider) GetUploadDefaults() (models.LastUploadValues, bool) { + var result models.LastUploadValues + values, ok := p.getHashMap(idUploadDefaults) + if !ok { + return models.LastUploadValues{}, false + } + + err := redigo.ScanStruct(values, &result) + helper.Check(err) + return result, true +} + +// SaveUploadDefaults saves the last used setting for an upload +func (p DatabaseProvider) SaveUploadDefaults(values models.LastUploadValues) { + p.setHashMap(p.buildArgs(idUploadDefaults).AddFlat(values)) +} diff --git a/internal/configuration/database/provider/redis/uploadstatus.go b/internal/configuration/database/provider/redis/uploadstatus.go new file mode 100644 index 0000000..3f8778b --- /dev/null +++ b/internal/configuration/database/provider/redis/uploadstatus.go @@ -0,0 +1,45 @@ +package redis + +import ( + "github.com/forceu/gokapi/internal/helper" + "github.com/forceu/gokapi/internal/models" + redigo "github.com/gomodule/redigo/redis" + "strings" +) + +const ( + prefixUploadStatus = "us:" +) + +// GetAllUploadStatus returns all UploadStatus values from the past 24 hours +func (p DatabaseProvider) GetAllUploadStatus() []models.UploadStatus { + var result = make([]models.UploadStatus, 0) + for k, v := range p.getAllValuesWithPrefix(prefixUploadStatus) { + status, err := redigo.Int(v, nil) + helper.Check(err) + result = append(result, models.UploadStatus{ + ChunkId: strings.Replace(k, prefixUploadStatus, "", 1), + CurrentStatus: status, + }) + } + return result +} + +// GetUploadStatus returns a models.UploadStatus from the ID passed or false if the id is not valid +func (p DatabaseProvider) GetUploadStatus(id string) (models.UploadStatus, bool) { + status, ok := p.getKeyInt(prefixUploadStatus + id) + if !ok { + return models.UploadStatus{}, false + } + result := models.UploadStatus{ + ChunkId: id, + CurrentStatus: status, + } + return result, true +} + +// SaveUploadStatus stores the upload status of a new file for 24 hours +func (p DatabaseProvider) SaveUploadStatus(status models.UploadStatus) { + p.setKey(prefixUploadStatus+status.ChunkId, status.CurrentStatus) + p.setExpiryInSeconds(prefixUploadStatus+status.ChunkId, 24*60*60) // 24h +} diff --git a/internal/configuration/database/provider/sqlite/Sqlite.go b/internal/configuration/database/provider/sqlite/Sqlite.go new file mode 100644 index 
0000000..02e732a --- /dev/null +++ b/internal/configuration/database/provider/sqlite/Sqlite.go @@ -0,0 +1,193 @@ +package sqlite + +import ( + "database/sql" + "errors" + "fmt" + "github.com/forceu/gokapi/internal/helper" + "github.com/forceu/gokapi/internal/models" + "os" + "path/filepath" + // Required for sqlite driver + _ "modernc.org/sqlite" +) + +// DatabaseProvider contains the database instance +type DatabaseProvider struct { + sqliteDb *sql.DB +} + +// New returns an instance +func New(dbConfig models.DbConnection) (DatabaseProvider, error) { + return DatabaseProvider{}.init(dbConfig) +} + +// GetType returns 0, for being a Sqlite interface +func (p DatabaseProvider) GetType() int { + return 0 // dbabstraction.Sqlite +} + +// Upgrade migrates the DB to a new Gokapi version, if required +func (p DatabaseProvider) Upgrade(currentDbVersion int) { + // < v1.9.0 + if currentDbVersion < 2 { + // Remove Column LastUpdate, deleting old data + err := p.rawSqlite(`DROP TABLE UploadStatus; CREATE TABLE "UploadStatus" ( + "ChunkId" TEXT NOT NULL UNIQUE, + "CurrentStatus" INTEGER NOT NULL, + "CreationDate" INTEGER NOT NULL, + PRIMARY KEY("ChunkId") + ) WITHOUT ROWID;`) + helper.Check(err) + + // Remove Column LastUsedString, keeping old data + err = p.rawSqlite(`CREATE TABLE "ApiKeys_New" ( + "Id" TEXT NOT NULL UNIQUE, + "FriendlyName" TEXT NOT NULL, + "LastUsed" INTEGER NOT NULL, + "Permissions" INTEGER NOT NULL DEFAULT 0, + PRIMARY KEY("Id") + ) WITHOUT ROWID; + INSERT INTO "ApiKeys_New" (Id, FriendlyName, LastUsed, Permissions) + SELECT Id, FriendlyName, LastUsed, Permissions + FROM "ApiKeys"; + DROP TABLE "ApiKeys"; + ALTER TABLE "ApiKeys_New" RENAME TO "ApiKeys";`) + helper.Check(err) + } +} + +// GetDbVersion gets the version number of the database +func (p DatabaseProvider) GetDbVersion() int { + var userVersion int + row := p.sqliteDb.QueryRow("PRAGMA user_version;") + err := row.Scan(&userVersion) + helper.Check(err) + return userVersion +} + +// SetDbVersion sets the version number of the database +func (p DatabaseProvider) SetDbVersion(newVersion int) { + _, err := p.sqliteDb.Exec(fmt.Sprintf("PRAGMA user_version = %d;", newVersion)) + helper.Check(err) +} + +// Init connects to the database and creates the table structure, if necessary +func (p DatabaseProvider) init(dbConfig models.DbConnection) (DatabaseProvider, error) { + if dbConfig.HostUrl == "" { + return DatabaseProvider{}, errors.New("empty database url was provided") + } + if p.sqliteDb == nil { + cleanPath := filepath.Clean(dbConfig.HostUrl) + dataDir := filepath.Dir(cleanPath) + var err error + if !helper.FolderExists(dataDir) { + err = os.MkdirAll(dataDir, 0700) + if err != nil { + return DatabaseProvider{}, err + } + } + p.sqliteDb, err = sql.Open("sqlite", cleanPath+"?_pragma=busy_timeout=10000&_pragma=journal_mode=WAL") + if err != nil { + return DatabaseProvider{}, err + } + p.sqliteDb.SetMaxOpenConns(10000) + p.sqliteDb.SetMaxIdleConns(10000) + + if !helper.FileExists(dbConfig.HostUrl) { + return p, p.createNewDatabase() + } + err = p.sqliteDb.Ping() + return p, err + } + return p, nil +} + +// Close the database connection +func (p DatabaseProvider) Close() { + if p.sqliteDb != nil { + err := p.sqliteDb.Close() + if err != nil { + fmt.Println(err) + } + } + p.sqliteDb = nil +} + +// RunGarbageCollection runs the databases GC +func (p DatabaseProvider) RunGarbageCollection() { + p.cleanExpiredSessions() + p.cleanUploadStatus() +} + +func (p DatabaseProvider) createNewDatabase() error { + sqlStmt := `CREATE TABLE 
"ApiKeys" ( + "Id" TEXT NOT NULL UNIQUE, + "FriendlyName" TEXT NOT NULL, + "LastUsed" INTEGER NOT NULL, + "Permissions" INTEGER NOT NULL DEFAULT 0, + PRIMARY KEY("Id") + ) WITHOUT ROWID; + CREATE TABLE "E2EConfig" ( + "id" INTEGER NOT NULL UNIQUE, + "Config" BLOB NOT NULL, + PRIMARY KEY("id" AUTOINCREMENT) + ); + CREATE TABLE "FileMetaData" ( + "Id" TEXT NOT NULL UNIQUE, + "Name" TEXT NOT NULL, + "Size" TEXT NOT NULL, + "SHA1" TEXT NOT NULL, + "ExpireAt" INTEGER NOT NULL, + "SizeBytes" INTEGER NOT NULL, + "ExpireAtString" TEXT NOT NULL, + "DownloadsRemaining" INTEGER NOT NULL, + "DownloadCount" INTEGER NOT NULL, + "PasswordHash" TEXT NOT NULL, + "HotlinkId" TEXT NOT NULL, + "ContentType" TEXT NOT NULL, + "AwsBucket" TEXT NOT NULL, + "Encryption" BLOB NOT NULL, + "UnlimitedDownloads" INTEGER NOT NULL, + "UnlimitedTime" INTEGER NOT NULL, + PRIMARY KEY("Id") + ); + CREATE TABLE "Hotlinks" ( + "Id" TEXT NOT NULL UNIQUE, + "FileId" TEXT NOT NULL UNIQUE, + PRIMARY KEY("Id") + ) WITHOUT ROWID; + CREATE TABLE "Sessions" ( + "Id" TEXT NOT NULL UNIQUE, + "RenewAt" INTEGER NOT NULL, + "ValidUntil" INTEGER NOT NULL, + PRIMARY KEY("Id") + ) WITHOUT ROWID; + CREATE TABLE "UploadConfig" ( + "id" INTEGER NOT NULL UNIQUE, + "Downloads" INTEGER, + "TimeExpiry" INTEGER, + "Password" TEXT, + "UnlimitedDownloads" INTEGER, + "UnlimitedTime" INTEGER, + PRIMARY KEY("id") + ); + CREATE TABLE "UploadStatus" ( + "ChunkId" TEXT NOT NULL UNIQUE, + "CurrentStatus" INTEGER NOT NULL, + "CreationDate" INTEGER NOT NULL, + PRIMARY KEY("ChunkId") + ) WITHOUT ROWID; +` + err := p.rawSqlite(sqlStmt) + return err +} + +// rawSqlite runs a raw SQL statement. Should only be used for upgrading +func (p DatabaseProvider) rawSqlite(statement string) error { + if p.sqliteDb == nil { + panic("Sqlite not initialised") + } + _, err := p.sqliteDb.Exec(statement) + return err +} diff --git a/internal/configuration/database/provider/sqlite/Sqlite_test.go b/internal/configuration/database/provider/sqlite/Sqlite_test.go new file mode 100644 index 0000000..8b7da03 --- /dev/null +++ b/internal/configuration/database/provider/sqlite/Sqlite_test.go @@ -0,0 +1,543 @@ +//go:build test + +package sqlite + +import ( + "github.com/forceu/gokapi/internal/helper" + "github.com/forceu/gokapi/internal/models" + "github.com/forceu/gokapi/internal/test" + "math" + "os" + "slices" + "sync" + "testing" + "time" +) + +var config = models.DbConnection{ + HostUrl: "./test/newfolder/gokapi.sqlite", + Type: 0, // dbabstraction.TypeSqlite +} + +func TestMain(m *testing.M) { + _ = os.Mkdir("test", 0777) + exitVal := m.Run() + _ = os.RemoveAll("test") + os.Exit(exitVal) +} + +var dbInstance DatabaseProvider + +func TestInit(t *testing.T) { + instance, err := New(config) + test.IsNil(t, err) + test.FolderExists(t, "./test/newfolder") + instance.Close() + err = os.WriteFile("./test/newfolder/gokapi2.sqlite", []byte("invalid"), 0700) + test.IsNil(t, err) + instance, err = New(models.DbConnection{ + HostUrl: "./test/newfolder/gokapi2.sqlite", + Type: 0, // dbabstraction.TypeSqlite + }) + test.IsNotNil(t, err) + _, err = New(models.DbConnection{ + HostUrl: "", + Type: 0, // dbabstraction.TypeSqlite + }) + test.IsNotNil(t, err) +} + +func TestClose(t *testing.T) { + instance, err := New(config) + test.IsNil(t, err) + instance.Close() + instance, err = New(config) + test.IsNil(t, err) + dbInstance = instance +} + +func TestDatabaseProvider_GetDbVersion(t *testing.T) { + version := dbInstance.GetDbVersion() + test.IsEqualInt(t, version, 0) + dbInstance.SetDbVersion(99) + 
test.IsEqualInt(t, dbInstance.GetDbVersion(), 99) + dbInstance.SetDbVersion(0) +} + +func TestMetaData(t *testing.T) { + files := dbInstance.GetAllMetadata() + test.IsEqualInt(t, len(files), 0) + + dbInstance.SaveMetaData(models.File{Id: "testfile", Name: "test.txt", ExpireAt: time.Now().Add(time.Hour).Unix()}) + files = dbInstance.GetAllMetadata() + test.IsEqualInt(t, len(files), 1) + test.IsEqualString(t, files["testfile"].Name, "test.txt") + + file, ok := dbInstance.GetMetaDataById("testfile") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, file.Id, "testfile") + _, ok = dbInstance.GetMetaDataById("invalid") + test.IsEqualBool(t, ok, false) + + test.IsEqualInt(t, len(dbInstance.GetAllMetadata()), 1) + dbInstance.DeleteMetaData("invalid") + test.IsEqualInt(t, len(dbInstance.GetAllMetadata()), 1) + + test.IsEqualBool(t, file.UnlimitedDownloads, false) + test.IsEqualBool(t, file.UnlimitedTime, false) + + dbInstance.DeleteMetaData("testfile") + test.IsEqualInt(t, len(dbInstance.GetAllMetadata()), 0) + + dbInstance.SaveMetaData(models.File{ + Id: "test2", + Name: "test2", + UnlimitedDownloads: true, + UnlimitedTime: false, + }) + + file, ok = dbInstance.GetMetaDataById("test2") + test.IsEqualBool(t, ok, true) + test.IsEqualBool(t, file.UnlimitedDownloads, true) + test.IsEqualBool(t, file.UnlimitedTime, false) + + dbInstance.SaveMetaData(models.File{ + Id: "test3", + Name: "test3", + UnlimitedDownloads: false, + UnlimitedTime: true, + }) + file, ok = dbInstance.GetMetaDataById("test3") + test.IsEqualBool(t, ok, true) + test.IsEqualBool(t, file.UnlimitedDownloads, false) + test.IsEqualBool(t, file.UnlimitedTime, true) + dbInstance.Close() + defer test.ExpectPanic(t) + _ = dbInstance.GetAllMetadata() +} + +func TestDatabaseProvider_GetType(t *testing.T) { + test.IsEqualInt(t, dbInstance.GetType(), 0) +} + +func TestGetAllMetaDataIds(t *testing.T) { + instance, err := New(config) + test.IsNil(t, err) + dbInstance = instance + + ids := dbInstance.GetAllMetaDataIds() + test.IsEqualString(t, ids[0], "test2") + test.IsEqualString(t, ids[1], "test3") + + dbInstance.Close() + defer test.ExpectPanic(t) + _ = dbInstance.GetAllMetaDataIds() +} + +func TestHotlink(t *testing.T) { + instance, err := New(config) + test.IsNil(t, err) + dbInstance = instance + + dbInstance.SaveHotlink(models.File{Id: "testfile", Name: "test.txt", HotlinkId: "testlink", ExpireAt: time.Now().Add(time.Hour).Unix()}) + + hotlink, ok := dbInstance.GetHotlink("testlink") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, hotlink, "testfile") + _, ok = dbInstance.GetHotlink("invalid") + test.IsEqualBool(t, ok, false) + + dbInstance.DeleteHotlink("invalid") + _, ok = dbInstance.GetHotlink("testlink") + test.IsEqualBool(t, ok, true) + dbInstance.DeleteHotlink("testlink") + _, ok = dbInstance.GetHotlink("testlink") + test.IsEqualBool(t, ok, false) + + dbInstance.SaveHotlink(models.File{Id: "testfile", Name: "test.txt", HotlinkId: "testlink", ExpireAt: 0, UnlimitedTime: true}) + hotlink, ok = dbInstance.GetHotlink("testlink") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, hotlink, "testfile") + + dbInstance.SaveHotlink(models.File{Id: "file2", Name: "file2.txt", HotlinkId: "link2", ExpireAt: time.Now().Add(time.Hour).Unix()}) + dbInstance.SaveHotlink(models.File{Id: "file3", Name: "file3.txt", HotlinkId: "link3", ExpireAt: time.Now().Add(time.Hour).Unix()}) + + hotlinks := dbInstance.GetAllHotlinks() + test.IsEqualInt(t, len(hotlinks), 3) + test.IsEqualBool(t, slices.Contains(hotlinks, "testlink"), true) + 
test.IsEqualBool(t, slices.Contains(hotlinks, "link2"), true) + test.IsEqualBool(t, slices.Contains(hotlinks, "link3"), true) + dbInstance.DeleteHotlink("") + hotlinks = dbInstance.GetAllHotlinks() + test.IsEqualInt(t, len(hotlinks), 3) +} + +func TestApiKey(t *testing.T) { + dbInstance.SaveApiKey(models.ApiKey{ + Id: "newkey", + FriendlyName: "New Key", + LastUsed: 100, + Permissions: 20, + }) + dbInstance.SaveApiKey(models.ApiKey{ + Id: "newkey2", + FriendlyName: "New Key2", + LastUsed: 200, + Permissions: 40, + }) + + keys := dbInstance.GetAllApiKeys() + test.IsEqualInt(t, len(keys), 2) + test.IsEqualString(t, keys["newkey"].FriendlyName, "New Key") + test.IsEqualString(t, keys["newkey"].Id, "newkey") + test.IsEqualInt64(t, keys["newkey"].LastUsed, 100) + test.IsEqualBool(t, keys["newkey"].Permissions == 20, true) + + test.IsEqualInt(t, len(dbInstance.GetAllApiKeys()), 2) + dbInstance.DeleteApiKey("newkey2") + test.IsEqualInt(t, len(dbInstance.GetAllApiKeys()), 1) + + key, ok := dbInstance.GetApiKey("newkey") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, key.FriendlyName, "New Key") + _, ok = dbInstance.GetApiKey("newkey2") + test.IsEqualBool(t, ok, false) + + dbInstance.SaveApiKey(models.ApiKey{ + Id: "newkey", + FriendlyName: "Old Key", + LastUsed: 100, + }) + key, ok = dbInstance.GetApiKey("newkey") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, key.FriendlyName, "Old Key") +} + +func TestSession(t *testing.T) { + renewAt := time.Now().Add(1 * time.Hour).Unix() + dbInstance.SaveSession("newsession", models.Session{ + RenewAt: renewAt, + ValidUntil: time.Now().Add(2 * time.Hour).Unix(), + }) + + session, ok := dbInstance.GetSession("newsession") + test.IsEqualBool(t, ok, true) + test.IsEqualBool(t, session.RenewAt == renewAt, true) + + dbInstance.DeleteSession("newsession") + _, ok = dbInstance.GetSession("newsession") + test.IsEqualBool(t, ok, false) + + dbInstance.SaveSession("newsession", models.Session{ + RenewAt: renewAt, + ValidUntil: time.Now().Add(2 * time.Hour).Unix(), + }) + + dbInstance.SaveSession("anothersession", models.Session{ + RenewAt: renewAt, + ValidUntil: time.Now().Add(2 * time.Hour).Unix(), + }) + _, ok = dbInstance.GetSession("newsession") + test.IsEqualBool(t, ok, true) + _, ok = dbInstance.GetSession("anothersession") + test.IsEqualBool(t, ok, true) + + dbInstance.DeleteAllSessions() + _, ok = dbInstance.GetSession("newsession") + test.IsEqualBool(t, ok, false) + _, ok = dbInstance.GetSession("anothersession") + test.IsEqualBool(t, ok, false) +} + +func TestUploadDefaults(t *testing.T) { + defaults, ok := dbInstance.GetUploadDefaults() + test.IsEqualBool(t, ok, false) + dbInstance.SaveUploadDefaults(models.LastUploadValues{ + Downloads: 20, + TimeExpiry: 30, + Password: "abcd", + UnlimitedDownload: true, + UnlimitedTime: true, + }) + defaults, ok = dbInstance.GetUploadDefaults() + test.IsEqualBool(t, ok, true) + test.IsEqualInt(t, defaults.Downloads, 20) + test.IsEqualInt(t, defaults.TimeExpiry, 30) + test.IsEqualString(t, defaults.Password, "abcd") + test.IsEqualBool(t, defaults.UnlimitedDownload, true) + test.IsEqualBool(t, defaults.UnlimitedTime, true) +} + +func TestGarbageCollectionUploads(t *testing.T) { + orgiginalFunc := currentTime + currentTime = func() time.Time { + return time.Now().Add(-25 * time.Hour) + } + dbInstance.SaveUploadStatus(models.UploadStatus{ + ChunkId: "ctodelete1", + CurrentStatus: 0, + }) + dbInstance.SaveUploadStatus(models.UploadStatus{ + ChunkId: "ctodelete2", + CurrentStatus: 1, + }) + 
dbInstance.SaveUploadStatus(models.UploadStatus{ + ChunkId: "ctodelete3", + CurrentStatus: 0, + }) + dbInstance.SaveUploadStatus(models.UploadStatus{ + ChunkId: "ctodelete4", + CurrentStatus: 0, + }) + dbInstance.SaveUploadStatus(models.UploadStatus{ + ChunkId: "ctodelete5", + CurrentStatus: 1, + }) + currentTime = orgiginalFunc + + dbInstance.SaveUploadStatus(models.UploadStatus{ + ChunkId: "ctokeep1", + CurrentStatus: 0, + }) + dbInstance.SaveUploadStatus(models.UploadStatus{ + ChunkId: "ctokeep2", + CurrentStatus: 1, + }) + dbInstance.SaveUploadStatus(models.UploadStatus{ + ChunkId: "ctokeep3", + CurrentStatus: 0, + }) + dbInstance.SaveUploadStatus(models.UploadStatus{ + ChunkId: "ctokeep4", + CurrentStatus: 0, + }) + dbInstance.SaveUploadStatus(models.UploadStatus{ + ChunkId: "ctokeep5", + CurrentStatus: 1, + }) + for _, item := range []string{"ctodelete1", "ctodelete2", "ctodelete3", "ctodelete4", "ctokeep1", "ctokeep2", "ctokeep3", "ctokeep4"} { + _, result := dbInstance.GetUploadStatus(item) + test.IsEqualBool(t, result, true) + } + dbInstance.RunGarbageCollection() + for _, item := range []string{"ctodelete1", "ctodelete2", "ctodelete3", "ctodelete4"} { + _, result := dbInstance.GetUploadStatus(item) + test.IsEqualBool(t, result, false) + } + for _, item := range []string{"ctokeep1", "ctokeep2", "ctokeep3", "ctokeep4"} { + _, result := dbInstance.GetUploadStatus(item) + test.IsEqualBool(t, result, true) + } +} + +func TestGarbageCollectionSessions(t *testing.T) { + dbInstance.SaveSession("todelete1", models.Session{ + RenewAt: time.Now().Add(-10 * time.Second).Unix(), + ValidUntil: time.Now().Add(-10 * time.Second).Unix(), + }) + dbInstance.SaveSession("todelete2", models.Session{ + RenewAt: time.Now().Add(10 * time.Second).Unix(), + ValidUntil: time.Now().Add(-10 * time.Second).Unix(), + }) + dbInstance.SaveSession("tokeep1", models.Session{ + RenewAt: time.Now().Add(-10 * time.Second).Unix(), + ValidUntil: time.Now().Add(10 * time.Second).Unix(), + }) + dbInstance.SaveSession("tokeep2", models.Session{ + RenewAt: time.Now().Add(10 * time.Second).Unix(), + ValidUntil: time.Now().Add(10 * time.Second).Unix(), + }) + for _, item := range []string{"todelete1", "todelete2", "tokeep1", "tokeep2"} { + _, result := dbInstance.GetSession(item) + test.IsEqualBool(t, result, true) + } + dbInstance.RunGarbageCollection() + for _, item := range []string{"todelete1", "todelete2"} { + _, result := dbInstance.GetSession(item) + test.IsEqualBool(t, result, false) + } + for _, item := range []string{"tokeep1", "tokeep2"} { + _, result := dbInstance.GetSession(item) + test.IsEqualBool(t, result, true) + } +} + +func TestEnd2EndInfo(t *testing.T) { + info := dbInstance.GetEnd2EndInfo() + test.IsEqualInt(t, info.Version, 0) + test.IsEqualBool(t, info.HasBeenSetUp(), false) + + dbInstance.SaveEnd2EndInfo(models.E2EInfoEncrypted{ + Version: 1, + Nonce: []byte("testNonce1"), + Content: []byte("testContent1"), + AvailableFiles: nil, + }) + + info = dbInstance.GetEnd2EndInfo() + test.IsEqualInt(t, info.Version, 1) + test.IsEqualBool(t, info.HasBeenSetUp(), true) + test.IsEqualByteSlice(t, info.Nonce, []byte("testNonce1")) + test.IsEqualByteSlice(t, info.Content, []byte("testContent1")) + test.IsEqualBool(t, len(info.AvailableFiles) == 0, true) + + dbInstance.SaveEnd2EndInfo(models.E2EInfoEncrypted{ + Version: 2, + Nonce: []byte("testNonce2"), + Content: []byte("testContent2"), + AvailableFiles: nil, + }) + + info = dbInstance.GetEnd2EndInfo() + test.IsEqualInt(t, info.Version, 2) + test.IsEqualBool(t, 
info.HasBeenSetUp(), true) + test.IsEqualByteSlice(t, info.Nonce, []byte("testNonce2")) + test.IsEqualByteSlice(t, info.Content, []byte("testContent2")) + test.IsEqualBool(t, len(info.AvailableFiles) == 0, true) + + dbInstance.DeleteEnd2EndInfo() + info = dbInstance.GetEnd2EndInfo() + test.IsEqualInt(t, info.Version, 0) + test.IsEqualBool(t, info.HasBeenSetUp(), false) +} + +func TestUpdateTimeApiKey(t *testing.T) { + retrievedKey, ok := dbInstance.GetApiKey("key1") + test.IsEqualBool(t, ok, false) + test.IsEqualString(t, retrievedKey.Id, "") + + key := models.ApiKey{ + Id: "key1", + FriendlyName: "key1", + LastUsed: 100, + } + dbInstance.SaveApiKey(key) + key = models.ApiKey{ + Id: "key2", + FriendlyName: "key2", + LastUsed: 200, + } + dbInstance.SaveApiKey(key) + + retrievedKey, ok = dbInstance.GetApiKey("key1") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, retrievedKey.Id, "key1") + test.IsEqualInt64(t, retrievedKey.LastUsed, 100) + retrievedKey, ok = dbInstance.GetApiKey("key2") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, retrievedKey.Id, "key2") + test.IsEqualInt64(t, retrievedKey.LastUsed, 200) + + key.LastUsed = 300 + dbInstance.UpdateTimeApiKey(key) + + retrievedKey, ok = dbInstance.GetApiKey("key1") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, retrievedKey.Id, "key1") + test.IsEqualInt64(t, retrievedKey.LastUsed, 100) + retrievedKey, ok = dbInstance.GetApiKey("key2") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, retrievedKey.Id, "key2") + test.IsEqualInt64(t, retrievedKey.LastUsed, 300) +} + +func TestParallelConnectionsWritingAndReading(t *testing.T) { + var wg sync.WaitGroup + + simulatedConnection := func(t *testing.T) { + file := models.File{ + Id: helper.GenerateRandomString(10), + Name: helper.GenerateRandomString(10), + Size: "10B", + SHA1: "1289423794287598237489", + ExpireAt: math.MaxInt, + SizeBytes: 10, + ExpireAtString: "Never", + DownloadsRemaining: 10, + DownloadCount: 10, + PasswordHash: "", + HotlinkId: "", + ContentType: "", + AwsBucket: "", + Encryption: models.EncryptionInfo{}, + UnlimitedDownloads: false, + UnlimitedTime: false, + } + dbInstance.SaveMetaData(file) + retrievedFile, ok := dbInstance.GetMetaDataById(file.Id) + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, retrievedFile.Name, file.Name) + dbInstance.DeleteMetaData(file.Id) + _, ok = dbInstance.GetMetaDataById(file.Id) + test.IsEqualBool(t, ok, false) + } + + for i := 1; i <= 100; i++ { + wg.Add(1) + go func() { + defer wg.Done() + simulatedConnection(t) + }() + } + wg.Wait() +} + +func TestParallelConnectionsReading(t *testing.T) { + var wg sync.WaitGroup + + dbInstance.SaveApiKey(models.ApiKey{ + Id: "readtest", + FriendlyName: "readtest", + LastUsed: 40000, + }) + simulatedConnection := func(t *testing.T) { + _, ok := dbInstance.GetApiKey("readtest") + test.IsEqualBool(t, ok, true) + } + + for i := 1; i <= 1000; i++ { + wg.Add(1) + go func() { + defer wg.Done() + simulatedConnection(t) + }() + } + wg.Wait() +} + +func TestUploadStatus(t *testing.T) { + allStatus := dbInstance.GetAllUploadStatus() + found := false + test.IsEqualInt(t, len(allStatus), 5) + for _, status := range allStatus { + if status.ChunkId == "ctokeep5" { + found = true + } + } + test.IsEqualBool(t, found, true) + newStatus := models.UploadStatus{ + ChunkId: "testid", + CurrentStatus: 1, + } + retrievedStatus, ok := dbInstance.GetUploadStatus("testid") + test.IsEqualBool(t, ok, false) + test.IsEqualBool(t, retrievedStatus == models.UploadStatus{}, true) + 
dbInstance.SaveUploadStatus(newStatus) + retrievedStatus, ok = dbInstance.GetUploadStatus("testid") + test.IsEqualBool(t, ok, true) + test.IsEqualString(t, retrievedStatus.ChunkId, "testid") + test.IsEqualInt(t, retrievedStatus.CurrentStatus, 1) + allStatus = dbInstance.GetAllUploadStatus() + test.IsEqualInt(t, len(allStatus), 6) +} + +func TestDatabaseProvider_Upgrade(t *testing.T) { + dbInstance.Upgrade(0) +} + +func TestRawSql(t *testing.T) { + dbInstance.Close() + dbInstance.sqliteDb = nil + defer test.ExpectPanic(t) + _ = dbInstance.rawSqlite("Select * from Sessions") +} diff --git a/internal/configuration/database/provider/sqlite/apikeys.go b/internal/configuration/database/provider/sqlite/apikeys.go new file mode 100644 index 0000000..8390ba6 --- /dev/null +++ b/internal/configuration/database/provider/sqlite/apikeys.go @@ -0,0 +1,79 @@ +package sqlite + +import ( + "database/sql" + "errors" + "github.com/forceu/gokapi/internal/helper" + "github.com/forceu/gokapi/internal/models" +) + +type schemaApiKeys struct { + Id string + FriendlyName string + LastUsed int64 + Permissions int +} + +// GetAllApiKeys returns a map with all API keys +func (p DatabaseProvider) GetAllApiKeys() map[string]models.ApiKey { + result := make(map[string]models.ApiKey) + + rows, err := p.sqliteDb.Query("SELECT * FROM ApiKeys") + helper.Check(err) + defer rows.Close() + for rows.Next() { + rowData := schemaApiKeys{} + err = rows.Scan(&rowData.Id, &rowData.FriendlyName, &rowData.LastUsed, &rowData.Permissions) + helper.Check(err) + result[rowData.Id] = models.ApiKey{ + Id: rowData.Id, + FriendlyName: rowData.FriendlyName, + LastUsed: rowData.LastUsed, + Permissions: uint8(rowData.Permissions), + } + } + return result +} + +// GetApiKey returns a models.ApiKey if valid or false if the ID is not valid +func (p DatabaseProvider) GetApiKey(id string) (models.ApiKey, bool) { + var rowResult schemaApiKeys + row := p.sqliteDb.QueryRow("SELECT * FROM ApiKeys WHERE Id = ?", id) + err := row.Scan(&rowResult.Id, &rowResult.FriendlyName, &rowResult.LastUsed, &rowResult.Permissions) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return models.ApiKey{}, false + } + helper.Check(err) + return models.ApiKey{}, false + } + + result := models.ApiKey{ + Id: rowResult.Id, + FriendlyName: rowResult.FriendlyName, + LastUsed: rowResult.LastUsed, + Permissions: uint8(rowResult.Permissions), + } + + return result, true +} + +// SaveApiKey saves the API key to the database +func (p DatabaseProvider) SaveApiKey(apikey models.ApiKey) { + _, err := p.sqliteDb.Exec("INSERT OR REPLACE INTO ApiKeys (Id, FriendlyName, LastUsed, Permissions) VALUES (?, ?, ?, ?)", + apikey.Id, apikey.FriendlyName, apikey.LastUsed, apikey.Permissions) + helper.Check(err) +} + +// UpdateTimeApiKey writes the content of LastUsage to the database +func (p DatabaseProvider) UpdateTimeApiKey(apikey models.ApiKey) { + _, err := p.sqliteDb.Exec("UPDATE ApiKeys SET LastUsed = ? 
WHERE Id = ?", + apikey.LastUsed, apikey.Id) + helper.Check(err) +} + +// DeleteApiKey deletes an API key with the given ID +func (p DatabaseProvider) DeleteApiKey(id string) { + _, err := p.sqliteDb.Exec("DELETE FROM ApiKeys WHERE Id = ?", id) + helper.Check(err) +} diff --git a/internal/configuration/database/e2econfig.go b/internal/configuration/database/provider/sqlite/e2econfig.go similarity index 70% rename from internal/configuration/database/e2econfig.go rename to internal/configuration/database/provider/sqlite/e2econfig.go index 2d98607..faf6838 100644 --- a/internal/configuration/database/e2econfig.go +++ b/internal/configuration/database/provider/sqlite/e2econfig.go @@ -1,4 +1,4 @@ -package database +package sqlite import ( "bytes" @@ -15,9 +15,7 @@ type schemaE2EConfig struct { } // SaveEnd2EndInfo stores the encrypted e2e info -func SaveEnd2EndInfo(info models.E2EInfoEncrypted) { - - info.AvailableFiles = nil +func (p DatabaseProvider) SaveEnd2EndInfo(info models.E2EInfoEncrypted) { var buf bytes.Buffer enc := gob.NewEncoder(&buf) err := enc.Encode(info) @@ -28,17 +26,17 @@ func SaveEnd2EndInfo(info models.E2EInfoEncrypted) { Config: buf.Bytes(), } - _, err = sqliteDb.Exec("INSERT OR REPLACE INTO E2EConfig (id, Config) VALUES (?, ?)", + _, err = p.sqliteDb.Exec("INSERT OR REPLACE INTO E2EConfig (id, Config) VALUES (?, ?)", newData.Id, newData.Config) helper.Check(err) } // GetEnd2EndInfo retrieves the encrypted e2e info -func GetEnd2EndInfo() models.E2EInfoEncrypted { +func (p DatabaseProvider) GetEnd2EndInfo() models.E2EInfoEncrypted { result := models.E2EInfoEncrypted{} rowResult := schemaE2EConfig{} - row := sqliteDb.QueryRow("SELECT Config FROM E2EConfig WHERE id = 1") + row := p.sqliteDb.QueryRow("SELECT Config FROM E2EConfig WHERE id = 1") err := row.Scan(&rowResult.Config) if err != nil { if errors.Is(err, sql.ErrNoRows) { @@ -52,14 +50,12 @@ func GetEnd2EndInfo() models.E2EInfoEncrypted { dec := gob.NewDecoder(buf) err = dec.Decode(&result) helper.Check(err) - - result.AvailableFiles = GetAllMetaDataIds() return result } // DeleteEnd2EndInfo resets the encrypted e2e info -func DeleteEnd2EndInfo() { +func (p DatabaseProvider) DeleteEnd2EndInfo() { //goland:noinspection SqlWithoutWhere - _, err := sqliteDb.Exec("DELETE FROM E2EConfig") + _, err := p.sqliteDb.Exec("DELETE FROM E2EConfig") helper.Check(err) } diff --git a/internal/configuration/database/hotlinks.go b/internal/configuration/database/provider/sqlite/hotlinks.go similarity index 64% rename from internal/configuration/database/hotlinks.go rename to internal/configuration/database/provider/sqlite/hotlinks.go index 6ffffc9..7716bbd 100644 --- a/internal/configuration/database/hotlinks.go +++ b/internal/configuration/database/provider/sqlite/hotlinks.go @@ -1,4 +1,4 @@ -package database +package sqlite import ( "database/sql" @@ -13,9 +13,9 @@ type schemaHotlinks struct { } // GetHotlink returns the id of the file associated or false if not found -func GetHotlink(id string) (string, bool) { +func (p DatabaseProvider) GetHotlink(id string) (string, bool) { var rowResult schemaHotlinks - row := sqliteDb.QueryRow("SELECT FileId FROM Hotlinks WHERE Id = ?", id) + row := p.sqliteDb.QueryRow("SELECT FileId FROM Hotlinks WHERE Id = ?", id) err := row.Scan(&rowResult.FileId) if err != nil { if errors.Is(err, sql.ErrNoRows) { @@ -28,9 +28,9 @@ func GetHotlink(id string) (string, bool) { } // GetAllHotlinks returns an array with all hotlink ids -func GetAllHotlinks() []string { - var ids []string - rows, err := 
sqliteDb.Query("SELECT Id FROM Hotlinks") +func (p DatabaseProvider) GetAllHotlinks() []string { + ids := make([]string, 0) + rows, err := p.sqliteDb.Query("SELECT Id FROM Hotlinks") helper.Check(err) defer rows.Close() for rows.Next() { @@ -43,22 +43,22 @@ func GetAllHotlinks() []string { } // SaveHotlink stores the hotlink associated with the file in the database -func SaveHotlink(file models.File) { +func (p DatabaseProvider) SaveHotlink(file models.File) { newData := schemaHotlinks{ Id: file.HotlinkId, FileId: file.Id, } - _, err := sqliteDb.Exec("INSERT OR REPLACE INTO Hotlinks (Id, FileId) VALUES (?, ?)", + _, err := p.sqliteDb.Exec("INSERT OR REPLACE INTO Hotlinks (Id, FileId) VALUES (?, ?)", newData.Id, newData.FileId) helper.Check(err) } // DeleteHotlink deletes a hotlink with the given hotlink ID -func DeleteHotlink(id string) { +func (p DatabaseProvider) DeleteHotlink(id string) { if id == "" { return } - _, err := sqliteDb.Exec("DELETE FROM Hotlinks WHERE Id = ?", id) + _, err := p.sqliteDb.Exec("DELETE FROM Hotlinks WHERE Id = ?", id) helper.Check(err) } diff --git a/internal/configuration/database/metadata.go b/internal/configuration/database/provider/sqlite/metadata.go similarity index 85% rename from internal/configuration/database/metadata.go rename to internal/configuration/database/provider/sqlite/metadata.go index 452d673..7a3aa92 100644 --- a/internal/configuration/database/metadata.go +++ b/internal/configuration/database/provider/sqlite/metadata.go @@ -1,4 +1,4 @@ -package database +package sqlite import ( "bytes" @@ -55,12 +55,12 @@ func (rowData schemaMetaData) ToFileModel() (models.File, error) { } // GetAllMetadata returns a map of all available files -func GetAllMetadata() map[string]models.File { - if sqliteDb == nil { +func (p DatabaseProvider) GetAllMetadata() map[string]models.File { + if p.sqliteDb == nil { panic("Database not loaded!") } result := make(map[string]models.File) - rows, err := sqliteDb.Query("SELECT * FROM FileMetaData") + rows, err := p.sqliteDb.Query("SELECT * FROM FileMetaData") helper.Check(err) defer rows.Close() for rows.Next() { @@ -79,12 +79,12 @@ func GetAllMetadata() map[string]models.File { } // GetAllMetaDataIds returns all Ids that contain metadata -func GetAllMetaDataIds() []string { - if sqliteDb == nil { +func (p DatabaseProvider) GetAllMetaDataIds() []string { + if p.sqliteDb == nil { panic("Database not loaded!") } - var keys []string - rows, err := sqliteDb.Query("SELECT Id FROM FileMetaData") + keys := make([]string, 0) + rows, err := p.sqliteDb.Query("SELECT Id FROM FileMetaData") helper.Check(err) defer rows.Close() for rows.Next() { @@ -97,11 +97,11 @@ func GetAllMetaDataIds() []string { } // GetMetaDataById returns a models.File from the ID passed or false if the id is not valid -func GetMetaDataById(id string) (models.File, bool) { +func (p DatabaseProvider) GetMetaDataById(id string) (models.File, bool) { result := models.File{} rowData := schemaMetaData{} - row := sqliteDb.QueryRow("SELECT * FROM FileMetaData WHERE Id = ?", id) + row := p.sqliteDb.QueryRow("SELECT * FROM FileMetaData WHERE Id = ?", id) err := row.Scan(&rowData.Id, &rowData.Name, &rowData.Size, &rowData.SHA1, &rowData.ExpireAt, &rowData.SizeBytes, &rowData.ExpireAtString, &rowData.DownloadsRemaining, &rowData.DownloadCount, &rowData.PasswordHash, &rowData.HotlinkId, &rowData.ContentType, &rowData.AwsBucket, &rowData.Encryption, @@ -119,7 +119,7 @@ func GetMetaDataById(id string) (models.File, bool) { } // SaveMetaData stores the metadata of a file to 
the disk -func SaveMetaData(file models.File) { +func (p DatabaseProvider) SaveMetaData(file models.File) { newData := schemaMetaData{ Id: file.Id, Name: file.Name, @@ -149,7 +149,7 @@ func SaveMetaData(file models.File) { helper.Check(err) newData.Encryption = buf.Bytes() - _, err = sqliteDb.Exec(`INSERT OR REPLACE INTO FileMetaData (Id, Name, Size, SHA1, ExpireAt, SizeBytes, ExpireAtString, + _, err = p.sqliteDb.Exec(`INSERT OR REPLACE INTO FileMetaData (Id, Name, Size, SHA1, ExpireAt, SizeBytes, ExpireAtString, DownloadsRemaining, DownloadCount, PasswordHash, HotlinkId, ContentType, AwsBucket, Encryption, UnlimitedDownloads, UnlimitedTime) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, newData.Id, newData.Name, newData.Size, newData.SHA1, newData.ExpireAt, newData.SizeBytes, newData.ExpireAtString, @@ -159,7 +159,7 @@ func SaveMetaData(file models.File) { } // DeleteMetaData deletes information about a file -func DeleteMetaData(id string) { - _, err := sqliteDb.Exec("DELETE FROM FileMetaData WHERE Id = ?", id) +func (p DatabaseProvider) DeleteMetaData(id string) { + _, err := p.sqliteDb.Exec("DELETE FROM FileMetaData WHERE Id = ?", id) helper.Check(err) } diff --git a/internal/configuration/database/sessions.go b/internal/configuration/database/provider/sqlite/sessions.go similarity index 62% rename from internal/configuration/database/sessions.go rename to internal/configuration/database/provider/sqlite/sessions.go index b4e83f0..f9bc1b7 100644 --- a/internal/configuration/database/sessions.go +++ b/internal/configuration/database/provider/sqlite/sessions.go @@ -1,4 +1,4 @@ -package database +package sqlite import ( "database/sql" @@ -15,9 +15,9 @@ type schemaSessions struct { } // GetSession returns the session with the given ID or false if not a valid ID -func GetSession(id string) (models.Session, bool) { +func (p DatabaseProvider) GetSession(id string) (models.Session, bool) { var rowResult schemaSessions - row := sqliteDb.QueryRow("SELECT * FROM Sessions WHERE Id = ?", id) + row := p.sqliteDb.QueryRow("SELECT * FROM Sessions WHERE Id = ?", id) err := row.Scan(&rowResult.Id, &rowResult.RenewAt, &rowResult.ValidUntil) if err != nil { if errors.Is(err, sql.ErrNoRows) { @@ -34,32 +34,32 @@ func GetSession(id string) (models.Session, bool) { } // SaveSession stores the given session. 
After the expiry passed, it will be deleted automatically -func SaveSession(id string, session models.Session) { +func (p DatabaseProvider) SaveSession(id string, session models.Session) { newData := schemaSessions{ Id: id, RenewAt: session.RenewAt, ValidUntil: session.ValidUntil, } - _, err := sqliteDb.Exec("INSERT OR REPLACE INTO Sessions (Id, RenewAt, ValidUntil) VALUES (?, ?, ?)", + _, err := p.sqliteDb.Exec("INSERT OR REPLACE INTO Sessions (Id, RenewAt, ValidUntil) VALUES (?, ?, ?)", newData.Id, newData.RenewAt, newData.ValidUntil) helper.Check(err) } // DeleteSession deletes a session with the given ID -func DeleteSession(id string) { - _, err := sqliteDb.Exec("DELETE FROM Sessions WHERE Id = ?", id) +func (p DatabaseProvider) DeleteSession(id string) { + _, err := p.sqliteDb.Exec("DELETE FROM Sessions WHERE Id = ?", id) helper.Check(err) } // DeleteAllSessions logs all users out -func DeleteAllSessions() { +func (p DatabaseProvider) DeleteAllSessions() { //goland:noinspection SqlWithoutWhere - _, err := sqliteDb.Exec("DELETE FROM Sessions") + _, err := p.sqliteDb.Exec("DELETE FROM Sessions") helper.Check(err) } -func cleanExpiredSessions() { - _, err := sqliteDb.Exec("DELETE FROM Sessions WHERE Sessions.ValidUntil < ?", time.Now().Unix()) +func (p DatabaseProvider) cleanExpiredSessions() { + _, err := p.sqliteDb.Exec("DELETE FROM Sessions WHERE Sessions.ValidUntil < ?", time.Now().Unix()) helper.Check(err) } diff --git a/internal/configuration/database/uploaddefaults.go b/internal/configuration/database/provider/sqlite/uploaddefaults.go similarity index 70% rename from internal/configuration/database/uploaddefaults.go rename to internal/configuration/database/provider/sqlite/uploaddefaults.go index 9aad005..ee664e2 100644 --- a/internal/configuration/database/uploaddefaults.go +++ b/internal/configuration/database/provider/sqlite/uploaddefaults.go @@ -1,4 +1,4 @@ -package database +package sqlite import ( "database/sql" @@ -18,24 +18,16 @@ type schemaUploadConfig struct { // GetUploadDefaults returns the last used setting for amount of downloads allowed, last expiry in days and // a password for the file -func GetUploadDefaults() models.LastUploadValues { - defaultValues := models.LastUploadValues{ - Downloads: 1, - TimeExpiry: 14, - Password: "", - UnlimitedDownload: false, - UnlimitedTime: false, - } - +func (p DatabaseProvider) GetUploadDefaults() (models.LastUploadValues, bool) { rowResult := schemaUploadConfig{} - row := sqliteDb.QueryRow("SELECT * FROM UploadConfig WHERE id = 1") + row := p.sqliteDb.QueryRow("SELECT * FROM UploadConfig WHERE id = 1") err := row.Scan(&rowResult.Id, &rowResult.Downloads, &rowResult.TimeExpiry, &rowResult.Password, &rowResult.UnlimitedDownloads, &rowResult.UnlimitedTime) if err != nil { if errors.Is(err, sql.ErrNoRows) { - return defaultValues + return models.LastUploadValues{}, false } helper.Check(err) - return defaultValues + return models.LastUploadValues{}, false } result := models.LastUploadValues{ @@ -45,11 +37,11 @@ func GetUploadDefaults() models.LastUploadValues { UnlimitedDownload: rowResult.UnlimitedDownloads == 1, UnlimitedTime: rowResult.UnlimitedTime == 1, } - return result + return result, true } // SaveUploadDefaults saves the last used setting for an upload -func SaveUploadDefaults(values models.LastUploadValues) { +func (p DatabaseProvider) SaveUploadDefaults(values models.LastUploadValues) { newData := schemaUploadConfig{ Downloads: values.Downloads, @@ -63,7 +55,7 @@ func SaveUploadDefaults(values models.LastUploadValues) { 
newData.UnlimitedTime = 1 } - _, err := sqliteDb.Exec("INSERT OR REPLACE INTO UploadConfig (id, Downloads,TimeExpiry,Password,UnlimitedDownloads,UnlimitedTime) VALUES (1, ?, ?, ?, ?, ?)", + _, err := p.sqliteDb.Exec("INSERT OR REPLACE INTO UploadConfig (id, Downloads,TimeExpiry,Password,UnlimitedDownloads,UnlimitedTime) VALUES (1, ?, ?, ?, ?, ?)", newData.Downloads, newData.TimeExpiry, newData.Password, newData.UnlimitedDownloads, newData.UnlimitedTime) helper.Check(err) } diff --git a/internal/configuration/database/uploadstatus.go b/internal/configuration/database/provider/sqlite/uploadstatus.go similarity index 70% rename from internal/configuration/database/uploadstatus.go rename to internal/configuration/database/provider/sqlite/uploadstatus.go index 3bb6d7b..6df79d5 100644 --- a/internal/configuration/database/uploadstatus.go +++ b/internal/configuration/database/provider/sqlite/uploadstatus.go @@ -1,4 +1,4 @@ -package database +package sqlite import ( "database/sql" @@ -11,14 +11,13 @@ import ( type schemaUploadStatus struct { ChunkId string CurrentStatus int - LastUpdate int64 CreationDate int64 } // GetAllUploadStatus returns all UploadStatus values from the past 24 hours -func GetAllUploadStatus() []models.UploadStatus { - var result []models.UploadStatus - rows, err := sqliteDb.Query("SELECT * FROM UploadStatus") +func (p DatabaseProvider) GetAllUploadStatus() []models.UploadStatus { + var result = make([]models.UploadStatus, 0) + rows, err := p.sqliteDb.Query("SELECT * FROM UploadStatus") helper.Check(err) defer rows.Close() for rows.Next() { @@ -34,14 +33,14 @@ func GetAllUploadStatus() []models.UploadStatus { } // GetUploadStatus returns a models.UploadStatus from the ID passed or false if the id is not valid -func GetUploadStatus(id string) (models.UploadStatus, bool) { +func (p DatabaseProvider) GetUploadStatus(id string) (models.UploadStatus, bool) { result := models.UploadStatus{ ChunkId: id, CurrentStatus: 0, } var rowResult schemaUploadStatus - row := sqliteDb.QueryRow("SELECT * FROM UploadStatus WHERE ChunkId = ?", id) + row := p.sqliteDb.QueryRow("SELECT * FROM UploadStatus WHERE ChunkId = ?", id) err := row.Scan(&rowResult.ChunkId, &rowResult.CurrentStatus, &rowResult.CreationDate) if err != nil { if errors.Is(err, sql.ErrNoRows) { @@ -60,19 +59,19 @@ var currentTime = func() time.Time { } // SaveUploadStatus stores the upload status of a new file for 24 hours -func SaveUploadStatus(status models.UploadStatus) { +func (p DatabaseProvider) SaveUploadStatus(status models.UploadStatus) { newData := schemaUploadStatus{ ChunkId: status.ChunkId, CurrentStatus: status.CurrentStatus, CreationDate: currentTime().Unix(), } - _, err := sqliteDb.Exec("INSERT OR REPLACE INTO UploadStatus (ChunkId, CurrentStatus, CreationDate) VALUES (?, ?, ?)", + _, err := p.sqliteDb.Exec("INSERT OR REPLACE INTO UploadStatus (ChunkId, CurrentStatus, CreationDate) VALUES (?, ?, ?)", newData.ChunkId, newData.CurrentStatus, newData.CreationDate) helper.Check(err) } -func cleanUploadStatus() { - _, err := sqliteDb.Exec("DELETE FROM UploadStatus WHERE CreationDate < ?", currentTime().Add(-time.Hour*24).Unix()) +func (p DatabaseProvider) cleanUploadStatus() { + _, err := p.sqliteDb.Exec("DELETE FROM UploadStatus WHERE CreationDate < ?", currentTime().Add(-time.Hour*24).Unix()) helper.Check(err) } diff --git a/internal/configuration/setup/Setup.go b/internal/configuration/setup/Setup.go index 8efcbd6..e47388d 100644 --- a/internal/configuration/setup/Setup.go +++ b/internal/configuration/setup/Setup.go 
@@ -10,6 +10,7 @@ import ( "github.com/forceu/gokapi/internal/configuration/cloudconfig" "github.com/forceu/gokapi/internal/configuration/configupgrade" "github.com/forceu/gokapi/internal/configuration/database" + "github.com/forceu/gokapi/internal/configuration/database/dbabstraction" "github.com/forceu/gokapi/internal/encryption" "github.com/forceu/gokapi/internal/environment" "github.com/forceu/gokapi/internal/helper" @@ -23,6 +24,7 @@ import ( "log" "net" "net/http" + "net/url" "os" "runtime" "strconv" @@ -242,6 +244,11 @@ func toConfiguration(formObjects *[]jsonFormObject) (models.Configuration, *clou result.Authentication = configuration.Get().Authentication } + err = parseDatabaseSettings(&result, formObjects) + if err != nil { + return models.Configuration{}, nil, err + } + err = parseBasicAuthSettings(&result, formObjects) if err != nil { return models.Configuration{}, nil, err @@ -276,6 +283,80 @@ func toConfiguration(formObjects *[]jsonFormObject) (models.Configuration, *clou return result, cloudSettings, nil } +func parseDatabaseSettings(result *models.Configuration, formObjects *[]jsonFormObject) error { + dbType, err := getFormValueInt(formObjects, "dbtype_sel") + if err != nil { + return err + } + err = checkForAllDbValues(formObjects) + if err != nil { + return err + } + switch dbType { + case dbabstraction.TypeSqlite: + location, err := getFormValueString(formObjects, "sqlite_location") + if err != nil { + return err + } + result.DatabaseUrl = "sqlite://" + location + return nil + case dbabstraction.TypeRedis: + host, err := getFormValueString(formObjects, "redis_location") + if err != nil { + return err + } + prefix, err := getFormValueString(formObjects, "redis_prefix") + if err != nil { + return err + } + rUser, err := getFormValueString(formObjects, "redis_user") + if err != nil { + return err + } + rPassword, err := getFormValueString(formObjects, "redis_password") + if err != nil { + return err + } + useSsl, err := getFormValueBool(formObjects, "redis_ssl_sel") + if err != nil { + return err + } + dbUrl := url.URL{ + Scheme: "redis", + Host: host, + } + query := url.Values{} + if prefix != "" { + query.Set("prefix", prefix) + } + if useSsl { + query.Set("ssl", "true") + } + if rUser != "" || rPassword != "" { + dbUrl.User = url.UserPassword(rUser, rPassword) + } + dbUrl.RawQuery = query.Encode() + result.DatabaseUrl = dbUrl.String() + return nil + default: + return errors.New("unsupported database selected") + } +} + +// checkForAllDbValues tests if all values were passed, even if they were not required for this particular database +// This is done to ensure that no invalid form was passed and makes testing easier +func checkForAllDbValues(formObjects *[]jsonFormObject) error { + expectedValues := []string{"dbtype_sel", "sqlite_location", "redis_location", "redis_prefix", "redis_user", "redis_password"} + for _, value := range expectedValues { + _, err := getFormValueString(formObjects, value) + if err != nil { + return err + } + } + _, err := getFormValueBool(formObjects, "redis_ssl_sel") + return err +} + func parseBasicAuthSettings(result *models.Configuration, formObjects *[]jsonFormObject) error { var err error result.Authentication.Username, err = getFormValueString(formObjects, "auth_username") @@ -588,19 +669,20 @@ func splitAndTrim(input string) []string { } type setupView struct { - IsInitialSetup bool - LocalhostOnly bool - HasAwsFeature bool - IsDocker bool - S3EnvProvided bool - Port int - OAuthUsers string - OAuthGroups string - HeaderUsers string - 
Auth models.AuthenticationConfig - Settings models.Configuration - CloudSettings cloudconfig.CloudConfig - ProtectedUrls []string + IsInitialSetup bool + LocalhostOnly bool + HasAwsFeature bool + IsDocker bool + S3EnvProvided bool + Port int + OAuthUsers string + OAuthGroups string + HeaderUsers string + Auth models.AuthenticationConfig + Settings models.Configuration + CloudSettings cloudconfig.CloudConfig + DatabaseSettings models.DbConnection + ProtectedUrls []string } func (v *setupView) loadFromConfig() { @@ -632,6 +714,10 @@ func (v *setupView) loadFromConfig() { } env := environment.New() v.S3EnvProvided = env.IsAwsProvided() + + dbSettings, err := database.ParseUrl(settings.DatabaseUrl, false) + helper.Check(err) + v.DatabaseSettings = dbSettings } // Handling of /start diff --git a/internal/configuration/setup/Setup_test.go b/internal/configuration/setup/Setup_test.go index 024c184..b338491 100644 --- a/internal/configuration/setup/Setup_test.go +++ b/internal/configuration/setup/Setup_test.go @@ -92,6 +92,7 @@ func TestEncryptionSetup(t *testing.T) { testconfiguration.Create(false) configuration.Load() + configuration.ConnectDatabase() configuration.Get().Encryption.Level = 3 id := testconfiguration.WriteEncryptedFile() file, ok := database.GetMetaDataById(id) @@ -232,6 +233,65 @@ func TestInitialSetup(t *testing.T) { test.IsEqualBool(t, isInitialSetup, true) } +type dbFormTest struct { + DatabaseType string `form:"dbtype_sel"` + SqliteLocation string `form:"sqlite_location"` + RedisLocation string `form:"redis_location"` + RedisPrefix string `form:"redis_prefix"` + RedisUser string `form:"redis_user"` + RedisPw string `form:"redis_password"` + RedisUseSsl string `form:"redis_ssl_sel"` +} + +func generateDbFormValues(input dbFormTest) []jsonFormObject { + result := make([]jsonFormObject, 0) + v := reflect.ValueOf(input) + t := v.Type() + for i := 0; i < v.NumField(); i++ { + result = append(result, jsonFormObject{ + Name: t.Field(i).Tag.Get("form"), + Value: v.Field(i).Interface().(string), + }) + } + return result +} + +func TestParseDatabaseSettings(t *testing.T) { + output := models.Configuration{} + input := generateDbFormValues(dbFormTest{ + DatabaseType: "0", + SqliteLocation: "./data/test.sqlite", + RedisUseSsl: "0", + }) + expected := "sqlite://./data/test.sqlite" + err := parseDatabaseSettings(&output, &input) + test.IsNil(t, err) + test.IsEqualString(t, output.DatabaseUrl, expected) + + input = generateDbFormValues(dbFormTest{ + DatabaseType: "1", + RedisLocation: "127.0.0.1:1234", + RedisUseSsl: "0", + }) + expected = "redis://127.0.0.1:1234" + err = parseDatabaseSettings(&output, &input) + test.IsNil(t, err) + test.IsEqualString(t, output.DatabaseUrl, expected) + + input = generateDbFormValues(dbFormTest{ + DatabaseType: "1", + RedisLocation: "127.0.0.1:1234", + RedisPrefix: "pre_", + RedisUser: "testuser", + RedisPw: "testpw", + RedisUseSsl: "1", + }) + expected = "redis://testuser:testpw@127.0.0.1:1234?prefix=pre_&ssl=true" + err = parseDatabaseSettings(&output, &input) + test.IsNil(t, err) + test.IsEqualString(t, output.DatabaseUrl, expected) +} + func TestRunConfigModification(t *testing.T) { testconfiguration.Create(false) username = "" @@ -499,6 +559,13 @@ type setupValues struct { S3Endpoint setupEntry `form:"s3_endpoint"` EncryptionLevel setupEntry `form:"encrypt_sel" isInt:"true"` EncryptionPassword setupEntry `form:"enc_pw"` + DatabaseType setupEntry `form:"dbtype_sel" isInt:"true"` + SqliteLocation setupEntry `form:"sqlite_location"` + RedisLocation 
setupEntry `form:"redis_location"` + RedisPrefix setupEntry `form:"redis_prefix"` + RedisUser setupEntry `form:"redis_user"` + RedisPw setupEntry `form:"redis_password"` + RedisUseSsl setupEntry `form:"redis_ssl_sel" isBool:"true"` } func (s *setupValues) init() { @@ -615,6 +682,9 @@ func createInputInternalAuth() setupValues { values.OAuthRestrictUser.Value = "false" values.OAuthRestrictGroups.Value = "false" values.OAuthRecheckInterval.Value = "12" + values.DatabaseType.Value = "0" + values.SqliteLocation.Value = "./test/gokapi.sqlite" + values.RedisUseSsl.Value = "0" return values } @@ -639,6 +709,9 @@ func createInputHeaderAuth() setupValues { values.OAuthRestrictGroups.Value = "false" values.OAuthRecheckInterval.Value = "12" values.IncludeFilename.Value = "0" + values.DatabaseType.Value = "0" + values.SqliteLocation.Value = "./test/gokapi.sqlite" + values.RedisUseSsl.Value = "0" return values } diff --git a/internal/configuration/setup/templates/setup.tmpl b/internal/configuration/setup/templates/setup.tmpl index 458267f..be015e0 100644 --- a/internal/configuration/setup/templates/setup.tmpl +++ b/internal/configuration/setup/templates/setup.tmpl @@ -92,9 +92,44 @@ + +
[setup.tmpl hunk body not preserved in extraction; it adds the database selection controls to the setup wizard: a dbtype_sel selector plus sqlite_location, redis_location, redis_prefix, redis_user, redis_password and redis_ssl_sel inputs, matching the form values parsed in Setup.go]
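The new Redis provider persists small structs such as models.Session, models.LastUploadValues and models.E2EInfoEncrypted as Redis hashes: buildArgs(key).AddFlat(value) flattens the struct into field/value pairs for storage, and getHashMap plus redigo.ScanStruct rebuilds it on read. Those helpers live in the provider's shared Redis plumbing, which is not part of this hunk, so the following is only a minimal sketch of the same round trip against plain redigo, assuming a local Redis on 127.0.0.1:6379 and a throwaway demoSession type:

    package main

    import (
        "fmt"

        redigo "github.com/gomodule/redigo/redis"
    )

    // demoSession mirrors the shape of models.Session for illustration only.
    type demoSession struct {
        RenewAt    int64
        ValidUntil int64
    }

    func main() {
        conn, err := redigo.Dial("tcp", "127.0.0.1:6379")
        if err != nil {
            panic(err)
        }
        defer conn.Close()

        in := demoSession{RenewAt: 100, ValidUntil: 200}

        // Flatten the struct into field/value pairs and store it as a hash,
        // the same pattern as buildArgs(prefixSessions+id).AddFlat(session).
        _, err = conn.Do("HSET", redigo.Args{}.Add("se:demo").AddFlat(in)...)
        if err != nil {
            panic(err)
        }

        // Read the hash back and scan it into a struct again.
        values, err := redigo.Values(conn.Do("HGETALL", "se:demo"))
        if err != nil {
            panic(err)
        }
        var out demoSession
        err = redigo.ScanStruct(values, &out)
        if err != nil {
            panic(err)
        }
        fmt.Printf("%+v\n", out)
    }

SaveSession additionally calls setExpiryAt with session.ValidUntil, so expired sessions drop out of Redis on their own; EXPIREAT is the likely primitive behind that helper, though it is not shown in this hunk.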
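File metadata takes a different route in the Redis provider: SaveMetaData gob-encodes the whole models.File into a single byte value under fmeta:<id>, and dbToMetaData decodes it again, presumably because nested fields such as the encryption info would not survive a flat hash. A self-contained sketch of that encode/decode round trip, using a cut-down stand-in struct rather than the real models.File:

    package main

    import (
        "bytes"
        "encoding/gob"
        "fmt"
    )

    // demoFile stands in for models.File; only two fields are shown.
    type demoFile struct {
        Id   string
        Name string
    }

    // encode mirrors SaveMetaData: serialise the struct to raw bytes for setKey.
    func encode(f demoFile) []byte {
        var buf bytes.Buffer
        err := gob.NewEncoder(&buf).Encode(f)
        if err != nil {
            panic(err)
        }
        return buf.Bytes()
    }

    // decode mirrors dbToMetaData: rebuild the struct from the stored bytes.
    func decode(raw []byte) demoFile {
        var f demoFile
        err := gob.NewDecoder(bytes.NewBuffer(raw)).Decode(&f)
        if err != nil {
            panic(err)
        }
        return f
    }

    func main() {
        raw := encode(demoFile{Id: "testfile", Name: "test.txt"})
        fmt.Printf("%+v\n", decode(raw))
    }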
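On the SQLite side, init() builds the connection string with busy_timeout and WAL journaling passed as _pragma DSN parameters of the modernc.org/sqlite driver, and the schema version checked by Upgrade() is kept in PRAGMA user_version. A condensed sketch of just that connection setup, assuming ./demo.sqlite is writable:

    package main

    import (
        "database/sql"
        "fmt"

        // Required for sqlite driver
        _ "modernc.org/sqlite"
    )

    func main() {
        // busy_timeout reduces "database is locked" errors under parallel access,
        // WAL lets readers proceed while a write is in progress.
        dsn := "./demo.sqlite?_pragma=busy_timeout=10000&_pragma=journal_mode=WAL"
        db, err := sql.Open("sqlite", dsn)
        if err != nil {
            panic(err)
        }
        defer db.Close()

        // Same query as GetDbVersion: the provider decides from this value
        // whether Upgrade() has migrations to run.
        var version int
        err = db.QueryRow("PRAGMA user_version;").Scan(&version)
        if err != nil {
            panic(err)
        }
        fmt.Println("schema version:", version)
    }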
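Finally, parseDatabaseSettings collapses the setup form into a single DatabaseUrl: sqlite://<path> for SQLite, or a redis:// URL whose credentials go into the userinfo section and whose prefix and ssl options ride in the query string, which is exactly what the new TestParseDatabaseSettings cases assert. A minimal standalone sketch of the Redis branch, with literals standing in for the submitted form values:

    package main

    import (
        "fmt"
        "net/url"
    )

    func main() {
        // Stand-ins for redis_location, redis_prefix, redis_user,
        // redis_password and redis_ssl_sel from the setup form.
        host := "127.0.0.1:1234"
        prefix := "pre_"
        user := "testuser"
        password := "testpw"
        useSsl := true

        dbUrl := url.URL{Scheme: "redis", Host: host}
        query := url.Values{}
        if prefix != "" {
            query.Set("prefix", prefix)
        }
        if useSsl {
            query.Set("ssl", "true")
        }
        if user != "" || password != "" {
            dbUrl.User = url.UserPassword(user, password)
        }
        dbUrl.RawQuery = query.Encode()

        // Prints: redis://testuser:testpw@127.0.0.1:1234?prefix=pre_&ssl=true
        fmt.Println(dbUrl.String())
    }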