Mirror of https://github.com/eduardolat/pgbackweb.git
Add BrowserCacheMiddleware to improve static file caching, excluding specific files such as robots.txt from the cache
internal/view/middleware/browser_cache.go (new file, 38 lines)
@@ -0,0 +1,38 @@
+package middleware
+
+import (
+	"strconv"
+	"time"
+
+	"github.com/labstack/echo/v4"
+)
+
+type BrowserCacheMiddlewareConfig struct {
+	CacheDuration time.Duration
+	ExcludedFiles []string
+}
+
+// NewBrowserCacheMiddleware creates a new CacheMiddleware with the specified
+// cache duration and a list of excluded files that will bypass the cache.
+func (Middleware) NewBrowserCacheMiddleware(
+	config BrowserCacheMiddlewareConfig,
+) echo.MiddlewareFunc {
+	return func(next echo.HandlerFunc) echo.HandlerFunc {
+		return func(c echo.Context) error {
+			path := c.Request().URL.Path
+			for _, excluded := range config.ExcludedFiles {
+				if path == excluded {
+					return next(c)
+				}
+			}
+
+			cacheDuration := config.CacheDuration
+			c.Response().Header().Set(
+				"Cache-Control",
+				"public, max-age="+strconv.Itoa(int(cacheDuration.Seconds())),
+			)
+
+			return next(c)
+		}
+	}
+}
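The new middleware only sets a Cache-Control header and lets excluded paths pass through untouched. As a quick illustration, here is a minimal standalone sketch (not part of the commit) that reproduces the same logic inline and exercises it with net/http/httptest; the route paths, the cacheFor variable, and the excluded map are assumptions made for this example only.

// Standalone sketch: the same Cache-Control logic written inline and checked
// against two throwaway routes. Not the project's code; cacheFor, excluded,
// and the registered paths are illustrative assumptions.
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"strconv"
	"time"

	"github.com/labstack/echo/v4"
)

func main() {
	e := echo.New()

	cacheFor := 30 * 24 * time.Hour // same duration the router configures below
	excluded := map[string]bool{"/robots.txt": true}

	browserCache := func(next echo.HandlerFunc) echo.HandlerFunc {
		return func(c echo.Context) error {
			if excluded[c.Request().URL.Path] {
				return next(c) // excluded paths get no Cache-Control header
			}
			c.Response().Header().Set(
				"Cache-Control",
				"public, max-age="+strconv.Itoa(int(cacheFor.Seconds())),
			)
			return next(c)
		}
	}

	// Register routes behind the middleware, mirroring how the router wraps
	// the static file system in a group.
	g := e.Group("", browserCache)
	g.GET("/app.css", func(c echo.Context) error { return c.String(http.StatusOK, "body{}") })
	g.GET("/robots.txt", func(c echo.Context) error { return c.String(http.StatusOK, "User-agent: *") })

	for _, path := range []string{"/app.css", "/robots.txt"} {
		req := httptest.NewRequest(http.MethodGet, path, nil)
		rec := httptest.NewRecorder()
		e.ServeHTTP(rec, req)
		fmt.Printf("%s -> Cache-Control: %q\n", path, rec.Header().Get("Cache-Control"))
	}
	// Expected output:
	// /app.css -> Cache-Control: "public, max-age=2592000"
	// /robots.txt -> Cache-Control: ""
}

The second hunk below wires the middleware into the view router's static file handling.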
@@ -1,6 +1,8 @@
 package view
 
 import (
+	"time"
+
 	"github.com/eduardolat/pgbackweb/internal/service"
 	"github.com/eduardolat/pgbackweb/internal/view/api"
 	"github.com/eduardolat/pgbackweb/internal/view/middleware"
@@ -12,7 +14,13 @@ import (
 func MountRouter(app *echo.Echo, servs *service.Service) {
 	mids := middleware.New(servs)
 
-	app.StaticFS("", static.StaticFs)
+	browserCache := mids.NewBrowserCacheMiddleware(
+		middleware.BrowserCacheMiddlewareConfig{
+			CacheDuration: time.Hour * 24 * 30,
+			ExcludedFiles: []string{"/robots.txt"},
+		},
+	)
+	app.Group("", browserCache).StaticFS("", static.StaticFs)
 
 	apiGroup := app.Group("/api")
 	api.MountRouter(apiGroup, mids, servs)
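Because the middleware is attached via app.Group("", browserCache) and only the static file system is registered on that group, the /api routes keep their default caching behavior. For reference, a tiny sketch (again not part of the commit) of the arithmetic behind the configured 30-day duration:

// Quick check of the configured duration: 30 days expressed as the
// Cache-Control value the browser will receive for static files.
package main

import (
	"fmt"
	"strconv"
	"time"
)

func main() {
	cacheDuration := time.Hour * 24 * 30 // value used in the router config
	fmt.Println("public, max-age=" + strconv.Itoa(int(cacheDuration.Seconds())))
	// Output: public, max-age=2592000
}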