Mirror of https://github.com/btouchard/ackify.git (synced 2026-02-10 07:48:31 -06:00)
refactor(api): centralize pagination logic in shared package
Changes:
- Add PaginationParams struct for query parameter handling
- Add ParsePaginationParams() to parse and validate pagination from HTTP requests
- Add Validate() method with configurable min/max constraints
- Support both 'limit' and 'page_size' query parameters for flexibility
- Migrate documents handler (default: 20/page, max: 100)
- Migrate admin handler (default: 100/page, max: 200)
- Remove duplicated strconv imports and validation logic
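
For orientation before reading the diff, here is a minimal sketch of the call pattern the migrated handlers now follow. The package name, Handler type, repository interface, and the import path of the shared package are illustrative assumptions; only ParsePaginationParams, the Page/PageSize/Offset fields, and WritePaginatedJSON come from this commit.

// Hypothetical handler wiring; names and the shared import path are assumptions, not part of this commit.
package example

import (
    "context"
    "net/http"

    "github.com/btouchard/ackify-ce/backend/internal/presentation/api/shared" // assumed location of the shared package
)

type documentLister interface {
    List(ctx context.Context, limit, offset int) ([]string, error)
    Count(ctx context.Context) (int, error)
}

type Handler struct{ repo documentLister }

func (h *Handler) HandleList(w http.ResponseWriter, r *http.Request) {
    // Default page size 20, hard cap 100; reads ?page= plus either ?limit= or ?page_size=.
    pagination := shared.ParsePaginationParams(r, 20, 100)

    docs, err := h.repo.List(r.Context(), pagination.PageSize, pagination.Offset)
    if err != nil {
        http.Error(w, "internal error", http.StatusInternalServerError)
        return
    }

    total, _ := h.repo.Count(r.Context())
    shared.WritePaginatedJSON(w, docs, pagination.Page, pagination.PageSize, total)
}

Compared with the removed code in the diff below, the clamping and offset arithmetic now live in one place instead of being repeated per handler.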
(First file in the diff: the admin documents handler, per the commit message; file paths are not shown in this extract. The now-unused strconv import is dropped.)

@@ -6,7 +6,6 @@ import (
     "encoding/json"
     "net/http"
     "net/url"
-    "strconv"
 
     "github.com/btouchard/ackify-ce/backend/internal/domain/models"
     "github.com/btouchard/ackify-ce/backend/internal/infrastructure/i18n"
@@ -118,39 +117,24 @@ func (h *Handler) HandleListDocuments(w http.ResponseWriter, r *http.Request) {
     ctx := r.Context()
 
     // Parse pagination and search parameters
-    page := 1
-    limit := 100
+    pagination := shared.ParsePaginationParams(r, 100, 200)
     searchQuery := r.URL.Query().Get("search")
 
-    if p := r.URL.Query().Get("page"); p != "" {
-        if parsed, err := strconv.Atoi(p); err == nil && parsed > 0 {
-            page = parsed
-        }
-    }
-
-    if l := r.URL.Query().Get("limit"); l != "" {
-        if parsed, err := strconv.Atoi(l); err == nil && parsed > 0 && parsed <= 200 {
-            limit = parsed
-        }
-    }
-
-    offset := (page - 1) * limit
-
     // Fetch documents with or without search
     var documents []*models.Document
     var err error
 
     if searchQuery != "" {
-        documents, err = h.documentRepo.Search(ctx, searchQuery, limit, offset)
+        documents, err = h.documentRepo.Search(ctx, searchQuery, pagination.PageSize, pagination.Offset)
         logger.Logger.Debug("Admin document search",
             "query", searchQuery,
-            "limit", limit,
-            "offset", offset)
+            "limit", pagination.PageSize,
+            "offset", pagination.Offset)
     } else {
-        documents, err = h.documentRepo.List(ctx, limit, offset)
+        documents, err = h.documentRepo.List(ctx, pagination.PageSize, pagination.Offset)
         logger.Logger.Debug("Admin document list",
-            "limit", limit,
-            "offset", offset)
+            "limit", pagination.PageSize,
+            "offset", pagination.Offset)
     }
 
     if err != nil {
@@ -176,9 +160,9 @@ func (h *Handler) HandleListDocuments(w http.ResponseWriter, r *http.Request) {
     meta := map[string]interface{}{
         "total": totalCount, // Total matching documents in DB
         "count": len(documents), // Count in this page
-        "limit": limit,
-        "offset": offset,
-        "page": page,
+        "limit": pagination.PageSize,
+        "offset": pagination.Offset,
+        "page": pagination.Page,
     }
 
     if searchQuery != "" {
(Second file in the diff: the public documents handler, per the commit message; file path not shown in this extract.)

@@ -215,29 +215,13 @@ func (h *Handler) HandleCreateDocument(w http.ResponseWriter, r *http.Request) {
 func (h *Handler) HandleListDocuments(w http.ResponseWriter, r *http.Request) {
     ctx := r.Context()
 
-    // Parse query parameters
-    page := 1
-    limit := 20
+    // Parse pagination and search parameters
+    pagination := shared.ParsePaginationParams(r, 20, 100)
     searchQuery := r.URL.Query().Get("search")
 
-    if p := r.URL.Query().Get("page"); p != "" {
-        if parsed, err := strconv.Atoi(p); err == nil && parsed > 0 {
-            page = parsed
-        }
-    }
-
-    if l := r.URL.Query().Get("limit"); l != "" {
-        if parsed, err := strconv.Atoi(l); err == nil && parsed > 0 && parsed <= 100 {
-            limit = parsed
-        }
-    }
-
-    // Calculate offset for pagination
-    offset := (page - 1) * limit
-
     // If no document repository is available, return empty list (backward compat)
     if h.documentRepo == nil {
-        shared.WritePaginatedJSON(w, []DocumentDTO{}, page, limit, 0)
+        shared.WritePaginatedJSON(w, []DocumentDTO{}, pagination.Page, pagination.PageSize, 0)
         return
     }
 
@@ -247,17 +231,17 @@ func (h *Handler) HandleListDocuments(w http.ResponseWriter, r *http.Request) {
 
     if searchQuery != "" {
         // Use search if query is provided
-        docs, err = h.documentRepo.Search(ctx, searchQuery, limit, offset)
+        docs, err = h.documentRepo.Search(ctx, searchQuery, pagination.PageSize, pagination.Offset)
         logger.Logger.Debug("Public document search request",
             "query", searchQuery,
-            "limit", limit,
-            "offset", offset)
+            "limit", pagination.PageSize,
+            "offset", pagination.Offset)
     } else {
         // Otherwise, list all documents
-        docs, err = h.documentRepo.List(ctx, limit, offset)
+        docs, err = h.documentRepo.List(ctx, pagination.PageSize, pagination.Offset)
         logger.Logger.Debug("Public document list request",
-            "limit", limit,
-            "offset", offset)
+            "limit", pagination.PageSize,
+            "offset", pagination.Offset)
     }
 
     if err != nil {
@@ -303,7 +287,7 @@ func (h *Handler) HandleListDocuments(w http.ResponseWriter, r *http.Request) {
         documents = append(documents, dto)
     }
 
-    shared.WritePaginatedJSON(w, documents, page, limit, totalCount)
+    shared.WritePaginatedJSON(w, documents, pagination.Page, pagination.PageSize, totalCount)
 }
 
 // HandleGetDocument handles GET /api/v1/documents/{docId}
(Third file in the diff: the shared API package where the new pagination helpers live; file path not shown in this extract.)

@@ -4,6 +4,7 @@ package shared
 import (
     "encoding/json"
     "net/http"
+    "strconv"
 )
 
 // Response represents a standardized API response
@@ -20,6 +21,74 @@ type PaginationMeta struct {
     TotalPages int `json:"totalPages"`
 }
 
+// PaginationParams represents pagination query parameters
+type PaginationParams struct {
+    Page     int `json:"page" schema:"page"`
+    PageSize int `json:"page_size" schema:"page_size"`
+    Offset   int `json:"-"`
+}
+
+// NewPaginationParams creates pagination parameters with default values
+func NewPaginationParams(defaultPage, defaultPageSize, maxPageSize int) *PaginationParams {
+    if defaultPage < 1 {
+        defaultPage = 1
+    }
+    if defaultPageSize < 1 {
+        defaultPageSize = 20
+    }
+    if maxPageSize < 1 {
+        maxPageSize = 100
+    }
+
+    return &PaginationParams{
+        Page:     defaultPage,
+        PageSize: defaultPageSize,
+    }
+}
+
+// ParsePaginationParams parses pagination parameters from HTTP request query string
+// and validates them against min/max constraints
+func ParsePaginationParams(r *http.Request, defaultPageSize, maxPageSize int) *PaginationParams {
+    params := NewPaginationParams(1, defaultPageSize, maxPageSize)
+
+    // Parse page parameter
+    if pageStr := r.URL.Query().Get("page"); pageStr != "" {
+        if page, err := strconv.Atoi(pageStr); err == nil && page > 0 {
+            params.Page = page
+        }
+    }
+
+    // Parse limit/page_size parameter (support both names)
+    pageSizeStr := r.URL.Query().Get("limit")
+    if pageSizeStr == "" {
+        pageSizeStr = r.URL.Query().Get("page_size")
+    }
+    if pageSizeStr != "" {
+        if pageSize, err := strconv.Atoi(pageSizeStr); err == nil && pageSize > 0 {
+            params.PageSize = pageSize
+        }
+    }
+
+    // Validate and calculate
+    params.Validate(maxPageSize)
+
+    return params
+}
+
+// Validate validates pagination parameters and calculates offset
+func (p *PaginationParams) Validate(maxPageSize int) {
+    if p.Page < 1 {
+        p.Page = 1
+    }
+    if p.PageSize < 1 {
+        p.PageSize = 20
+    }
+    if maxPageSize > 0 && p.PageSize > maxPageSize {
+        p.PageSize = maxPageSize
+    }
+    p.Offset = (p.Page - 1) * p.PageSize
+}
+
 // WriteJSON writes a JSON response
 func WriteJSON(w http.ResponseWriter, statusCode int, data interface{}) {
     w.Header().Set("Content-Type", "application/json")
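
To make the parsing and clamping rules concrete, here is a small test-style sketch against the helpers added above. The import path and test placement are assumptions; the expected values follow directly from ParsePaginationParams and Validate as shown in the hunk.

package shared_test

import (
    "net/http/httptest"
    "testing"

    "github.com/btouchard/ackify-ce/backend/internal/presentation/api/shared" // assumed location of the shared package
)

func TestParsePaginationParams(t *testing.T) {
    // page_size is accepted but clamped to the max (100 here); Offset = (Page-1) * PageSize.
    r := httptest.NewRequest("GET", "/api/v1/documents?page=3&page_size=500", nil)
    p := shared.ParsePaginationParams(r, 20, 100)
    if p.Page != 3 || p.PageSize != 100 || p.Offset != 200 {
        t.Fatalf("got page=%d size=%d offset=%d", p.Page, p.PageSize, p.Offset)
    }

    // "limit" is read first, so it wins when both names are supplied.
    r = httptest.NewRequest("GET", "/api/v1/documents?limit=50&page_size=10", nil)
    if p = shared.ParsePaginationParams(r, 20, 100); p.PageSize != 50 {
        t.Fatalf("expected limit to take precedence, got %d", p.PageSize)
    }

    // Invalid values fall back to the defaults: page 1, default page size, offset 0.
    r = httptest.NewRequest("GET", "/api/v1/documents?page=abc&limit=-5", nil)
    p = shared.ParsePaginationParams(r, 20, 100)
    if p.Page != 1 || p.PageSize != 20 || p.Offset != 0 {
        t.Fatalf("got page=%d size=%d offset=%d", p.Page, p.PageSize, p.Offset)
    }
}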