From 85192b8b3da2c698f837fb04457cd57cdc591274 Mon Sep 17 00:00:00 2001
From: seniorswe
Date: Sat, 4 Oct 2025 15:36:32 -0400
Subject: [PATCH] Fix badges on user page. Fix logging page
---
backend-services/doorman.py | 38 +++++++-
backend-services/utils/bandwidth_util.py | 3 +-
backend-services/utils/limit_throttle_util.py | 19 ++--
backend-services/utils/metrics_util.py | 90 +++++++++++++++++++
web-client/src/app/logging/page.tsx | 55 +-----------
web-client/src/app/users/[username]/page.tsx | 53 +++++++----
web-client/src/app/users/add/page.tsx | 11 ++-
7 files changed, 186 insertions(+), 83 deletions(-)
diff --git a/backend-services/doorman.py b/backend-services/doorman.py
index 2a52f53..04d1502 100755
--- a/backend-services/doorman.py
+++ b/backend-services/doorman.py
@@ -117,6 +117,28 @@ async def app_lifespan(app: FastAPI):
app.state._purger_task = asyncio.create_task(automatic_purger(1800))
+ # Restore persisted metrics (if available)
+ METRICS_FILE = os.path.join(LOGS_DIR, 'metrics.json')
+ try:
+ metrics_store.load_from_file(METRICS_FILE)
+ except Exception as e:
+ gateway_logger.debug(f'Metrics restore skipped: {e}')
+
+ # Start periodic metrics saver
+ async def _metrics_autosave(interval_s: int = 60):
+ while True:
+ try:
+ await asyncio.sleep(interval_s)
+ metrics_store.save_to_file(METRICS_FILE)
+ except asyncio.CancelledError:
+ break
+ except Exception:
+ pass
+ try:
+ app.state._metrics_save_task = asyncio.create_task(_metrics_autosave(60))
+ except Exception:
+ app.state._metrics_save_task = None
+
try:
await load_settings()
await start_auto_save_task()
@@ -213,6 +235,20 @@ async def app_lifespan(app: FastAPI):
except Exception:
pass
+ # Persist metrics on shutdown
+ try:
+ METRICS_FILE = os.path.join(LOGS_DIR, 'metrics.json')
+ metrics_store.save_to_file(METRICS_FILE)
+ except Exception:
+ pass
+ # Stop autosave task
+ try:
+ t = getattr(app.state, '_metrics_save_task', None)
+ if t:
+ t.cancel()
+ except Exception:
+ pass
+
def _generate_unique_id(route):
try:
name = getattr(route, 'name', 'op') or 'op'
@@ -614,7 +650,7 @@ async def metrics_middleware(request: Request, call_next):
if username:
from utils.bandwidth_util import add_usage, _get_user
u = _get_user(username)
- # Track usage only if not explicitly disabled and a limit is configured
+ # Track usage when limit is set unless explicitly disabled
if u and u.get('bandwidth_limit_bytes') and u.get('bandwidth_limit_enabled') is not False:
add_usage(username, int(bytes_in) + int(clen), u.get('bandwidth_limit_window') or 'day')
except Exception:
diff --git a/backend-services/utils/bandwidth_util.py b/backend-services/utils/bandwidth_util.py
index 7463fda..f039e5a 100644
--- a/backend-services/utils/bandwidth_util.py
+++ b/backend-services/utils/bandwidth_util.py
@@ -81,8 +81,7 @@ async def enforce_pre_request_limit(request: Request, username: Optional[str]) -
user = _get_user(username)
if not user:
return
- # Only enforce if explicitly enabled or not disabled.
- # Backwards compatibility: if field is absent (None), treat as enabled when limit > 0
+ # Enforce when limit is set unless explicitly disabled
if user.get('bandwidth_limit_enabled') is False:
return
limit = user.get('bandwidth_limit_bytes')
diff --git a/backend-services/utils/limit_throttle_util.py b/backend-services/utils/limit_throttle_util.py
index 4da1a83..7224549 100644
--- a/backend-services/utils/limit_throttle_util.py
+++ b/backend-services/utils/limit_throttle_util.py
@@ -59,10 +59,12 @@ async def limit_and_throttle(request: Request):
if not user:
user = user_collection.find_one({'username': username})
now_ms = int(time.time() * 1000)
- # Rate limiting (skip if explicitly disabled)
- if user.get('rate_limit_enabled') is not False:
- rate = int(user.get('rate_limit_duration') or 1)
- duration = user.get('rate_limit_duration_type', 'minute')
+ # Rate limiting (enabled if explicitly set true, or legacy values exist)
+ rate_enabled = (user.get('rate_limit_enabled') is True) or bool(user.get('rate_limit_duration'))
+ if rate_enabled:
+ # Use user-set values; if explicitly enabled but missing values, fall back to sensible defaults
+ rate = int(user.get('rate_limit_duration') or 60)
+ duration = user.get('rate_limit_duration_type') or 'minute'
window = duration_to_seconds(duration)
key = f'rate_limit:{username}:{now_ms // (window * 1000)}'
try:
@@ -77,10 +79,11 @@ async def limit_and_throttle(request: Request):
if count > rate:
raise HTTPException(status_code=429, detail='Rate limit exceeded')
- # Throttling (skip if explicitly disabled)
- if user.get('throttle_enabled') is not False:
- throttle_limit = int(user.get('throttle_duration') or 5)
- throttle_duration = user.get('throttle_duration_type', 'second')
+ # Throttling (enabled if explicitly set true, or legacy values exist)
+ throttle_enabled = (user.get('throttle_enabled') is True) or bool(user.get('throttle_duration'))
+ if throttle_enabled:
+ throttle_limit = int(user.get('throttle_duration') or 10)
+ throttle_duration = user.get('throttle_duration_type') or 'second'
throttle_window = duration_to_seconds(throttle_duration)
throttle_key = f'throttle_limit:{username}:{now_ms // (throttle_window * 1000)}'
try:
diff --git a/backend-services/utils/metrics_util.py b/backend-services/utils/metrics_util.py
index 22570eb..5be942b 100644
--- a/backend-services/utils/metrics_util.py
+++ b/backend-services/utils/metrics_util.py
@@ -9,6 +9,8 @@ import time
from collections import defaultdict, deque
from dataclasses import dataclass, field
from typing import Deque, Dict, List, Optional
+import json
+import os
@dataclass
class MinuteBucket:
@@ -48,6 +50,39 @@ class MinuteBucket:
except Exception:
pass
+ def to_dict(self) -> Dict:
+ return {
+ 'start_ts': self.start_ts,
+ 'count': self.count,
+ 'error_count': self.error_count,
+ 'total_ms': self.total_ms,
+ 'bytes_in': self.bytes_in,
+ 'bytes_out': self.bytes_out,
+ 'status_counts': dict(self.status_counts or {}),
+ 'api_counts': dict(self.api_counts or {}),
+ 'api_error_counts': dict(self.api_error_counts or {}),
+ 'user_counts': dict(self.user_counts or {}),
+ }
+
+ @staticmethod
+ def from_dict(d: Dict) -> 'MinuteBucket':
+ mb = MinuteBucket(
+ start_ts=int(d.get('start_ts', 0)),
+ count=int(d.get('count', 0)),
+ error_count=int(d.get('error_count', 0)),
+ total_ms=float(d.get('total_ms', 0.0)),
+ bytes_in=int(d.get('bytes_in', 0)),
+ bytes_out=int(d.get('bytes_out', 0)),
+ )
+ try:
+ mb.status_counts = dict(d.get('status_counts') or {})
+ mb.api_counts = dict(d.get('api_counts') or {})
+ mb.api_error_counts = dict(d.get('api_error_counts') or {})
+ mb.user_counts = dict(d.get('user_counts') or {})
+ except Exception:
+ pass
+ return mb
+
if username:
try:
self.user_counts[username] = self.user_counts.get(username, 0) + 1
@@ -170,5 +205,60 @@ class MetricsStore:
'top_apis': sorted(self.api_counts.items(), key=lambda kv: kv[1], reverse=True)[:10],
}
+ def to_dict(self) -> Dict:
+ return {
+ 'total_requests': int(self.total_requests),
+ 'total_ms': float(self.total_ms),
+ 'total_bytes_in': int(self.total_bytes_in),
+ 'total_bytes_out': int(self.total_bytes_out),
+ 'status_counts': dict(self.status_counts),
+ 'username_counts': dict(self.username_counts),
+ 'api_counts': dict(self.api_counts),
+ 'buckets': [b.to_dict() for b in list(self._buckets)],
+ }
+
+ def load_dict(self, data: Dict) -> None:
+ try:
+ self.total_requests = int(data.get('total_requests', 0))
+ self.total_ms = float(data.get('total_ms', 0.0))
+ self.total_bytes_in = int(data.get('total_bytes_in', 0))
+ self.total_bytes_out = int(data.get('total_bytes_out', 0))
+ self.status_counts = defaultdict(int, data.get('status_counts') or {})
+ self.username_counts = defaultdict(int, data.get('username_counts') or {})
+ self.api_counts = defaultdict(int, data.get('api_counts') or {})
+ self._buckets.clear()
+ for bd in data.get('buckets', []):
+ try:
+ self._buckets.append(MinuteBucket.from_dict(bd))
+ except Exception:
+ continue
+ except Exception:
+ # If anything goes wrong, keep current in-memory metrics
+ pass
+
+ def save_to_file(self, path: str) -> None:
+ try:
+ os.makedirs(os.path.dirname(path), exist_ok=True)
+ except Exception:
+ pass
+ try:
+ tmp = path + '.tmp'
+ with open(tmp, 'w', encoding='utf-8') as f:
+ json.dump(self.to_dict(), f)
+ os.replace(tmp, path)
+ except Exception:
+ pass
+
+ def load_from_file(self, path: str) -> None:
+ try:
+ if not os.path.exists(path):
+ return
+ with open(path, 'r', encoding='utf-8') as f:
+ data = json.load(f)
+ if isinstance(data, dict):
+ self.load_dict(data)
+ except Exception:
+ pass
+
# Global metrics store
metrics_store = MetricsStore()
diff --git a/web-client/src/app/logging/page.tsx b/web-client/src/app/logging/page.tsx
index fb2f786..75fcdba 100644
--- a/web-client/src/app/logging/page.tsx
+++ b/web-client/src/app/logging/page.tsx
@@ -68,9 +68,7 @@ export default function LogsPage() {
const [error, setError] = useState(null)
const [showMoreFilters, setShowMoreFilters] = useState(false)
const [exporting, setExporting] = useState(false)
- const [filesLoading, setFilesLoading] = useState(false)
- const [filesError, setFilesError] = useState(null)
- const [logFiles, setLogFiles] = useState([])
+ // Removed log files listing per requirements
const [expandedRequests, setExpandedRequests] = useState<Set<string>>(new Set())
const [loadingExpanded, setLoadingExpanded] = useState<Set<string>>(new Set())
const [currentRequestId, setCurrentRequestId] = useState(null)
@@ -211,30 +209,7 @@ export default function LogsPage() {
}
}, [filters, logsPage, logsPageSize])
- const fetchLogFiles = useCallback(async () => {
- try {
- setFilesLoading(true)
- setFilesError(null)
- const csrf = getCookie('csrf_token')
- const resp = await fetch(`${SERVER_URL}/platform/logging/logs/files`, {
- credentials: 'include',
- headers: { 'Accept': 'application/json', ...(csrf ? { 'X-CSRF-Token': csrf } : {}) }
- })
- if (!resp.ok) throw new Error('Failed to fetch log files')
- const data = await resp.json().catch(() => ({}))
- const files: string[] = data.response?.log_files || data.log_files || []
- setLogFiles(files)
- } catch (e:any) {
- setFilesError(e?.message || 'Failed to fetch log files')
- setLogFiles([])
- } finally {
- setFilesLoading(false)
- }
- }, [])
-
- useEffect(() => {
- fetchLogFiles().catch(() => {})
- }, [fetchLogFiles])
+ // (Log file listing removed)
const fetchLogsForRequestId = useCallback(async (requestId: string) => {
try {
@@ -487,16 +462,6 @@ export default function LogsPage() {
- {logFiles.length > 0 && (
-
-
- {logFiles.slice(0, 10).map((f, i) => (
- {f}
- ))}
- {logFiles.length > 10 && +{logFiles.length - 10} more}
-
-
- )}
@@ -504,21 +469,7 @@ export default function LogsPage() {
Filters
-
-
-
-
- {filesError ? {filesError} : (
- <>
- Available log files:
- {logFiles.length}
- >
- )}
-
-
+
{canExport && (
diff --git a/web-client/src/app/users/[username]/page.tsx b/web-client/src/app/users/[username]/page.tsx
index 2858756..7f9231e 100644
--- a/web-client/src/app/users/[username]/page.tsx
+++ b/web-client/src/app/users/[username]/page.tsx
@@ -88,13 +88,13 @@ const UserDetailPage = () => {
groups: [...parsedUser.groups],
rate_limit_duration: parsedUser.rate_limit_duration,
rate_limit_duration_type: parsedUser.rate_limit_duration_type,
- rate_limit_enabled: (parsedUser as any).rate_limit_enabled,
+ rate_limit_enabled: Boolean((parsedUser as any).rate_limit_enabled),
throttle_duration: parsedUser.throttle_duration,
throttle_duration_type: parsedUser.throttle_duration_type,
throttle_wait_duration: (parsedUser as any).throttle_wait_duration,
throttle_wait_duration_type: (parsedUser as any).throttle_wait_duration_type,
throttle_queue_limit: (parsedUser as any).throttle_queue_limit,
- throttle_enabled: (parsedUser as any).throttle_enabled,
+ throttle_enabled: Boolean((parsedUser as any).throttle_enabled),
throttle_wait_duration: parsedUser.throttle_wait_duration,
throttle_wait_duration_type: parsedUser.throttle_wait_duration_type,
throttle_queue_limit: parsedUser.throttle_queue_limit,
@@ -115,9 +115,9 @@ const UserDetailPage = () => {
...prev,
bandwidth_limit_bytes: refreshed.bandwidth_limit_bytes,
bandwidth_limit_window: refreshed.bandwidth_limit_window,
- bandwidth_limit_enabled: (refreshed as any).bandwidth_limit_enabled,
- rate_limit_enabled: (refreshed as any).rate_limit_enabled,
- throttle_enabled: (refreshed as any).throttle_enabled,
+ bandwidth_limit_enabled: Boolean((refreshed as any).bandwidth_limit_enabled),
+ rate_limit_enabled: Boolean((refreshed as any).rate_limit_enabled),
+ throttle_enabled: Boolean((refreshed as any).throttle_enabled),
}))
} catch {}
})()
@@ -153,17 +153,17 @@ const UserDetailPage = () => {
groups: [...user.groups],
rate_limit_duration: user.rate_limit_duration,
rate_limit_duration_type: user.rate_limit_duration_type,
- rate_limit_enabled: (user as any).rate_limit_enabled,
+ rate_limit_enabled: Boolean((user as any).rate_limit_enabled),
throttle_duration: user.throttle_duration,
throttle_duration_type: user.throttle_duration_type,
throttle_wait_duration: user.throttle_wait_duration,
throttle_wait_duration_type: user.throttle_wait_duration_type,
throttle_queue_limit: user.throttle_queue_limit,
- throttle_enabled: (user as any).throttle_enabled,
+ throttle_enabled: Boolean((user as any).throttle_enabled),
custom_attributes: { ...user.custom_attributes },
bandwidth_limit_bytes: user.bandwidth_limit_bytes,
bandwidth_limit_window: user.bandwidth_limit_window,
- bandwidth_limit_enabled: (user as any).bandwidth_limit_enabled,
+ bandwidth_limit_enabled: Boolean((user as any).bandwidth_limit_enabled),
active: user.active,
ui_access: user.ui_access
})
@@ -586,9 +586,14 @@ const UserDetailPage = () => {
) : (
-
- {(user as any).bandwidth_limit_enabled === false ? 'Disabled' : 'Enabled'}
-
+ (() => {
+ const bwEnabled = Boolean((user as any).bandwidth_limit_enabled) && (Number(user.bandwidth_limit_bytes || 0) > 0)
+ return (
+
+ {bwEnabled ? 'Enabled' : 'Disabled'}
+
+ )
+ })()
)}
@@ -699,9 +704,14 @@ const UserDetailPage = () => {
) : (
-
- {(user as any).rate_limit_enabled === false ? 'Disabled' : 'Enabled'}
-
+ (() => {
+ const enabled = Boolean((user as any).rate_limit_enabled)
+ return (
+
+ {enabled ? 'Enabled' : 'Disabled'}
+
+ )
+ })()
)}
@@ -757,9 +767,14 @@ const UserDetailPage = () => {
) : (
-
- {(user as any).throttle_enabled === false ? 'Disabled' : 'Enabled'}
-
+ (() => {
+ const enabled = Boolean((user as any).throttle_enabled)
+ return (
+
+ {enabled ? 'Enabled' : 'Disabled'}
+
+ )
+ })()
)}
@@ -878,7 +893,7 @@ const UserDetailPage = () => {
)}
- {Object.entries(isEditing ? editData.custom_attributes || {} : user.custom_attributes).map(([key, value]) => (
+ {Object.entries(((isEditing ? editData.custom_attributes : user.custom_attributes) || {})).map(([key, value]) => (
{key}: {value}
@@ -897,7 +912,7 @@ const UserDetailPage = () => {
))}
- {Object.keys(isEditing ? editData.custom_attributes || {} : user.custom_attributes).length === 0 && (
+ {Object.keys(((isEditing ? editData.custom_attributes : user.custom_attributes) || {})).length === 0 && (
No custom attributes
)}
diff --git a/web-client/src/app/users/add/page.tsx b/web-client/src/app/users/add/page.tsx
index 24fc734..3341147 100644
--- a/web-client/src/app/users/add/page.tsx
+++ b/web-client/src/app/users/add/page.tsx
@@ -41,7 +41,16 @@ const AddUserPage = () => {
role: '',
groups: [],
custom_attributes: {},
- rate_limit_enabled: false,
+ // Defaults: enforce rate/throttle by default with reasonable values
+ rate_limit_duration: 60,
+ rate_limit_duration_type: 'minute',
+ rate_limit_enabled: true,
+ throttle_duration: 10,
+ throttle_duration_type: 'second',
+ throttle_wait_duration: 0.5,
+ throttle_wait_duration_type: 'second',
+ throttle_queue_limit: 10,
+ throttle_enabled: true,
bandwidth_limit_bytes: undefined,
bandwidth_limit_window: 'day',
bandwidth_limit_enabled: false,