mirror of
https://github.com/MrRobotjs/MUM.git
synced 2025-12-16 22:44:16 -06:00
Integrate React SPA and API v1 routes into admin UI
Added blueprint to serve React SPA for /admin routes, refactored error handling and routing to support client-side navigation, and introduced new API v1 endpoints for frontend integration. Legacy admin blueprints for users, invites, and libraries are disabled in favor of the SPA. Dockerfile and extension updates enable WebSocket support and dynamic port configuration.
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -22,4 +22,5 @@ build/
|
||||
MUM/
|
||||
node_modules/*
|
||||
acli.exe
|
||||
geminilog.txt
|
||||
geminilog.txt
|
||||
node_modules
|
||||
|
||||
@@ -51,10 +51,7 @@ RUN mkdir -p /app/instance /.cache
|
||||
|
||||
# Healthcheck and expose (already good)
|
||||
HEALTHCHECK --interval=30s --timeout=3s --start-period=10s --retries=3 \
|
||||
CMD curl -fs http://localhost:5000/health || exit 1
|
||||
CMD sh -c "curl -fs http://localhost:${FLASK_PORT:-5000}/health || exit 1"
|
||||
EXPOSE 5000
|
||||
ENTRYPOINT ["/bin/sh", "/usr/local/bin/entrypoint.sh"]
|
||||
CMD ["gunicorn", \
|
||||
"--bind", "0.0.0.0:5000", \
|
||||
"--forwarded-allow-ips", "*", \
|
||||
"run:app"]
|
||||
CMD ["sh", "-c", "gunicorn --worker-class eventlet -w 1 --bind 0.0.0.0:${FLASK_PORT:-5000} --forwarded-allow-ips='*' run:app"]
|
||||
|
||||
114
app/__init__.py
114
app/__init__.py
@@ -5,7 +5,7 @@ from logging.handlers import RotatingFileHandler
|
||||
import secrets
|
||||
from datetime import datetime
|
||||
from werkzeug.middleware.proxy_fix import ProxyFix
|
||||
from flask import Flask, g, request, redirect, url_for, current_app, render_template, flash
|
||||
from flask import Flask, g, request, redirect, url_for, current_app, render_template, flash, send_from_directory, jsonify
|
||||
from flask_login import current_user
|
||||
|
||||
from .config import config
|
||||
@@ -15,8 +15,9 @@ from .extensions import (
|
||||
login_manager,
|
||||
csrf,
|
||||
scheduler,
|
||||
babel,
|
||||
htmx
|
||||
babel,
|
||||
htmx,
|
||||
socketio
|
||||
)
|
||||
from .models import User, UserType, Setting, EventType
|
||||
from .utils import helpers
|
||||
@@ -72,11 +73,40 @@ def register_error_handlers(app):
|
||||
def bad_request_page(error):
|
||||
# Check if this is a CSRF error
|
||||
error_description = str(error.description) if hasattr(error, 'description') else ""
|
||||
wants_json = (
|
||||
request.path.startswith('/admin/api/')
|
||||
or request.accept_mimetypes['application/json'] >= request.accept_mimetypes['text/html']
|
||||
)
|
||||
if wants_json:
|
||||
response = jsonify({
|
||||
'error': {
|
||||
'code': 'BAD_REQUEST',
|
||||
'message': error_description or 'Bad request.'
|
||||
}
|
||||
})
|
||||
response.status_code = 400
|
||||
return response
|
||||
return render_template("errors/400.html", error_description=error_description), 400
|
||||
@app.errorhandler(403)
|
||||
def forbidden_page(error): return render_template("errors/403.html"), 403
|
||||
@app.route('/favicon.ico')
|
||||
def favicon():
|
||||
return send_from_directory(os.path.join(app.root_path, 'static'), 'favicon.ico', mimetype='image/x-icon')
|
||||
|
||||
@app.errorhandler(404)
|
||||
def page_not_found(error): return render_template("errors/404.html"), 404
|
||||
def page_not_found(error):
|
||||
from flask import current_app
|
||||
from flask_login import current_user
|
||||
|
||||
if request.path.startswith('/admin') and current_user.is_authenticated:
|
||||
dist_path = os.path.join(current_app.root_path, 'static', 'dist')
|
||||
index_path = os.path.join(dist_path, 'index.html')
|
||||
if os.path.exists(index_path):
|
||||
current_app.logger.debug('Serving React SPA for 404 path: %s', request.path)
|
||||
return send_from_directory(dist_path, 'index.html')
|
||||
|
||||
return render_template("errors/404.html"), 404
|
||||
|
||||
@app.errorhandler(500)
|
||||
def server_error_page(error): return render_template("errors/500.html"), 500
|
||||
|
||||
@@ -127,6 +157,7 @@ def create_app(config_name=None):
|
||||
csrf.init_app(app)
|
||||
htmx.init_app(app)
|
||||
babel.init_app(app, locale_selector=get_locale_for_babel)
|
||||
socketio.init_app(app)
|
||||
|
||||
# Define custom unauthorized handler to route to correct login page based on requested endpoint
|
||||
@login_manager.unauthorized_handler
|
||||
@@ -231,13 +262,23 @@ def create_app(config_name=None):
|
||||
try:
|
||||
engine_conn_scheduler = db.engine.connect()
|
||||
if db.engine.dialect.has_table(engine_conn_scheduler, Setting.__tablename__):
|
||||
from .services import task_service
|
||||
from .services import task_service
|
||||
task_service.schedule_all_tasks()
|
||||
app.logger.info("Scheduled background tasks successfully.")
|
||||
|
||||
try:
|
||||
from .services.plex_websocket_monitor import start_plex_websocket_monitor
|
||||
start_plex_websocket_monitor(app)
|
||||
except Exception as plex_ws_error:
|
||||
app.logger.error(
|
||||
"Failed to start Plex WebSocket monitor: %s",
|
||||
plex_ws_error,
|
||||
exc_info=True,
|
||||
)
|
||||
else:
|
||||
app.logger.warning("Init.py - Settings table not found when trying to schedule tasks; task scheduling that depends on DB settings is skipped.")
|
||||
except Exception as e_task_sched:
|
||||
app.logger.error(f"Init.py - Error during task scheduling DB interaction or call: {e_task_sched}", exc_info=True)
|
||||
app.logger.error(f"Init.py - Error during task scheduling DB interaction or call: {e_task_sched}", exc_info=True)
|
||||
finally:
|
||||
if engine_conn_scheduler:
|
||||
engine_conn_scheduler.close()
|
||||
@@ -523,15 +564,21 @@ def create_app(config_name=None):
|
||||
app.register_blueprint(admin_management_bp, url_prefix='/admin/settings/admins')
|
||||
from .routes.role_management import bp as role_management_bp
|
||||
app.register_blueprint(role_management_bp, url_prefix='/admin/settings/admin/roles')
|
||||
from .routes.users import bp as users_bp
|
||||
app.register_blueprint(users_bp, url_prefix='/admin/users')
|
||||
# Legacy users blueprint disabled - now using React SPA for /admin/users
|
||||
# from .routes.users import bp as users_bp
|
||||
# app.register_blueprint(users_bp, url_prefix='/admin/users')
|
||||
from .routes.admin_user import admin_user_bp
|
||||
app.register_blueprint(admin_user_bp, url_prefix='/admin/user')
|
||||
# Legacy invites blueprints - admin disabled, public still active for invite acceptance
|
||||
from .routes.invites import bp_public as invites_public_bp, bp_admin as invites_admin_bp
|
||||
app.register_blueprint(invites_public_bp)
|
||||
app.register_blueprint(invites_admin_bp, url_prefix='/admin/invites')
|
||||
app.register_blueprint(invites_public_bp) # Keep public invites active for accepting invites
|
||||
# app.register_blueprint(invites_admin_bp, url_prefix='/admin/invites') # Disabled - using React SPA
|
||||
from .routes.api import bp as api_bp
|
||||
app.register_blueprint(api_bp, url_prefix='/admin/api')
|
||||
from .routes.public_api_v1 import bp as public_api_v1_bp
|
||||
app.register_blueprint(public_api_v1_bp, url_prefix='/api/v1')
|
||||
from .routes.api_v1 import bp as api_v1_bp
|
||||
app.register_blueprint(api_v1_bp, url_prefix='/admin/api/v1')
|
||||
from .routes.user import bp as user_bp
|
||||
app.register_blueprint(user_bp)
|
||||
# Media servers - needed for setup routes
|
||||
@@ -544,9 +591,48 @@ def create_app(config_name=None):
|
||||
app.register_blueprint(user_preferences_bp, url_prefix='/settings/preferences')
|
||||
from .routes.streaming import bp as streaming_bp
|
||||
app.register_blueprint(streaming_bp, url_prefix='/admin')
|
||||
from .routes.libraries import bp as libraries_bp
|
||||
app.register_blueprint(libraries_bp, url_prefix='/admin')
|
||||
|
||||
# Legacy libraries blueprint disabled - now using React SPA for /admin/libraries
|
||||
# from .routes.libraries import bp as libraries_bp
|
||||
# app.register_blueprint(libraries_bp, url_prefix='/admin')
|
||||
from .routes.websockets import bp as websockets_bp
|
||||
app.register_blueprint(websockets_bp)
|
||||
|
||||
# Register React SPA blueprint LAST to act as catch-all for /admin UI routes
|
||||
# This serves the React app for any /admin path not handled by the above blueprints
|
||||
# IMPORTANT: Must be registered after all other /admin blueprints
|
||||
from .routes.admin_spa import admin_spa_bp
|
||||
app.register_blueprint(admin_spa_bp, url_prefix='/admin')
|
||||
|
||||
def _serve_admin_settings_spa():
|
||||
dist_path = os.path.join(app.root_path, 'static', 'dist')
|
||||
index_path = os.path.join(dist_path, 'index.html')
|
||||
if not os.path.exists(index_path):
|
||||
current_app.logger.error('React SPA build not found at %s', index_path)
|
||||
return (
|
||||
'<h1>React App Not Built</h1>'
|
||||
'<p>The React admin interface has not been built yet.</p>'
|
||||
'<p>Please run: <code>cd frontend && npm run build</code></p>'
|
||||
), 500
|
||||
|
||||
return send_from_directory(dist_path, 'index.html')
|
||||
|
||||
@app.before_request
|
||||
def _intercept_admin_settings_spa():
|
||||
if request.method != 'GET':
|
||||
return None
|
||||
|
||||
path = request.path.rstrip('/')
|
||||
if not path.startswith('/admin/settings'):
|
||||
return None
|
||||
|
||||
if path.startswith('/admin/api'):
|
||||
return None
|
||||
|
||||
if path == '/admin/settings' or path.startswith('/admin/settings'):
|
||||
return _serve_admin_settings_spa()
|
||||
|
||||
return None
|
||||
|
||||
|
||||
register_error_handlers(app)
|
||||
|
||||
@@ -621,4 +707,4 @@ def create_app(config_name=None):
|
||||
# Make the function available as a global template function too
|
||||
app.jinja_env.globals['format_datetime_with_user_timezone'] = format_datetime_with_user_timezone_filter
|
||||
|
||||
return app
|
||||
return app
|
||||
|
||||
@@ -7,6 +7,7 @@ from flask_session import Session # If using Flask-Session
|
||||
from flask_apscheduler import APScheduler
|
||||
from flask_babel import Babel
|
||||
from flask_htmx import HTMX
|
||||
from flask_socketio import SocketIO
|
||||
from cachetools import TTLCache
|
||||
|
||||
# Database
|
||||
@@ -40,6 +41,10 @@ babel = Babel()
|
||||
# Flask-HTMX
|
||||
htmx = HTMX()
|
||||
|
||||
# Flask-SocketIO for WebSocket support
|
||||
# Use eventlet for production (Gunicorn compatibility)
|
||||
socketio = SocketIO(cors_allowed_origins="*", async_mode='eventlet')
|
||||
|
||||
# Global in-memory cache example (e.g., for Plex libraries, server status for short periods)
|
||||
# Cache for 5 minutes, max 100 items
|
||||
# You might want more sophisticated caching (e.g., Flask-Caching with Redis/Memcached) for a larger app.
|
||||
|
||||
@@ -34,6 +34,7 @@ class EventType(enum.Enum): # ... (as before, will add bot-specific events later
|
||||
PLEX_SYNC_USERS_START = "PLEX_SYNC_USERS_START"; PLEX_SYNC_USERS_COMPLETE = "PLEX_SYNC_USERS_COMPLETE"
|
||||
PLEX_USER_ADDED = "PLEX_USER_ADDED_TO_SERVER"; PLEX_USER_REMOVED = "PLEX_USER_REMOVED_FROM_SERVER"
|
||||
PLEX_USER_LIBS_UPDATED = "PLEX_USER_LIBS_UPDATED_ON_SERVER"; PLEX_SESSION_DETECTED = "PLEX_SESSION_DETECTED"
|
||||
STREAMING_SESSION_TERMINATED = "STREAMING_SESSION_TERMINATED"
|
||||
MUM_USER_ADDED_FROM_PLEX = "MUM_USER_ADDED_FROM_PLEX"
|
||||
MUM_USER_REMOVED_MISSING_IN_PLEX = "MUM_USER_REMOVED_MISSING_IN_PLEX"
|
||||
MUM_USER_LIBRARIES_EDITED = "MUM_USER_LIBRARIES_EDITED"
|
||||
|
||||
61
app/routes/admin_spa.py
Normal file
61
app/routes/admin_spa.py
Normal file
@@ -0,0 +1,61 @@
|
||||
"""Admin SPA route handler
|
||||
|
||||
Serves the React SPA for /admin UI routes while preserving API routes.
|
||||
|
||||
IMPORTANT: This blueprint should be registered LAST to act as a catch-all
|
||||
for any /admin routes not handled by API or legacy blueprints.
|
||||
"""
|
||||
|
||||
from flask import Blueprint, send_from_directory, current_app, request
|
||||
from flask_login import login_required
|
||||
from app.utils.helpers import setup_required
|
||||
import os
|
||||
|
||||
# Create blueprint with high url_prefix specificity to avoid conflicts
|
||||
admin_spa_bp = Blueprint('admin_spa', __name__)
|
||||
|
||||
|
||||
@admin_spa_bp.route('/', defaults={'path': ''})
|
||||
@admin_spa_bp.route('/<path:path>')
|
||||
@login_required
|
||||
@setup_required
|
||||
def serve_spa(path):
|
||||
"""
|
||||
Serve the React SPA for /admin UI routes.
|
||||
|
||||
This is a catch-all route that serves the React app for any path under /admin
|
||||
that hasn't been handled by other blueprints (like API routes).
|
||||
|
||||
The React app is built to app/static/dist/ during the build process.
|
||||
React Router handles client-side routing once the app loads.
|
||||
|
||||
Examples of paths handled:
|
||||
- /admin → Dashboard
|
||||
- /admin/dashboard → Dashboard
|
||||
- /admin/users → Users list
|
||||
- /admin/users/abc-123 → User detail
|
||||
- /admin/settings/general → General settings
|
||||
|
||||
API routes (/admin/api/*) are NOT handled here - they're handled by api_bp.
|
||||
"""
|
||||
# Path to the React build output
|
||||
dist_path = os.path.join(current_app.root_path, 'static', 'dist')
|
||||
index_path = os.path.join(dist_path, 'index.html')
|
||||
|
||||
# Check if the React build exists
|
||||
if not os.path.exists(index_path):
|
||||
current_app.logger.error(f"React SPA build not found at {index_path}")
|
||||
current_app.logger.error("Please run: cd frontend && npm run build")
|
||||
current_app.logger.error(f"Expected path: {index_path}")
|
||||
return (
|
||||
"<h1>React App Not Built</h1>"
|
||||
"<p>The React admin interface has not been built yet.</p>"
|
||||
"<p>Please run: <code>cd frontend && npm run build</code></p>"
|
||||
), 500
|
||||
|
||||
# Log SPA serving for debugging (can be removed in production)
|
||||
current_app.logger.debug(f"Serving React SPA for path: /admin/{path}")
|
||||
|
||||
# Serve the React app's index.html for all UI routes
|
||||
# React Router will parse the path and render the appropriate component
|
||||
return send_from_directory(dist_path, 'index.html')
|
||||
@@ -710,91 +710,47 @@ def sync_library_content(library_id):
|
||||
raise
|
||||
|
||||
if has_changes:
|
||||
# Use the original result (which already has the correct list and count fields)
|
||||
normalized_result = result.copy()
|
||||
current_app.logger.debug(f"About to render template with normalized_result keys: {list(normalized_result.keys())}")
|
||||
|
||||
try:
|
||||
# Show modal for changes or errors
|
||||
modal_html = render_template('libraries/_partials/library_content_sync_results_modal.html',
|
||||
sync_result=normalized_result,
|
||||
library_name=library.name)
|
||||
current_app.logger.debug(f"Template rendered successfully")
|
||||
except Exception as e:
|
||||
current_app.logger.debug(f"Error rendering template: {e}")
|
||||
current_app.logger.debug(f"normalized_result data: {normalized_result}")
|
||||
raise
|
||||
|
||||
try:
|
||||
if result.get('errors') and len(result.get('errors', [])) > 0:
|
||||
message = f"Library sync completed with {len(result.get('errors', []))} errors. See details."
|
||||
category = "warning"
|
||||
else:
|
||||
message = f"Library sync complete. {added} added, {updated} updated, {removed} removed."
|
||||
category = "success"
|
||||
current_app.logger.debug(f"Message created: {message}")
|
||||
except Exception as e:
|
||||
current_app.logger.debug(f"Error creating message: {e}")
|
||||
raise
|
||||
|
||||
trigger_payload = {
|
||||
"showToastEvent": {"message": message, "category": category},
|
||||
"openLibraryContentSyncResultsModal": True,
|
||||
"refreshLibraryPage": True
|
||||
}
|
||||
headers = {
|
||||
'HX-Retarget': '#library_content_sync_results_modal',
|
||||
'HX-Reswap': 'innerHTML',
|
||||
'HX-Trigger-After-Swap': json.dumps(trigger_payload)
|
||||
}
|
||||
return make_response(modal_html, 200, headers)
|
||||
# Return JSON response for React frontend
|
||||
current_app.logger.debug(f"Library sync has changes, returning JSON response")
|
||||
|
||||
# Create success message
|
||||
if result.get('errors') and len(result.get('errors', [])) > 0:
|
||||
message = f"Library sync completed with {len(result.get('errors', []))} errors. See details."
|
||||
else:
|
||||
message = f"Library sync complete. {added} added, {updated} updated, {removed} removed."
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': message,
|
||||
'result': result
|
||||
})
|
||||
else:
|
||||
# No changes - just show toast (no page refresh needed)
|
||||
current_app.logger.debug("Taking NO CHANGES path - should show toast only")
|
||||
# No changes - return JSON response
|
||||
current_app.logger.debug("Taking NO CHANGES path - returning JSON")
|
||||
total_items = result.get('total_items', 0)
|
||||
trigger_payload = {
|
||||
"showToastEvent": {
|
||||
"message": f"Library sync complete. No changes were made to {total_items} items.",
|
||||
"category": "success"
|
||||
}
|
||||
}
|
||||
headers = {
|
||||
'HX-Trigger': json.dumps(trigger_payload)
|
||||
}
|
||||
current_app.logger.debug(f"Returning empty response with headers: {headers}")
|
||||
current_app.logger.debug(f"Trigger payload: {trigger_payload}")
|
||||
response = make_response("", 200, headers)
|
||||
current_app.logger.debug(f"Response created successfully, returning to client")
|
||||
return response
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': f"Library sync complete. No changes were made to {total_items} items.",
|
||||
'result': result
|
||||
})
|
||||
else:
|
||||
current_app.logger.error(f"Library sync failed: {result.get('error', 'Unknown error')}")
|
||||
|
||||
# Return error response
|
||||
toast_payload = {
|
||||
"showToastEvent": {
|
||||
"message": f"Library sync failed: {result.get('error', 'Unknown error')}",
|
||||
"category": "error"
|
||||
}
|
||||
}
|
||||
|
||||
response = make_response("", 500)
|
||||
response.headers['HX-Trigger'] = json.dumps(toast_payload)
|
||||
return response
|
||||
|
||||
|
||||
# Return JSON error response
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': result.get('error', 'Unknown error')
|
||||
}), 500
|
||||
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"Error in library sync endpoint: {e}")
|
||||
|
||||
# Return error response
|
||||
toast_payload = {
|
||||
"showToastEvent": {
|
||||
"message": f"Library sync failed: {str(e)}",
|
||||
"category": "error"
|
||||
}
|
||||
}
|
||||
|
||||
response = make_response("", 500)
|
||||
response.headers['HX-Trigger'] = json.dumps(toast_payload)
|
||||
return response
|
||||
|
||||
# Return JSON error response
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@bp.route('/libraries/<int:library_id>/purge', methods=['POST'])
|
||||
@login_required
|
||||
@@ -894,6 +850,7 @@ def jellyfin_image_proxy():
|
||||
"""Proxy Jellyfin images through the application"""
|
||||
item_id = request.args.get('item_id')
|
||||
image_type = request.args.get('image_type', 'Primary')
|
||||
image_tag = request.args.get('image_tag')
|
||||
|
||||
#current_app.logger.info(f"API jellyfin_image_proxy: Received request for item_id='{item_id}', image_type='{image_type}'")
|
||||
|
||||
@@ -919,6 +876,8 @@ def jellyfin_image_proxy():
|
||||
|
||||
# Construct Jellyfin image URL
|
||||
jellyfin_image_url = f"{jellyfin_server.url.rstrip('/')}/Items/{item_id}/Images/{image_type}"
|
||||
if image_tag:
|
||||
jellyfin_image_url = f"{jellyfin_image_url}?tag={image_tag}"
|
||||
|
||||
#current_app.logger.info(f"API jellyfin_image_proxy: Fetching image from Jellyfin URL: {jellyfin_image_url}")
|
||||
|
||||
|
||||
6
app/routes/api_v1/__init__.py
Normal file
6
app/routes/api_v1/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from flask import Blueprint

# Shared blueprint for every /admin/api/v1 endpoint module below.
bp = Blueprint('api_v1', __name__)

# Importing the endpoint modules has the side effect of attaching their
# route handlers to ``bp``; nothing from them is referenced directly.
from . import (  # noqa: E402,F401
    dashboard,
    streams,
    statistics,
    history,
    auth,
    users,
    users_detail,
    users_actions,
    users_history,
    users_service_accounts,
    users_overseerr,
    users_settings,
    users_bulk,
    users_sync,
    users_purge,
    invites,
    invites_bulk,
    invite_public,
    streams_api,
    servers,
    libraries,
    settings_general,
    settings_user_accounts,
    settings_advanced,
    settings_logs,
    settings_discord,
    settings_streaming,
    plugins,
    admins,
    admin_roles,
    user_roles,
    metrics,
    setup,
    account,
)
|
||||
256
app/routes/api_v1/account.py
Normal file
256
app/routes/api_v1/account.py
Normal file
@@ -0,0 +1,256 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request, current_app
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.extensions import db
|
||||
from app.models import User, UserPreferences, UserType, EventType
|
||||
from app.routes.api_v1 import bp
|
||||
from app.utils.helpers import log_event
|
||||
|
||||
|
||||
def _serialize_account_payload():
    """Build the JSON-ready account payload for the signed-in user.

    Combines the user's identity fields, stored timezone preferences, and
    a small capability map the frontend uses to decide which credential
    forms to show.
    """
    user = current_user
    tz_prefs = UserPreferences.get_timezone_preference(user.id)

    # Some user objects may lack these attributes; degrade gracefully.
    display_name_getter = getattr(user, "get_display_name", lambda: None)
    user_type = user.userType
    user_type_value = user_type.value if hasattr(user_type, "value") else str(user_type)
    has_local_password = bool(user.password_hash)

    return {
        "user": {
            "uuid": user.uuid,
            "username": user.localUsername,
            "email": user.email or user.discord_email,
            "display_name": display_name_getter(),
            "user_type": user_type_value,
            "force_password_change": bool(getattr(user, "force_password_change", False)),
            "has_password": has_local_password,
            "last_login_at": user.last_login_at.isoformat() if user.last_login_at else None,
            "is_owner": user_type == UserType.OWNER,
        },
        "timezone": {
            "preference": tz_prefs.get("preference"),
            "local_timezone": tz_prefs.get("local_timezone"),
            "time_format": tz_prefs.get("time_format"),
        },
        "capabilities": {
            # A user without a local password may set one; one with a
            # password changes it through the normal flow instead.
            "can_set_initial_credentials": not has_local_password,
            "can_change_password": has_local_password,
        },
    }
|
||||
|
||||
|
||||
@bp.route("/account", methods=["GET"])
|
||||
@login_required
|
||||
def get_account():
|
||||
request_id = str(uuid4())
|
||||
|
||||
response = {
|
||||
"data": _serialize_account_payload(),
|
||||
"meta": {
|
||||
"request_id": request_id,
|
||||
"generated_at": datetime.utcnow().isoformat() + "Z",
|
||||
},
|
||||
}
|
||||
|
||||
return jsonify(response), 200
|
||||
|
||||
|
||||
@bp.route("/account/timezone", methods=["PUT"])
|
||||
@login_required
|
||||
def update_account_timezone():
|
||||
request_id = str(uuid4())
|
||||
payload = request.get_json(silent=True) or {}
|
||||
|
||||
preference = (payload.get("preference") or "").strip().lower()
|
||||
time_format = (payload.get("time_format") or "").strip()
|
||||
local_timezone = (payload.get("local_timezone") or "").strip() or None
|
||||
|
||||
if preference not in {"local", "utc"}:
|
||||
return (
|
||||
jsonify(
|
||||
{
|
||||
"error": {
|
||||
"code": "INVALID_PREFERENCE",
|
||||
"message": "preference must be either 'local' or 'utc'.",
|
||||
},
|
||||
"meta": {"request_id": request_id},
|
||||
}
|
||||
),
|
||||
400,
|
||||
)
|
||||
|
||||
if time_format not in {"12", "24"}:
|
||||
return (
|
||||
jsonify(
|
||||
{
|
||||
"error": {
|
||||
"code": "INVALID_TIME_FORMAT",
|
||||
"message": "time_format must be either '12' or '24'.",
|
||||
},
|
||||
"meta": {"request_id": request_id},
|
||||
}
|
||||
),
|
||||
400,
|
||||
)
|
||||
|
||||
if preference == "local" and not local_timezone:
|
||||
return (
|
||||
jsonify(
|
||||
{
|
||||
"error": {
|
||||
"code": "MISSING_TIMEZONE",
|
||||
"message": "local_timezone is required when preference is 'local'.",
|
||||
},
|
||||
"meta": {"request_id": request_id},
|
||||
}
|
||||
),
|
||||
400,
|
||||
)
|
||||
|
||||
try:
|
||||
UserPreferences.set_timezone_preference(
|
||||
owner_id=current_user.id,
|
||||
preference=preference,
|
||||
local_timezone=local_timezone,
|
||||
time_format=time_format,
|
||||
)
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
current_app.logger.exception("Failed to update timezone preferences: %s", exc)
|
||||
db.session.rollback()
|
||||
return (
|
||||
jsonify(
|
||||
{
|
||||
"error": {
|
||||
"code": "TIMEZONE_UPDATE_FAILED",
|
||||
"message": "Unable to update timezone preferences.",
|
||||
},
|
||||
"meta": {"request_id": request_id},
|
||||
}
|
||||
),
|
||||
500,
|
||||
)
|
||||
|
||||
response = {
|
||||
"data": _serialize_account_payload(),
|
||||
"meta": {"request_id": request_id},
|
||||
}
|
||||
return jsonify(response), 200
|
||||
|
||||
|
||||
@bp.route("/account/initial-credentials", methods=["POST"])
|
||||
@login_required
|
||||
def set_initial_credentials():
|
||||
request_id = str(uuid4())
|
||||
payload = request.get_json(silent=True) or {}
|
||||
|
||||
username = (payload.get("username") or "").strip()
|
||||
password = payload.get("password") or ""
|
||||
|
||||
if current_user.password_hash:
|
||||
return (
|
||||
jsonify(
|
||||
{
|
||||
"error": {
|
||||
"code": "CREDENTIALS_ALREADY_SET",
|
||||
"message": "Local credentials have already been configured for this account.",
|
||||
},
|
||||
"meta": {"request_id": request_id},
|
||||
}
|
||||
),
|
||||
409,
|
||||
)
|
||||
|
||||
if not username:
|
||||
return (
|
||||
jsonify(
|
||||
{
|
||||
"error": {
|
||||
"code": "INVALID_USERNAME",
|
||||
"message": "Username is required.",
|
||||
},
|
||||
"meta": {"request_id": request_id},
|
||||
}
|
||||
),
|
||||
400,
|
||||
)
|
||||
|
||||
if len(username) < 3 or len(username) > 80:
|
||||
return (
|
||||
jsonify(
|
||||
{
|
||||
"error": {
|
||||
"code": "USERNAME_LENGTH_INVALID",
|
||||
"message": "Username must be between 3 and 80 characters long.",
|
||||
},
|
||||
"meta": {"request_id": request_id},
|
||||
}
|
||||
),
|
||||
422,
|
||||
)
|
||||
|
||||
existing_user = User.get_by_local_username(username)
|
||||
if existing_user and existing_user.id != current_user.id:
|
||||
return (
|
||||
jsonify(
|
||||
{
|
||||
"error": {
|
||||
"code": "USERNAME_TAKEN",
|
||||
"message": "That username is already in use. Choose a different one.",
|
||||
},
|
||||
"meta": {"request_id": request_id},
|
||||
}
|
||||
),
|
||||
409,
|
||||
)
|
||||
|
||||
if not password or len(password) < 8:
|
||||
return (
|
||||
jsonify(
|
||||
{
|
||||
"error": {
|
||||
"code": "WEAK_PASSWORD",
|
||||
"message": "Password must be at least 8 characters long.",
|
||||
},
|
||||
"meta": {"request_id": request_id},
|
||||
}
|
||||
),
|
||||
422,
|
||||
)
|
||||
|
||||
current_user.localUsername = username
|
||||
current_user.set_password(password)
|
||||
if hasattr(current_user, "force_password_change"):
|
||||
current_user.force_password_change = False
|
||||
|
||||
try:
|
||||
db.session.commit()
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
current_app.logger.exception("Failed to set initial credentials: %s", exc)
|
||||
db.session.rollback()
|
||||
return (
|
||||
jsonify(
|
||||
{
|
||||
"error": {
|
||||
"code": "CREDENTIAL_UPDATE_FAILED",
|
||||
"message": "Unable to save credentials.",
|
||||
},
|
||||
"meta": {"request_id": request_id},
|
||||
}
|
||||
),
|
||||
500,
|
||||
)
|
||||
|
||||
log_event(
|
||||
EventType.ADMIN_PASSWORD_CHANGE,
|
||||
f"Initial local credentials configured for '{current_user.localUsername}'.",
|
||||
admin_id=current_user.id,
|
||||
)
|
||||
|
||||
response = {
|
||||
"data": _serialize_account_payload(),
|
||||
"meta": {"request_id": request_id},
|
||||
}
|
||||
return jsonify(response), 200
|
||||
|
||||
451
app/routes/api_v1/admin_roles.py
Normal file
451
app/routes/api_v1/admin_roles.py
Normal file
@@ -0,0 +1,451 @@
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.utils.helpers import permission_required
|
||||
from app.models import AdminRole, AdminPermission, User
|
||||
from app.extensions import db
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
def _serialize_admin_role(role, include_permissions=False, include_users=False):
    """Serialize an AdminRole object to JSON"""
    payload = {
        'id': role.id,
        'name': role.name,
        'description': role.description,
        'position': role.position,
        'color': role.color,
        'icon': role.icon,
        'is_staff_role': role.is_staff_role()
    }

    if include_permissions:
        payload['permissions'] = [
            {'id': perm.id, 'name': perm.name, 'description': perm.description}
            for perm in role.permissions
        ]

    if include_users:
        assigned = AdminRole.get_users_with_role(role.id)
        payload['users'] = [
            {
                'uuid': member.uuid,
                'username': member.get_display_name(),
                'user_type': member.userType.value
            }
            for member in assigned
        ]
        # Convenience field so the UI need not count the list itself.
        payload['user_count'] = len(assigned)

    return payload
|
||||
|
||||
|
||||
@bp.route('/admin-roles', methods=['GET'])
@login_required
@permission_required('manage_roles')
def list_admin_roles():
    """List all admin roles with optional filtering"""
    request_id = str(uuid4())

    def _flag(name):
        # Query-string booleans arrive as text; only the literal 'true' enables them.
        return request.args.get(name, 'false').lower() == 'true'

    with_permissions = _flag('include_permissions')
    with_users = _flag('include_users')

    # Highest position first (descending) mirrors the role hierarchy display.
    roles = AdminRole.query.order_by(AdminRole.position.desc()).all()

    serialized = [
        _serialize_admin_role(role, with_permissions, with_users)
        for role in roles
    ]

    return jsonify({
        'data': serialized,
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'total_count': len(roles),
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/admin-roles/<role_id>', methods=['GET'])
@login_required
@permission_required('manage_roles')
def get_admin_role(role_id):
    """Get a single admin role by ID"""
    request_id = str(uuid4())

    role = AdminRole.query.get(role_id)
    if role is None:
        # 404 envelope mirrors the rest of the v1 API error shape.
        return jsonify({
            'error': {
                'code': 'ROLE_NOT_FOUND',
                'message': f'Admin role with ID {role_id} not found',
                'details': {'role_id': role_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    # Detail view defaults both expansions to on, unlike the list endpoint.
    with_permissions = request.args.get('include_permissions', 'true').lower() == 'true'
    with_users = request.args.get('include_users', 'true').lower() == 'true'

    return jsonify({
        'data': _serialize_admin_role(role, with_permissions, with_users),
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/admin-roles', methods=['POST'])
@login_required
@permission_required('manage_roles')
def create_admin_role():
    """Create a new admin role.

    Expects a JSON body with a required ``name`` plus optional
    ``description``, ``position``, ``color``, ``icon`` and
    ``permission_ids``. Responds 400 for a non-JSON body, 422 for a
    missing or reserved name, 409 for a duplicate name, 201 with the
    serialized role on success, and 500 if the commit fails.
    """
    request_id = str(uuid4())
    data = request.get_json()

    if not data:
        return jsonify({
            'error': {
                'code': 'INVALID_REQUEST',
                'message': 'Request body must be JSON',
                'hint': 'Ensure Content-Type header is application/json'
            },
            'meta': {'request_id': request_id}
        }), 400

    # Validate required fields
    if not data.get('name'):
        return jsonify({
            'error': {
                'code': 'VALIDATION_ERROR',
                'message': 'Missing required field: name',
                'details': {'missing_fields': ['name']}
            },
            'meta': {'request_id': request_id}
        }), 422

    # Check if role name already exists (exact match; NOTE(review):
    # the reserved-name check below is case-insensitive but this
    # duplicate check is case-sensitive — confirm that is intended).
    existing = AdminRole.query.filter_by(name=data['name']).first()
    if existing:
        return jsonify({
            'error': {
                'code': 'DUPLICATE_ROLE_NAME',
                'message': f'Admin role with name "{data["name"]}" already exists',
                'details': {'name': data['name']},
                'hint': 'Choose a unique name for this role'
            },
            'meta': {'request_id': request_id}
        }), 409

    # Prevent creating Staff role (it's special / system-managed)
    if data['name'].lower() == 'staff':
        return jsonify({
            'error': {
                'code': 'INVALID_ROLE_NAME',
                'message': 'Cannot create a role named "Staff" - it is a system role',
                'hint': 'Choose a different name for this role'
            },
            'meta': {'request_id': request_id}
        }), 422

    # Create role (position defaults to the bottom of the hierarchy)
    role = AdminRole(
        name=data['name'],
        description=data.get('description'),
        position=data.get('position', 0),
        color=data.get('color', '#808080'),
        icon=data.get('icon')
    )

    # Add permissions if provided; unknown IDs are silently dropped by
    # the IN-filter query.
    if 'permission_ids' in data and isinstance(data['permission_ids'], list):
        permissions = AdminPermission.query.filter(AdminPermission.id.in_(data['permission_ids'])).all()
        role.permissions = permissions

    try:
        db.session.add(role)
        db.session.commit()

        return jsonify({
            'data': _serialize_admin_role(role, include_permissions=True),
            'meta': {
                'request_id': request_id,
                'deprecated': False,
                'generated_at': datetime.utcnow().isoformat() + 'Z'
            }
        }), 201
    except Exception as e:
        # Roll back so the session stays usable for subsequent requests.
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'ROLE_CREATION_FAILED',
                'message': 'Failed to create admin role',
                'details': {'error': str(e)}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/admin-roles/<role_id>', methods=['PATCH'])
@login_required
@permission_required('manage_roles')
def update_admin_role(role_id):
    """Update an existing admin role.

    Guards, in order: role must exist (404), must not be the system
    Staff role (403), and must sit below the caller in the role
    hierarchy (403). The body may change ``name``, ``description``,
    ``position``, ``color``, ``icon`` and replace ``permission_ids``.
    """
    request_id = str(uuid4())
    role = AdminRole.query.get(role_id)

    if not role:
        return jsonify({
            'error': {
                'code': 'ROLE_NOT_FOUND',
                'message': f'Admin role with ID {role_id} not found',
                'details': {'role_id': role_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    # Prevent modifying Staff role — it is system-managed.
    if role.is_staff_role():
        return jsonify({
            'error': {
                'code': 'CANNOT_MODIFY_STAFF_ROLE',
                'message': 'Cannot modify the Staff role - it is a system role',
                'hint': 'The Staff role is automatically managed'
            },
            'meta': {'request_id': request_id}
        }), 403

    # Check if current user can manage this role (hierarchy check):
    # a user may only edit roles strictly below their own highest role.
    if not current_user.can_manage_role(role):
        return jsonify({
            'error': {
                'code': 'INSUFFICIENT_ROLE_HIERARCHY',
                'message': 'Cannot manage a role with equal or higher position than your own',
                'details': {
                    'your_position': current_user.get_highest_role_position(),
                    'target_position': role.position
                }
            },
            'meta': {'request_id': request_id}
        }), 403

    data = request.get_json()
    if not data:
        return jsonify({
            'error': {
                'code': 'INVALID_REQUEST',
                'message': 'Request body must be JSON',
                'hint': 'Ensure Content-Type header is application/json'
            },
            'meta': {'request_id': request_id}
        }), 400

    # Check for name conflicts only when the name actually changes.
    if 'name' in data and data['name'] != role.name:
        if data['name'].lower() == 'staff':
            return jsonify({
                'error': {
                    'code': 'INVALID_ROLE_NAME',
                    'message': 'Cannot rename to "Staff" - it is a system role',
                    'hint': 'Choose a different name'
                },
                'meta': {'request_id': request_id}
            }), 422

        existing = AdminRole.query.filter_by(name=data['name']).first()
        if existing:
            return jsonify({
                'error': {
                    'code': 'DUPLICATE_ROLE_NAME',
                    'message': f'Admin role with name "{data["name"]}" already exists',
                    'details': {'name': data['name']}
                },
                'meta': {'request_id': request_id}
            }), 409

    # Update fields — only whitelisted attributes can be patched.
    updatable_fields = ['name', 'description', 'position', 'color', 'icon']
    for field in updatable_fields:
        if field in data:
            setattr(role, field, data[field])

    # Update permissions if provided — this REPLACES the whole set.
    if 'permission_ids' in data and isinstance(data['permission_ids'], list):
        permissions = AdminPermission.query.filter(AdminPermission.id.in_(data['permission_ids'])).all()
        role.permissions = permissions

    try:
        db.session.commit()

        return jsonify({
            'data': _serialize_admin_role(role, include_permissions=True),
            'meta': {
                'request_id': request_id,
                'deprecated': False,
                'generated_at': datetime.utcnow().isoformat() + 'Z'
            }
        })
    except Exception as e:
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'ROLE_UPDATE_FAILED',
                'message': 'Failed to update admin role',
                'details': {'error': str(e)}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/admin-roles/<role_id>', methods=['DELETE'])
@login_required
@permission_required('manage_roles')
def delete_admin_role(role_id):
    """Delete an admin role.

    Refuses to delete the system Staff role (403), roles at or above
    the caller's hierarchy position (403), or roles still assigned to
    users (409). On success the deleted role's serialized snapshot is
    echoed back so clients can show an undo-style confirmation.
    """
    request_id = str(uuid4())
    role = AdminRole.query.get(role_id)

    if not role:
        return jsonify({
            'error': {
                'code': 'ROLE_NOT_FOUND',
                'message': f'Admin role with ID {role_id} not found',
                'details': {'role_id': role_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    # Prevent deleting Staff role — it is system-managed.
    if role.is_staff_role():
        return jsonify({
            'error': {
                'code': 'CANNOT_DELETE_STAFF_ROLE',
                'message': 'Cannot delete the Staff role - it is a system role',
                'hint': 'The Staff role is automatically managed'
            },
            'meta': {'request_id': request_id}
        }), 403

    # Check if current user can manage this role (hierarchy check)
    if not current_user.can_manage_role(role):
        return jsonify({
            'error': {
                'code': 'INSUFFICIENT_ROLE_HIERARCHY',
                'message': 'Cannot delete a role with equal or higher position than your own',
                'details': {
                    'your_position': current_user.get_highest_role_position(),
                    'target_position': role.position
                }
            },
            'meta': {'request_id': request_id}
        }), 403

    # Check if role has users assigned — deletion is blocked until the
    # role has been removed from every user.
    users_with_role = AdminRole.get_users_with_role(role.id)
    if users_with_role:
        return jsonify({
            'error': {
                'code': 'ROLE_HAS_USERS',
                'message': f'Cannot delete role that is assigned to {len(users_with_role)} user(s)',
                'details': {'user_count': len(users_with_role)},
                'hint': 'Remove this role from all users before deleting'
            },
            'meta': {'request_id': request_id}
        }), 409

    # Serialize BEFORE deleting — the instance is expired after commit.
    role_data = _serialize_admin_role(role)

    try:
        db.session.delete(role)
        db.session.commit()

        return jsonify({
            'data': {
                'success': True,
                'deleted_role': role_data
            },
            'meta': {
                'request_id': request_id,
                'deprecated': False,
                'generated_at': datetime.utcnow().isoformat() + 'Z'
            }
        })
    except Exception as e:
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'ROLE_DELETION_FAILED',
                'message': 'Failed to delete admin role',
                'details': {'error': str(e)}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/admin-roles/<role_id>/users', methods=['GET'])
@login_required
@permission_required('manage_roles')
def get_admin_role_users(role_id):
    """List every user currently assigned to the given admin role.

    Returns 404 when the role does not exist; otherwise a list of
    lightweight user summaries plus role metadata in ``meta``.
    """
    rid = str(uuid4())
    record = AdminRole.query.get(role_id)

    if record is None:
        return jsonify({
            'error': {
                'code': 'ROLE_NOT_FOUND',
                'message': f'Admin role with ID {role_id} not found',
                'details': {'role_id': role_id}
            },
            'meta': {'request_id': rid}
        }), 404

    def summarize(member):
        # Compact per-user summary for the role membership list.
        return {
            'uuid': member.uuid,
            'username': member.get_display_name(),
            'user_type': member.userType.value,
            'email': member.get_email(),
            'is_active': member.is_active
        }

    members = AdminRole.get_users_with_role(record.id)

    return jsonify({
        'data': [summarize(member) for member in members],
        'meta': {
            'request_id': rid,
            'deprecated': False,
            'role': {
                'id': record.id,
                'name': record.name
            },
            'total_count': len(members),
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/admin-permissions', methods=['GET'])
@login_required
@permission_required('manage_roles')
def list_admin_permissions():
    """List every admin permission, ordered alphabetically by name."""
    rid = str(uuid4())
    records = AdminPermission.query.order_by(AdminPermission.name).all()

    entries = []
    for record in records:
        entries.append({
            'id': record.id,
            'name': record.name,
            'description': record.description
        })

    payload = {
        'data': entries,
        'meta': {
            'request_id': rid,
            'deprecated': False,
            'total_count': len(records),
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    }
    return jsonify(payload)
|
||||
190
app/routes/api_v1/admins.py
Normal file
190
app/routes/api_v1/admins.py
Normal file
@@ -0,0 +1,190 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import User, UserType, AdminRole, EventType
|
||||
from app.extensions import db
|
||||
from app.utils.helpers import permission_required, any_permission_required, log_event
|
||||
|
||||
|
||||
def _serialize_admin(user: User):
    """Serialize an owner/local admin account for API responses.

    Defensive ``getattr``/``hasattr`` checks keep this working for the
    owner record, which may predate some optional attributes.
    """
    def role_entry(role):
        return {
            'id': role.id,
            'name': role.name,
            'description': role.description,
            'position': role.position,
            'color': role.color,
            'icon': role.icon
        }

    kind = user.userType
    display = user.get_display_name() if hasattr(user, 'get_display_name') else user.localUsername

    return {
        'id': user.id,
        'uuid': getattr(user, 'uuid', None),
        'username': user.localUsername,
        'display_name': display,
        'user_type': kind.value if hasattr(kind, 'value') else str(kind),
        'email': user.email,
        'last_login_at': user.last_login_at.isoformat() if user.last_login_at else None,
        'admin_roles': [role_entry(role) for role in user.admin_roles]
    }
|
||||
|
||||
|
||||
@bp.route('/admins', methods=['GET'])
@login_required
@any_permission_required(['create_admin', 'edit_admin', 'delete_admin'])
def list_admins():
    """List the owner (first, if present) followed by all local users
    that hold at least one admin role, sorted by username."""
    rid = str(uuid4())

    role_holders = User.query.filter(
        User.userType == UserType.LOCAL,
        User.admin_roles.any()
    ).order_by(User.localUsername.asc()).all()

    owner = User.get_owner()
    # Owner leads the list; the query above only covers LOCAL accounts.
    results = [_serialize_admin(owner)] if owner else []
    results += [_serialize_admin(account) for account in role_holders]

    return jsonify({'data': results, 'meta': {'request_id': rid}})
|
||||
|
||||
|
||||
@bp.route('/admins', methods=['POST'])
@login_required
@permission_required('create_admin')
def create_admin():
    """Create a local admin account.

    Requires ``username`` and ``password`` in the JSON body; optional
    ``role_ids`` assigns admin roles at creation time. The new account
    is flagged ``force_password_change`` so the user must pick their
    own password on first login. Responds 400/409/500 on validation,
    duplicate, or persistence failures respectively.
    """
    request_id = str(uuid4())
    payload = request.get_json(silent=True) or {}

    username = (payload.get('username') or '').strip()
    password = payload.get('password')
    role_ids = payload.get('role_ids') or []

    if not username or not password:
        return jsonify({
            'error': {'code': 'INVALID_PAYLOAD', 'message': 'Username and password are required.'},
            'meta': {'request_id': request_id}
        }), 400

    if User.get_by_local_username(username):
        return jsonify({
            'error': {'code': 'USERNAME_EXISTS', 'message': 'A user with that username already exists.'},
            'meta': {'request_id': request_id}
        }), 409

    try:
        new_user = User.create_admin_user(username=username, password=password)
        if role_ids:
            # Unknown role IDs are silently ignored by the IN-filter.
            roles = AdminRole.query.filter(AdminRole.id.in_(role_ids)).all()
            new_user.set_admin_roles(roles)
        # Force the new admin to set their own password on first login.
        new_user.force_password_change = True
        db.session.add(new_user)
        db.session.commit()
        # NOTE(review): MUM_USER_ADDED_FROM_PLEX looks wrong for an
        # API-created admin — confirm a more specific EventType exists.
        log_event(EventType.MUM_USER_ADDED_FROM_PLEX, f"Admin user '{username}' created via API.", admin_id=current_user.id)
    except Exception as exc:
        db.session.rollback()
        return jsonify({
            'error': {'code': 'CREATE_FAILED', 'message': str(exc)},
            'meta': {'request_id': request_id}
        }), 500

    return jsonify({'data': _serialize_admin(new_user), 'meta': {'request_id': request_id}}), 201
|
||||
|
||||
|
||||
@bp.route('/admins/<int:admin_id>', methods=['PATCH'])
@login_required
@permission_required('edit_admin')
def update_admin(admin_id):
    """Replace the admin-role set of a local admin account.

    Self-edits are rejected (use the account page instead). The
    ``role_ids`` list REPLACES all current roles; an empty or missing
    list therefore strips every admin role from the account.
    """
    request_id = str(uuid4())
    if current_user.id == admin_id:
        return jsonify({
            'error': {'code': 'SELF_EDIT_FORBIDDEN', 'message': 'Use the account page to manage your own roles.'},
            'meta': {'request_id': request_id}
        }), 400

    # first_or_404 aborts with Flask's standard 404 (no JSON envelope).
    user = User.query.filter_by(id=admin_id, userType=UserType.LOCAL).first_or_404()
    payload = request.get_json(silent=True) or {}
    role_ids = payload.get('role_ids') or []

    roles = AdminRole.query.filter(AdminRole.id.in_(role_ids)).all()
    try:
        user.set_admin_roles(roles)
        db.session.commit()
        log_event(EventType.SETTING_CHANGE, f"Admin roles updated for '{user.localUsername}'.", admin_id=current_user.id)
    except Exception as exc:
        db.session.rollback()
        return jsonify({
            'error': {'code': 'UPDATE_FAILED', 'message': str(exc)},
            'meta': {'request_id': request_id}
        }), 500

    return jsonify({'data': _serialize_admin(user), 'meta': {'request_id': request_id}})
|
||||
|
||||
|
||||
@bp.route('/admins/<int:admin_id>/reset-password', methods=['POST'])
@login_required
@permission_required('edit_admin')
def reset_admin_password(admin_id):
    """Reset another local admin's password.

    Self-resets are rejected (a separate change-password flow handles
    those with current-password verification). The target account is
    flagged ``force_password_change`` so the temporary password must be
    replaced at next login.
    """
    request_id = str(uuid4())
    if current_user.id == admin_id:
        return jsonify({
            'error': {'code': 'SELF_RESET_FORBIDDEN', 'message': 'Cannot reset your own password via this endpoint.'},
            'meta': {'request_id': request_id}
        }), 400

    # first_or_404 aborts with Flask's standard 404 (no JSON envelope).
    user = User.query.filter_by(id=admin_id, userType=UserType.LOCAL).first_or_404()
    payload = request.get_json(silent=True) or {}
    new_password = payload.get('password')
    if not new_password:
        return jsonify({
            'error': {'code': 'INVALID_PAYLOAD', 'message': 'Password is required.'},
            'meta': {'request_id': request_id}
        }), 400

    try:
        user.set_password(new_password)
        # The temporary password must be changed on next login.
        user.force_password_change = True
        db.session.commit()
        log_event(EventType.ADMIN_PASSWORD_CHANGE, f"Password reset for '{user.localUsername}'.", admin_id=current_user.id)
    except Exception as exc:
        db.session.rollback()
        return jsonify({
            'error': {'code': 'RESET_FAILED', 'message': str(exc)},
            'meta': {'request_id': request_id}
        }), 500

    return jsonify({'data': {'success': True}, 'meta': {'request_id': request_id}})
|
||||
|
||||
|
||||
@bp.route('/admins/<int:admin_id>', methods=['DELETE'])
@login_required
@permission_required('delete_admin')
def delete_admin(admin_id):
    """Delete a local admin account.

    Self-deletion is rejected with 400; a missing account yields 404.
    On success the deletion is committed and logged, and a simple
    ``{'success': True}`` envelope is returned.
    """
    request_id = str(uuid4())
    if current_user.id == admin_id:
        return jsonify({
            'error': {'code': 'SELF_DELETE_FORBIDDEN', 'message': 'You cannot delete your own account.'},
            'meta': {'request_id': request_id}
        }), 400

    user = User.query.filter_by(id=admin_id, userType=UserType.LOCAL).first()
    if not user:
        return jsonify({
            'error': {'code': 'ADMIN_NOT_FOUND', 'message': 'Admin user not found.'},
            'meta': {'request_id': request_id}
        }), 404

    # Capture the username BEFORE delete/commit: after commit the ORM
    # instance is expired, and refreshing a deleted row raises instead
    # of returning the old attribute value.
    username = user.localUsername

    try:
        db.session.delete(user)
        db.session.commit()
        log_event(EventType.SETTING_CHANGE, f"Admin user '{username}' deleted via API.", admin_id=current_user.id)
    except Exception as exc:
        db.session.rollback()
        return jsonify({
            'error': {'code': 'DELETE_FAILED', 'message': str(exc)},
            'meta': {'request_id': request_id}
        }), 500

    return jsonify({'data': {'success': True}, 'meta': {'request_id': request_id}}), 200
|
||||
331
app/routes/api_v1/auth.py
Normal file
331
app/routes/api_v1/auth.py
Normal file
@@ -0,0 +1,331 @@
|
||||
from datetime import datetime
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request, current_app, g
|
||||
from flask_login import login_required, current_user, login_user, logout_user
|
||||
from sqlalchemy import func
|
||||
|
||||
from app.extensions import db
|
||||
from app.models import User, UserType, Setting, EventType
|
||||
from app.routes.api_v1 import bp
|
||||
from app.utils.helpers import get_csrf_token, log_event
|
||||
|
||||
|
||||
def _serialize_user(user: User | None) -> dict | None:
    """Serialize a user for session/auth API responses.

    Returns None for a missing user so callers can embed the result
    directly in a payload. Uses ``getattr`` with callable fallbacks so
    the function tolerates partially-featured user objects (e.g. the
    anonymous user or legacy rows missing newer attributes).
    """
    if not user:
        return None

    return {
        'uuid': user.uuid,
        'username': user.localUsername or user.external_username,
        'email': user.email or user.discord_email,
        'user_type': user.userType.value if hasattr(user.userType, 'value') else str(user.userType),
        'display_name': getattr(user, 'get_display_name', lambda: None)(),
        # NOTE(review): 'permissions' currently mirrors admin role NAMES,
        # not granular permission names — confirm the frontend expects this.
        'permissions': [role.name for role in getattr(user, 'admin_roles', [])],
        'admin_roles': [role.name for role in getattr(user, 'admin_roles', [])],
        'user_roles': [role.name for role in getattr(user, 'user_roles', [])],
        'has_admin_access': getattr(user, 'has_admin_access', lambda: False)(),
        'is_active': getattr(user, 'is_active', True),
        'last_login_at': user.last_login_at.isoformat() if getattr(user, 'last_login_at', None) else None,
        'force_password_change': getattr(user, 'force_password_change', False)
    }
|
||||
|
||||
|
||||
def _admin_login_query(identifier: str):
    """Find an owner/local account by case-insensitive username match."""
    needle = identifier.lower()
    admin_types = [UserType.OWNER, UserType.LOCAL]
    candidates = User.query.filter(User.userType.in_(admin_types))
    return candidates.filter(func.lower(User.localUsername) == needle).first()
|
||||
|
||||
|
||||
def _find_admin_user(identifier: str) -> User | None:
    """Resolve a login identifier to an admin-capable account.

    Matching is case-insensitive: username is tried first, then e-mail.
    Returns None for blank input or when nothing matches.
    """
    if not identifier:
        return None

    needle = identifier.strip().lower()
    if not needle:
        return None

    match = _admin_login_query(needle)
    if match:
        return match

    # Fall back to an e-mail match over the same admin-capable types.
    admin_types = [UserType.OWNER, UserType.LOCAL]
    return (
        User.query
        .filter(User.userType.in_(admin_types))
        .filter(func.lower(User.email) == needle)
        .first()
    )
|
||||
|
||||
|
||||
def _issue_session_payload(user: User | None):
    """Build the session bootstrap payload shared by login and /auth/session.

    Includes the serialized user (or None), a CSRF token for subsequent
    mutating requests, and flags the SPA uses for routing decisions.
    """
    return {
        'user': _serialize_user(user),
        'csrf_token': get_csrf_token(),
        # Placeholder — no feature flags are wired up yet.
        'feature_flags': {},
        # g.setup_complete is populated by a before-request hook elsewhere;
        # defaults to False when the hook did not run.
        'setup_complete': getattr(g, 'setup_complete', False),
        'force_password_change': getattr(user, 'force_password_change', False) if user else False
    }
|
||||
|
||||
|
||||
@bp.route('/auth/csrf-token', methods=['GET'])
def issue_csrf_token():
    """Expose a CSRF token for clients prior to authenticating."""
    body = {
        'data': {'csrf_token': get_csrf_token()},
        'meta': {'request_id': str(uuid4()), 'deprecated': False}
    }
    response = jsonify(body)
    # Tokens are session secrets: forbid any intermediary/browser caching.
    response.headers['Cache-Control'] = 'no-store'
    return response, 200
|
||||
|
||||
|
||||
@bp.route('/auth/login', methods=['POST'])
def admin_login():
    """Authenticate an owner or admin-capable local account.

    Checks, in order: payload shape (400), owner/setup state (409),
    credentials (401), admin access (403), active flag (403). On
    success the Flask-Login session is established, the login timestamp
    is persisted best-effort, and the session bootstrap payload is
    returned.
    """
    request_id = str(uuid4())
    payload = request.get_json(silent=True) or {}

    username = (payload.get('username') or '').strip()
    password = payload.get('password') or ''
    remember = bool(payload.get('remember', False))

    if not username or not password:
        return jsonify({
            'error': {
                'code': 'INVALID_PAYLOAD',
                'message': 'Username and password are required.'
            },
            'meta': {'request_id': request_id}
        }), 400

    # No owner yet means first-run setup has not completed.
    owner_exists = User.get_owner()
    if not owner_exists:
        return jsonify({
            'error': {
                'code': 'SETUP_REQUIRED',
                'message': 'Owner account not configured. Complete setup before logging in.'
            },
            'meta': {'request_id': request_id}
        }), 409

    candidate = _find_admin_user(username)

    # Single combined check so the response does not reveal whether the
    # username exists.
    if not candidate or not candidate.check_password(password):
        log_event(EventType.ADMIN_LOGIN_FAIL, f"Failed admin login attempt for '{username}'.")
        return jsonify({
            'error': {
                'code': 'INVALID_CREDENTIALS',
                'message': 'Invalid username or password.'
            },
            'meta': {'request_id': request_id}
        }), 401

    # Owners always have admin access; locals need an admin role.
    if candidate.userType != UserType.OWNER and not candidate.has_admin_access():
        return jsonify({
            'error': {
                'code': 'ADMIN_ACCESS_REQUIRED',
                'message': 'Account does not have admin access.'
            },
            'meta': {'request_id': request_id}
        }), 403

    if not candidate.is_active:
        return jsonify({
            'error': {
                'code': 'ACCOUNT_DISABLED',
                'message': 'Account is disabled.'
            },
            'meta': {'request_id': request_id}
        }), 403

    login_user(candidate, remember=remember)
    candidate.last_login_at = datetime.utcnow()

    # Best-effort: a failure to persist the timestamp must not block login.
    try:
        db.session.commit()
    except Exception as exc:
        current_app.logger.error(f"Failed to persist login timestamp: {exc}", exc_info=True)
        db.session.rollback()

    log_event(EventType.ADMIN_LOGIN_SUCCESS, f"Admin '{candidate.localUsername}' logged in.", admin_id=candidate.id)

    return jsonify({
        'data': _issue_session_payload(candidate),
        'meta': {
            'request_id': request_id,
            'deprecated': False
        }
    }), 200
|
||||
|
||||
|
||||
@bp.route('/auth/logout', methods=['POST'])
@login_required
def admin_logout():
    """Terminate the current session.

    The user is serialized BEFORE ``logout_user()`` so the audit log can
    still name the actor after the session is torn down.
    """
    request_id = str(uuid4())
    actor = _serialize_user(current_user)
    logout_user()
    if actor:
        log_event(EventType.ADMIN_LOGOUT, f"User '{actor.get('username')}' logged out.")

    return jsonify({
        'data': {'success': True},
        'meta': {
            'request_id': request_id,
            'deprecated': False
        }
    }), 200
|
||||
|
||||
|
||||
@bp.route('/auth/change-password', methods=['POST'])
@login_required
def change_password():
    """Allow an authenticated admin to change their password.

    Requires ``current_password`` and ``new_password`` (min 8 chars).
    Responds 400 for a malformed payload, 422 for a weak password, 401
    for a wrong current password, 500 if persistence fails. Clears the
    ``force_password_change`` flag on success.
    """
    request_id = str(uuid4())
    payload = request.get_json(silent=True) or {}
    current_password = payload.get('current_password') or ''
    new_password = payload.get('new_password') or ''

    if not current_password or not new_password:
        return jsonify({
            'error': {
                'code': 'INVALID_PAYLOAD',
                'message': 'Both current_password and new_password are required.'
            },
            'meta': {'request_id': request_id}
        }), 400

    # Minimal strength policy — mirrored by /auth/set-password.
    if len(new_password) < 8:
        return jsonify({
            'error': {
                'code': 'WEAK_PASSWORD',
                'message': 'New password must be at least 8 characters long.'
            },
            'meta': {'request_id': request_id}
        }), 422

    # Re-authenticate before allowing the change.
    if not current_user.check_password(current_password):
        return jsonify({
            'error': {
                'code': 'INVALID_CREDENTIALS',
                'message': 'Current password is incorrect.'
            },
            'meta': {'request_id': request_id}
        }), 401

    current_user.set_password(new_password)
    current_user.force_password_change = False
    try:
        db.session.commit()
    except Exception as exc:
        current_app.logger.error(f"Failed to update password: {exc}", exc_info=True)
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'PASSWORD_UPDATE_FAILED',
                'message': 'Failed to update password.'
            },
            'meta': {'request_id': request_id}
        }), 500

    log_event(EventType.ADMIN_PASSWORD_CHANGE, f"Password changed for '{current_user.localUsername}'.", admin_id=current_user.id)

    return jsonify({
        'data': {'success': True},
        'meta': {
            'request_id': request_id,
            'deprecated': False
        }
    }), 200
|
||||
|
||||
|
||||
@bp.route('/auth/set-password', methods=['POST'])
@login_required
def set_password():
    """Set an initial password for accounts flagged for reset.

    Unlike /auth/change-password this does not require the current
    password, but it is only allowed when the account is flagged
    ``force_password_change`` or has no password hash yet (409
    otherwise). Same 8-character minimum applies.
    """
    request_id = str(uuid4())
    payload = request.get_json(silent=True) or {}
    new_password = payload.get('new_password') or ''

    if not new_password:
        return jsonify({
            'error': {
                'code': 'INVALID_PAYLOAD',
                'message': 'new_password is required.'
            },
            'meta': {'request_id': request_id}
        }), 400

    # Minimal strength policy — mirrored by /auth/change-password.
    if len(new_password) < 8:
        return jsonify({
            'error': {
                'code': 'WEAK_PASSWORD',
                'message': 'New password must be at least 8 characters long.'
            },
            'meta': {'request_id': request_id}
        }), 422

    # Only reachable for flagged accounts or accounts with no password;
    # everyone else must use the authenticated change-password flow.
    if not current_user.force_password_change and current_user.password_hash:
        return jsonify({
            'error': {
                'code': 'PASSWORD_CHANGE_NOT_REQUIRED',
                'message': 'Account is not flagged for password reset.'
            },
            'meta': {'request_id': request_id}
        }), 409

    current_user.set_password(new_password)
    current_user.force_password_change = False

    try:
        db.session.commit()
    except Exception as exc:
        current_app.logger.error(f"Failed to set password: {exc}", exc_info=True)
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'PASSWORD_UPDATE_FAILED',
                'message': 'Failed to set password.'
            },
            'meta': {'request_id': request_id}
        }), 500

    log_event(EventType.ADMIN_PASSWORD_CHANGE, f"Password set for '{current_user.localUsername}'.", admin_id=current_user.id)

    return jsonify({
        'data': {'success': True},
        'meta': {
            'request_id': request_id,
            'deprecated': False
        }
    }), 200
|
||||
|
||||
|
||||
@bp.route('/auth/session', methods=['GET'])
@login_required
def get_session():
    """Return the current authenticated admin session.

    React uses this to bootstrap user state and enforce route guards.
    """
    meta = {
        'request_id': str(uuid4()),
        'deprecated': False,
        'config': {
            'allow_user_accounts': Setting.get_bool('ALLOW_USER_ACCOUNTS', False)
        }
    }
    return jsonify({'data': _issue_session_payload(current_user), 'meta': meta}), 200
|
||||
122
app/routes/api_v1/dashboard.py
Normal file
122
app/routes/api_v1/dashboard.py
Normal file
@@ -0,0 +1,122 @@
|
||||
from uuid import uuid4
|
||||
from datetime import datetime
|
||||
|
||||
from flask import jsonify
|
||||
from flask_login import login_required
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.routes.api import get_fresh_server_status
|
||||
|
||||
|
||||
def _serialize_datetime(value):
|
||||
if not value:
|
||||
return None
|
||||
if isinstance(value, datetime):
|
||||
try:
|
||||
return value.isoformat()
|
||||
except Exception:
|
||||
return value.strftime('%Y-%m-%dT%H:%M:%S')
|
||||
return value
|
||||
|
||||
|
||||
def _normalize_server_status(raw_status: dict) -> dict:
    """Normalize the legacy server-status dict into the v1 API shape.

    The upstream helper returns two different shapes: a multi-server
    dict (``multi_server`` truthy, with ``all_statuses`` and
    ``servers_by_service``) or a flat single-server dict. Both are
    mapped to ``{'summary', 'servers', 'grouped_by_service'}``.
    """
    # Empty/None input → zeroed summary so the frontend renders cleanly.
    if not raw_status:
        return {
            'summary': {
                'total_servers': 0,
                'online': 0,
                'offline': 0
            },
            'servers': [],
            'grouped_by_service': []
        }

    if raw_status.get('multi_server'):
        servers = [
            {
                'id': server.get('server_id'),
                'service_type': server.get('service_type'),
                # Prefer the canonical name, falling back to the user's nickname.
                'name': server.get('name') or server.get('custom_name'),
                'nickname': server.get('custom_name'),
                'actual_server_name': server.get('actual_server_name'),
                'online': server.get('online'),
                'version': server.get('version'),
                'last_check_time': _serialize_datetime(server.get('last_check_time')),
                'error_message': server.get('error_message'),
                'url': server.get('url')
            }
            for server in raw_status.get('all_statuses', [])
        ]

        # Per-service rollups (e.g. all Plex servers together).
        groups = []
        for service_type, payload in (raw_status.get('servers_by_service') or {}).items():
            groups.append({
                'service_type': service_type,
                'service_name': payload.get('service_name'),
                'online_count': payload.get('online_count', 0),
                'offline_count': payload.get('offline_count', 0),
                'total_count': payload.get('total_count', 0),
                'servers': [
                    {
                        'id': server.get('server_id'),
                        'name': server.get('name') or server.get('custom_name'),
                        'online': server.get('online'),
                        'version': server.get('version')
                    }
                    for server in payload.get('servers', [])
                ]
            })

        summary = {
            'total_servers': len(servers),
            'online': raw_status.get('online_count', 0),
            'offline': raw_status.get('offline_count', 0),
        }
    else:
        # Single-server legacy shape: synthesize a one-element list.
        servers = [{
            'id': raw_status.get('server_id'),
            'service_type': raw_status.get('service_type'),
            'name': raw_status.get('name'),
            'nickname': raw_status.get('friendly_name'),
            'actual_server_name': raw_status.get('friendly_name'),
            'online': raw_status.get('online'),
            'version': raw_status.get('version'),
            'last_check_time': _serialize_datetime(raw_status.get('last_check_time')),
            'error_message': raw_status.get('error_message'),
            'url': raw_status.get('url')
        }]
        summary = {
            'total_servers': 1,
            'online': 1 if raw_status.get('online') else 0,
            'offline': 0 if raw_status.get('online') else 1,
        }
        # No per-service grouping in single-server mode.
        groups = []

    return {
        'summary': summary,
        'servers': servers,
        'grouped_by_service': groups
    }
|
||||
|
||||
|
||||
@bp.route('/server-status', methods=['GET'])
@login_required
def get_server_status():
    """
    JSON variant of the dashboard server status widget.
    Provides counts and per-server details for the React frontend.
    """
    # Fetch the raw status and flatten it into the API shape in one step.
    normalized = _normalize_server_status(get_fresh_server_status())

    meta = {
        'request_id': str(uuid4()),
        'generated_at': datetime.utcnow().isoformat() + 'Z',
        'deprecated': False
    }

    return jsonify({'data': normalized, 'meta': meta}), 200
|
||||
226
app/routes/api_v1/history.py
Normal file
226
app/routes/api_v1/history.py
Normal file
@@ -0,0 +1,226 @@
|
||||
from uuid import uuid4
|
||||
from datetime import datetime
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required
|
||||
from sqlalchemy import desc
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import HistoryLog, EventType
|
||||
|
||||
|
||||
def _parse_iso_datetime(value: str):
|
||||
if not value:
|
||||
return None
|
||||
try:
|
||||
cleaned = value.replace('Z', '+00:00')
|
||||
return datetime.fromisoformat(cleaned)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
@bp.route('/history/recent', methods=['GET'])
@login_required
def get_recent_history():
    """
    Returns recent HistoryLog entries for dashboard widgets or activity feeds.
    Supports filtering by event_type, invite, and time window.

    Query params:
        page / page_size: pagination; page_size clamped to 1..100.
        event_types: comma-separated EventType names (unknown names ignored).
        invite_id / owner_id / local_user_id: integer FK filters.
        since: ISO-8601 lower bound on timestamp.
    """
    request_id = str(uuid4())

    # Defensive parsing: a malformed ?page= previously raised ValueError and
    # produced a 500. Fall back to the default, matching page_size handling.
    try:
        page = max(1, int(request.args.get('page', 1)))
    except (TypeError, ValueError):
        page = 1
    try:
        page_size = int(request.args.get('page_size', 25))
    except (TypeError, ValueError):
        page_size = 25
    page_size = max(1, min(page_size, 100))

    event_types_param = request.args.get('event_types')
    invite_id = request.args.get('invite_id', type=int)
    owner_id = request.args.get('owner_id', type=int)
    local_user_id = request.args.get('local_user_id', type=int)
    since = _parse_iso_datetime(request.args.get('since'))

    query = HistoryLog.query.order_by(desc(HistoryLog.timestamp))

    if event_types_param:
        requested = []
        for item in event_types_param.split(','):
            key = item.strip().upper()
            if not key:
                continue
            try:
                requested.append(EventType[key])
            except KeyError:
                # Unknown event type names are silently skipped.
                continue
        if requested:
            query = query.filter(HistoryLog.event_type.in_(requested))

    if invite_id:
        query = query.filter(HistoryLog.invite_id == invite_id)

    if owner_id:
        query = query.filter(HistoryLog.owner_id == owner_id)

    if local_user_id:
        query = query.filter(HistoryLog.local_user_id == local_user_id)

    if since:
        query = query.filter(HistoryLog.timestamp >= since)

    pagination = query.paginate(page=page, per_page=page_size, error_out=False)
    logs = [
        {
            'id': log.id,
            'timestamp': log.timestamp.isoformat() if log.timestamp else None,
            'event_type': log.event_type.value if log.event_type else None,
            'message': log.message,
            'details': log.details or {},
            'owner_id': log.owner_id,
            'local_user_id': log.local_user_id,
            'invite_id': log.invite_id
        }
        for log in pagination.items
    ]

    response = {
        'data': logs,
        'meta': {
            'request_id': request_id,
            'generated_at': datetime.utcnow().isoformat() + 'Z',
            'deprecated': False,
            'pagination': {
                'page': pagination.page,
                'page_size': pagination.per_page,
                'total_items': pagination.total,
                'total_pages': pagination.pages or 1
            },
            'filters': {
                'event_types': event_types_param.split(',') if event_types_param else [],
                'invite_id': invite_id,
                'owner_id': owner_id,
                'local_user_id': local_user_id,
                'since': since.isoformat() if since else None
            }
        }
    }
    return jsonify(response), 200
|
||||
|
||||
|
||||
@bp.route('/history', methods=['GET'])
@login_required
def search_history():
    """
    Advanced history search endpoint with comprehensive filtering.
    Supports searching by event type, user, date range, and text search.

    Query params:
        page / page_size: pagination; page_size clamped to 1..100.
        event_types: comma-separated EventType names (unknown names ignored).
        search: case-insensitive substring match on the log message.
        invite_id / owner_id / local_user_id: integer FK filters.
        user_uuid: matches logs where the user is either owner or local user.
        date_from / date_to: ISO-8601 inclusive timestamp bounds.
    """
    request_id = str(uuid4())

    # Defensive parsing: a malformed ?page= previously raised ValueError and
    # produced a 500. Fall back to the default, matching page_size handling.
    try:
        page = max(1, int(request.args.get('page', 1)))
    except (TypeError, ValueError):
        page = 1
    try:
        page_size = int(request.args.get('page_size', 25))
    except (TypeError, ValueError):
        page_size = 25
    page_size = max(1, min(page_size, 100))

    # Filter parameters
    event_types_param = request.args.get('event_types')
    search_text = request.args.get('search', '').strip()
    invite_id = request.args.get('invite_id', type=int)
    owner_id = request.args.get('owner_id', type=int)
    local_user_id = request.args.get('local_user_id', type=int)
    user_uuid = request.args.get('user_uuid')

    # Date range filters
    date_from = _parse_iso_datetime(request.args.get('date_from'))
    date_to = _parse_iso_datetime(request.args.get('date_to'))

    query = HistoryLog.query.order_by(desc(HistoryLog.timestamp))

    # Event type filter
    if event_types_param:
        requested = []
        for item in event_types_param.split(','):
            key = item.strip().upper()
            if not key:
                continue
            try:
                requested.append(EventType[key])
            except KeyError:
                # Unknown event type names are silently skipped.
                continue
        if requested:
            query = query.filter(HistoryLog.event_type.in_(requested))

    # Text search in message
    if search_text:
        query = query.filter(HistoryLog.message.ilike(f'%{search_text}%'))

    # User filters
    if invite_id:
        query = query.filter(HistoryLog.invite_id == invite_id)

    if owner_id:
        query = query.filter(HistoryLog.owner_id == owner_id)

    if local_user_id:
        query = query.filter(HistoryLog.local_user_id == local_user_id)

    if user_uuid:
        # Find user by UUID and filter by their ID (either side of the log).
        from app.models import User
        user = User.query.filter_by(uuid=user_uuid).first()
        if user:
            query = query.filter((HistoryLog.owner_id == user.id) | (HistoryLog.local_user_id == user.id))

    # Date range filters
    if date_from:
        query = query.filter(HistoryLog.timestamp >= date_from)

    if date_to:
        query = query.filter(HistoryLog.timestamp <= date_to)

    pagination = query.paginate(page=page, per_page=page_size, error_out=False)

    logs = [
        {
            'id': log.id,
            'timestamp': log.timestamp.isoformat() if log.timestamp else None,
            'event_type': log.event_type.value if log.event_type else None,
            'message': log.message,
            'details': log.details or {},
            'owner_id': log.owner_id,
            'local_user_id': log.local_user_id,
            'invite_id': log.invite_id,
            # Add related object info if available
            'owner_username': log.owner.get_display_name() if log.owner else None,
            'affected_user_username': log.affected_local_user.get_display_name() if log.affected_local_user else None,
            'invite_token': log.related_invite.token if log.related_invite else None
        }
        for log in pagination.items
    ]

    response = {
        'data': logs,
        'meta': {
            'request_id': request_id,
            'generated_at': datetime.utcnow().isoformat() + 'Z',
            'deprecated': False,
            'pagination': {
                'page': pagination.page,
                'page_size': pagination.per_page,
                'total_items': pagination.total,
                'total_pages': pagination.pages or 1
            },
            'filters': {
                'event_types': event_types_param.split(',') if event_types_param else [],
                'search_text': search_text or None,
                'invite_id': invite_id,
                'owner_id': owner_id,
                'local_user_id': local_user_id,
                'user_uuid': user_uuid,
                'date_from': date_from.isoformat() if date_from else None,
                'date_to': date_to.isoformat() if date_to else None
            }
        }
    }
    return jsonify(response), 200
|
||||
27
app/routes/api_v1/invite_public.py
Normal file
27
app/routes/api_v1/invite_public.py
Normal file
@@ -0,0 +1,27 @@
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import Invite
|
||||
|
||||
def _serialize_invite(invite: Invite):
    """Public-safe JSON projection of an invite (no admin-only fields)."""
    expires = invite.expires_at
    return {
        'token': invite.token,
        'custom_path': invite.custom_path,
        'expires_at': expires.isoformat() if expires else None,
        'max_uses': invite.max_uses,
        'current_uses': invite.current_uses,
        'is_active': invite.is_active,
        'grant_library_ids': invite.grant_library_ids,
        'allow_downloads': invite.allow_downloads,
    }
|
||||
|
||||
@bp.route('/public/invite/<token>', methods=['GET'])
def validate_invite(token):
    """Unauthenticated lookup of an invite by its token or custom path."""
    meta = {'request_id': str(uuid4())}
    invite = Invite.query.filter((Invite.token == token) | (Invite.custom_path == token)).first()
    if invite is None:
        error = {'code': 'INVITE_NOT_FOUND', 'message': 'Invite not found.'}
        return jsonify({'error': error, 'meta': meta}), 404
    return jsonify({'data': _serialize_invite(invite), 'meta': meta})
|
||||
362
app/routes/api_v1/invites.py
Normal file
362
app/routes/api_v1/invites.py
Normal file
@@ -0,0 +1,362 @@
|
||||
|
||||
from uuid import uuid4
|
||||
from datetime import datetime
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required
|
||||
from sqlalchemy import or_
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import Invite, InviteUsage, Setting, User
|
||||
from app.models_media_services import MediaLibrary, MediaServer
|
||||
from app.extensions import db
|
||||
from app.utils.helpers import permission_required
|
||||
from app.utils.timezone_utils import utcnow
|
||||
|
||||
|
||||
def _serialize_invite(invite: Invite, detailed: bool = False):
    """Serialize an Invite to a JSON-safe dict.

    With ``detailed=False`` returns the compact list-view projection (server
    summaries plus resolved names for explicitly granted libraries). With
    ``detailed=True`` additionally includes the share URL, full per-server
    library selection state, the usage log with a summary, and creator info.

    Note: this runs MediaLibrary queries per server (and per granted library
    id in the basic view), so cost grows with the number of attached servers.
    """
    # Basic server info for list view
    servers_basic = []
    for server in invite.servers:
        servers_basic.append({
            'id': server.id,
            'name': server.server_nickname,
            'service_type': server.service_type.value,
        })

    # Library information for list view.
    # An empty grant_library_ids list means "grant all libraries".
    grant_ids = invite.grant_library_ids or []
    grants_all_libraries = len(grant_ids) == 0
    libraries_info = []

    if not grants_all_libraries and grant_ids:
        # Get library details for the granted IDs
        for lib_id in grant_ids:
            # Find the library across all servers
            for server in invite.servers:
                db_libraries = MediaLibrary.query.filter_by(server_id=server.id).all()
                for db_lib in db_libraries:
                    # Match by internal_id for Kavita, external_id for others
                    identifier = str(db_lib.internal_id) if server.service_type.value.lower() == 'kavita' and db_lib.internal_id else str(db_lib.external_id)
                    if identifier == lib_id:
                        libraries_info.append({
                            'id': lib_id,
                            'name': db_lib.name,
                            'server_name': server.server_nickname,
                            'service_type': server.service_type.value,
                        })
                        # NOTE(review): this only breaks the inner library
                        # loop; remaining servers are still scanned for the
                        # same id — presumably harmless since ids look unique
                        # per deployment, but worth confirming.
                        break

    # Compact projection shared by both the list and detail views.
    base = {
        'id': invite.id,
        'token': invite.token,
        'custom_path': invite.custom_path,
        'expires_at': invite.expires_at.isoformat() if invite.expires_at else None,
        'max_uses': invite.max_uses,
        'current_uses': invite.current_uses,
        'is_active': invite.is_active,
        'grant_library_ids': invite.grant_library_ids,
        'allow_downloads': invite.allow_downloads,
        'created_at': invite.created_at.isoformat() if invite.created_at else None,
        'updated_at': invite.updated_at.isoformat() if invite.updated_at else None,
        'require_discord_auth': bool(invite.require_discord_auth),
        'require_discord_guild_membership': bool(invite.require_discord_guild_membership),
        'servers': servers_basic,  # Include servers in basic view
        'libraries': libraries_info,  # Include library details in basic view
        'grants_all_libraries': grants_all_libraries,  # Flag to indicate "All Libraries"
        'invite_to_plex_home': bool(invite.invite_to_plex_home),
        'allow_live_tv': bool(invite.allow_live_tv),
        'membership_duration_days': invite.membership_duration_days,
    }

    if not detailed:
        return base

    # --- Detailed view below: share URL, library selection, usage, creator ---
    share_base_url = Setting.get('APP_BASE_URL')
    share_url = invite.get_full_url(share_base_url) if share_base_url else None
    grant_ids = invite.grant_library_ids or []
    grants_all_libraries = grant_ids == []  # Empty list signifies all libraries

    # Per-server library lists with a 'selected' flag reflecting the grants.
    servers_data = []
    for server in invite.servers:
        libraries = []
        db_libraries = MediaLibrary.query.filter_by(server_id=server.id).all()

        for lib in db_libraries:
            identifier = None
            # Same Kavita-vs-others identifier convention as the basic view.
            if server.service_type.value.lower() == 'kavita' and lib.internal_id:
                identifier = str(lib.internal_id)
            else:
                identifier = str(lib.external_id)

            libraries.append({
                'id': identifier,
                'name': lib.name,
                'library_type': lib.library_type,
                'selected': True if grants_all_libraries else identifier in grant_ids,
                'external_id': lib.external_id,
                'internal_id': lib.internal_id,
            })

        servers_data.append({
            'id': server.id,
            'name': server.server_nickname,
            'service_type': server.service_type.value,
            'url': server.url,
            'public_url': server.public_url,
            'is_active': server.is_active,
            'libraries': libraries,
        })

    # Usage records, newest first; usages without a timestamp sort as "now".
    usages = sorted(invite.invite_usages, key=lambda usage: usage.used_at or utcnow(), reverse=True)
    usage_data = []
    accepted_count = 0
    plex_success_count = 0
    discord_success_count = 0

    for usage in usages:
        accepted_count += 1 if usage.accepted_invite else 0
        plex_success_count += 1 if usage.plex_auth_successful else 0
        discord_success_count += 1 if usage.discord_auth_successful else 0
        usage_data.append({
            'id': usage.id,
            'used_at': usage.used_at.isoformat() if usage.used_at else None,
            'ip_address': usage.ip_address,
            'plex_username': usage.plex_username,
            'plex_email': usage.plex_email,
            'discord_username': usage.discord_username,
            'discord_user_id': usage.discord_user_id,
            'accepted_invite': usage.accepted_invite,
            'status_message': usage.status_message,
            'user_uuid': usage.userId,
            'plex_auth_successful': usage.plex_auth_successful,
            'discord_auth_successful': usage.discord_auth_successful,
        })

    # Remaining uses only applies when max_uses is set; clamp at zero.
    remaining_uses = None
    if invite.max_uses is not None:
        remaining_uses = max(invite.max_uses - invite.current_uses, 0)

    base.update({
        'share_url': share_url,
        'is_expired': invite.is_expired,
        'has_reached_max_uses': invite.has_reached_max_uses,
        'is_usable': invite.is_usable,
        'remaining_uses': remaining_uses,
        'membership_duration_days': invite.membership_duration_days,
        'require_discord_auth': bool(invite.require_discord_auth),
        'require_discord_guild_membership': bool(invite.require_discord_guild_membership),
        'grant_purge_whitelist': bool(invite.grant_purge_whitelist),
        'grant_bot_whitelist': bool(invite.grant_bot_whitelist),
        'invite_to_plex_home': bool(invite.invite_to_plex_home),
        'allow_live_tv': bool(invite.allow_live_tv),
        'servers': servers_data,
        'library_selection_mode': 'all' if grants_all_libraries else 'custom',
        'usage': usage_data,
        'usage_summary': {
            'total': len(usage_data),
            'accepted': accepted_count,
            'pending': len(usage_data) - accepted_count,
            'plex_auth_successful': plex_success_count,
            'discord_auth_successful': discord_success_count,
            'last_used_at': usage_data[0]['used_at'] if usage_data else None,
        },
        'creator': {
            'id': invite.created_by_owner_id,
            'display_name': invite.owner_creator.get_display_name() if isinstance(invite.owner_creator, User) and hasattr(invite.owner_creator, 'get_display_name') else None,
        } if invite.created_by_owner_id else None,
    })

    return base
|
||||
|
||||
|
||||
@bp.route('/invites', methods=['GET'])
@login_required
@permission_required('manage_invites')
def list_invites():
    """List invites with optional status/search/server filters and pagination."""
    request_id = str(uuid4())
    page = max(1, request.args.get('page', type=int) or 1)
    page_size = max(1, min(request.args.get('page_size', type=int) or 25, 100))
    status = request.args.get('status')
    search = (request.args.get('search') or '').strip()
    server_id = request.args.get('server_id', type=int)

    now = utcnow()
    query = Invite.query.order_by(Invite.created_at.desc())

    # Map each supported status keyword onto its SQL predicates.
    status_predicates = {
        'active': (Invite.is_active.is_(True),),
        'inactive': (Invite.is_active.is_(False),),
        'expired': (Invite.expires_at.isnot(None), Invite.expires_at < now),
        'maxed': (Invite.max_uses.isnot(None), Invite.current_uses >= Invite.max_uses),
        'usable': (
            Invite.is_active.is_(True),
            or_(Invite.expires_at.is_(None), Invite.expires_at >= now),
            or_(Invite.max_uses.is_(None), Invite.current_uses < Invite.max_uses),
        ),
    }
    if status in status_predicates:
        query = query.filter(*status_predicates[status])

    if search:
        like_pattern = f"%{search}%"
        query = query.filter(
            or_(
                Invite.token.ilike(like_pattern),
                Invite.custom_path.ilike(like_pattern)
            )
        )

    if server_id:
        query = query.filter(Invite.servers.any(MediaServer.id == server_id))

    pagination = query.paginate(page=page, per_page=page_size, error_out=False)

    return jsonify({
        'data': [_serialize_invite(item) for item in pagination.items],
        'meta': {
            'request_id': request_id,
            'pagination': {
                'page': pagination.page,
                'page_size': pagination.per_page,
                'total_items': pagination.total,
                'total_pages': pagination.pages or 1
            },
            'filters': {'status': status, 'search': search, 'server_id': server_id}
        }
    })
|
||||
|
||||
|
||||
@bp.route('/invites/<int:invite_id>', methods=['GET'])
@login_required
@permission_required('manage_invites')
def get_invite(invite_id):
    """Return the detailed serialization of one invite; 404 on unknown id."""
    meta = {'request_id': str(uuid4())}
    target = Invite.query.get_or_404(invite_id)
    return jsonify({'data': _serialize_invite(target, detailed=True), 'meta': meta})
|
||||
|
||||
|
||||
@bp.route('/invites/summary', methods=['GET'])
@login_required
@permission_required('manage_invites')
def invite_summary():
    """Aggregate invite counts plus the five most recent invites and usages."""
    request_id = str(uuid4())
    now = utcnow()
    base_query = Invite.query

    # Status counts computed directly in the database.
    counts = {
        'total': base_query.count(),
        'active': base_query.filter(Invite.is_active.is_(True)).count(),
        'inactive': base_query.filter(Invite.is_active.is_(False)).count(),
        'expired': base_query.filter(Invite.expires_at.isnot(None), Invite.expires_at < now).count(),
        'maxed': base_query.filter(Invite.max_uses.isnot(None), Invite.current_uses >= Invite.max_uses).count(),
        'usable': base_query.filter(
            Invite.is_active.is_(True),
            or_(Invite.expires_at.is_(None), Invite.expires_at >= now),
            or_(Invite.max_uses.is_(None), Invite.current_uses < Invite.max_uses)
        ).count(),
    }

    def _invite_row(invite):
        # Compact projection for the "recent invites" panel.
        return {
            'id': invite.id,
            'token': invite.token,
            'custom_path': invite.custom_path,
            'created_at': invite.created_at.isoformat() if invite.created_at else None,
            'is_active': invite.is_active,
            'is_expired': invite.is_expired,
            'has_reached_max_uses': invite.has_reached_max_uses,
            'current_uses': invite.current_uses,
            'max_uses': invite.max_uses,
        }

    def _usage_row(usage):
        # Compact projection for the "recent usages" panel.
        return {
            'id': usage.id,
            'invite_id': usage.invite_id,
            'used_at': usage.used_at.isoformat() if usage.used_at else None,
            'accepted_invite': usage.accepted_invite,
            'plex_username': usage.plex_username,
            'discord_username': usage.discord_username,
            'status_message': usage.status_message,
        }

    recent_invites = base_query.order_by(Invite.created_at.desc()).limit(5).all()
    recent_usages = InviteUsage.query.order_by(InviteUsage.used_at.desc()).limit(5).all()

    return jsonify({
        'data': {
            'counts': counts,
            'recent_invites': [_invite_row(invite) for invite in recent_invites],
            'recent_usages': [_usage_row(usage) for usage in recent_usages],
        },
        'meta': {
            'request_id': request_id,
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/invites', methods=['POST'])
@login_required
@permission_required('manage_invites')
def create_invite():
    """Create a new invite from a JSON payload.

    Accepts: custom_path, expires_at (ISO-8601 or null), max_uses,
    grant_library_ids, allow_downloads, is_active.
    Returns 201 with the serialized invite, or 400 when expires_at is
    malformed (previously an unhandled ValueError produced a 500).
    """
    request_id = str(uuid4())
    payload = request.get_json() or {}

    # Validate the client-supplied timestamp before touching the DB.
    expires_at = None
    if payload.get('expires_at'):
        try:
            expires_at = datetime.fromisoformat(payload['expires_at'])
        except (TypeError, ValueError):
            return jsonify({
                'error': {
                    'code': 'INVALID_EXPIRES_AT',
                    'message': 'expires_at must be an ISO-8601 datetime string.'
                },
                'meta': {'request_id': request_id}
            }), 400

    invite = Invite(
        custom_path=payload.get('custom_path'),
        expires_at=expires_at,
        max_uses=payload.get('max_uses'),
        grant_library_ids=payload.get('grant_library_ids', []),
        allow_downloads=payload.get('allow_downloads', False),
        is_active=payload.get('is_active', True)
    )
    db.session.add(invite)
    db.session.commit()
    return jsonify({'data': _serialize_invite(invite), 'meta': {'request_id': request_id}}), 201
|
||||
|
||||
|
||||
@bp.route('/invites/<int:invite_id>', methods=['PATCH'])
@login_required
@permission_required('manage_invites')
def update_invite(invite_id):
    """Partially update an invite; only keys present in the payload change.

    Returns 404 for an unknown id and 400 when expires_at is malformed
    (previously an unhandled ValueError produced a 500).
    """
    request_id = str(uuid4())
    payload = request.get_json() or {}
    invite = Invite.query.get_or_404(invite_id)

    if 'custom_path' in payload:
        invite.custom_path = payload['custom_path']
    if 'expires_at' in payload:
        # Validate before assigning so a bad value leaves the row untouched.
        if payload['expires_at']:
            try:
                invite.expires_at = datetime.fromisoformat(payload['expires_at'])
            except (TypeError, ValueError):
                return jsonify({
                    'error': {
                        'code': 'INVALID_EXPIRES_AT',
                        'message': 'expires_at must be an ISO-8601 datetime string.'
                    },
                    'meta': {'request_id': request_id}
                }), 400
        else:
            invite.expires_at = None
    if 'max_uses' in payload:
        invite.max_uses = payload['max_uses']
    if 'grant_library_ids' in payload:
        invite.grant_library_ids = payload['grant_library_ids']
    if 'allow_downloads' in payload:
        invite.allow_downloads = payload['allow_downloads']
    if 'is_active' in payload:
        invite.is_active = payload['is_active']

    db.session.commit()
    return jsonify({'data': _serialize_invite(invite), 'meta': {'request_id': request_id}})
|
||||
|
||||
|
||||
@bp.route('/invites/<int:invite_id>', methods=['DELETE'])
@login_required
@permission_required('manage_invites')
def delete_invite(invite_id):
    """Permanently delete an invite; 404 when the id is unknown."""
    meta = {'request_id': str(uuid4())}
    target = Invite.query.get_or_404(invite_id)
    db.session.delete(target)
    db.session.commit()
    return jsonify({'data': {'success': True}, 'meta': meta})
|
||||
50
app/routes/api_v1/invites_bulk.py
Normal file
50
app/routes/api_v1/invites_bulk.py
Normal file
@@ -0,0 +1,50 @@
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import Invite
|
||||
from app.extensions import db
|
||||
from app.utils.helpers import permission_required
|
||||
|
||||
|
||||
@bp.route('/invites/bulk', methods=['POST'])
@login_required
@permission_required('manage_invites')
def bulk_invite_action():
    """Enable, disable, or delete a batch of invites in a single request."""
    request_id = str(uuid4())
    body = request.get_json() or {}
    invite_ids = body.get('ids', [])
    action = body.get('action')

    # Reject empty id lists and unknown actions up front.
    if not invite_ids or action not in {'enable', 'disable', 'delete'}:
        error = {
            'code': 'INVALID_BULK_REQUEST',
            'message': 'Provide ids list and action of enable/disable/delete.'
        }
        return jsonify({'error': error, 'meta': {'request_id': request_id}}), 400

    targets = Invite.query.filter(Invite.id.in_(invite_ids)).all()

    if action == 'delete':
        for target in targets:
            db.session.delete(target)
    else:
        # 'enable' activates, 'disable' deactivates.
        new_state = action == 'enable'
        for target in targets:
            target.is_active = new_state

    db.session.commit()

    result = {
        'success': True,
        'processed_ids': [target.id for target in targets],
        'action': action
    }
    return jsonify({'data': result, 'meta': {'request_id': request_id}})
|
||||
864
app/routes/api_v1/libraries.py
Normal file
864
app/routes/api_v1/libraries.py
Normal file
@@ -0,0 +1,864 @@
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request, current_app
|
||||
from flask_login import login_required
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.utils.helpers import permission_required
|
||||
from app.models_media_services import MediaLibrary, MediaServer
|
||||
from app.extensions import db
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
def _serialize_library(library, include_server=False):
|
||||
"""Serialize a MediaLibrary object to JSON"""
|
||||
data = {
|
||||
'id': library.id,
|
||||
'internal_id': library.internal_id,
|
||||
'external_id': library.external_id,
|
||||
'name': library.name,
|
||||
'library_type': library.library_type,
|
||||
'item_count': library.item_count,
|
||||
'last_scanned': library.last_scanned.isoformat() if library.last_scanned else None,
|
||||
'server_id': library.server_id,
|
||||
'created_at': library.created_at.isoformat() if library.created_at else None,
|
||||
'updated_at': library.updated_at.isoformat() if library.updated_at else None
|
||||
}
|
||||
|
||||
if include_server and library.server:
|
||||
data['server'] = {
|
||||
'id': library.server.id,
|
||||
'server_nickname': library.server.server_nickname,
|
||||
'server_name': library.server.server_name,
|
||||
'service_type': library.server.service_type.value
|
||||
}
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@bp.route('/libraries', methods=['GET'])
@login_required
@permission_required('view_servers')
def list_libraries():
    """List all libraries with optional filtering by server, type, and name."""
    request_id = str(uuid4())

    server_id = request.args.get('server_id', type=int)
    library_type = request.args.get('library_type')
    search = request.args.get('search', '').strip()
    include_server = request.args.get('include_server', 'false').lower() == 'true'

    # Apply each optional filter in turn.
    query = MediaLibrary.query
    if server_id:
        query = query.filter_by(server_id=server_id)
    if library_type:
        query = query.filter_by(library_type=library_type)
    if search:
        query = query.filter(MediaLibrary.name.ilike(f'%{search}%'))

    # Stable ordering: server nickname first, then library name.
    rows = (query.join(MediaServer)
                 .order_by(MediaServer.server_nickname, MediaLibrary.name)
                 .all())

    return jsonify({
        'data': [_serialize_library(row, include_server) for row in rows],
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'filters': {
                'server_id': server_id,
                'library_type': library_type,
                'search': search,
                'include_server': include_server
            },
            'total_count': len(rows),
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/libraries/<int:library_id>', methods=['GET'])
@login_required
@permission_required('view_servers')
def get_library(library_id):
    """Return one library, optionally with its server and media-item count."""
    request_id = str(uuid4())
    library = MediaLibrary.query.get(library_id)

    if library is None:
        return jsonify({
            'error': {
                'code': 'LIBRARY_NOT_FOUND',
                'message': f'Library with ID {library_id} not found',
                'details': {'library_id': library_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    want_server = request.args.get('include_server', 'true').lower() == 'true'
    want_count = request.args.get('include_items_count', 'false').lower() == 'true'

    payload = _serialize_library(library, want_server)

    if want_count:
        # Count media items belonging to this library.
        from app.models_media_services import MediaItem
        payload['media_items_count'] = MediaItem.query.filter_by(library_id=library_id).count()

    return jsonify({
        'data': payload,
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/libraries/<int:library_id>/media', methods=['GET'])
|
||||
@login_required
|
||||
@permission_required('view_servers')
|
||||
def list_library_media(library_id):
|
||||
"""List media items in a library with pagination and search"""
|
||||
request_id = str(uuid4())
|
||||
library = MediaLibrary.query.get(library_id)
|
||||
|
||||
if not library:
|
||||
return jsonify({
|
||||
'error': {
|
||||
'code': 'LIBRARY_NOT_FOUND',
|
||||
'message': f'Library with ID {library_id} not found',
|
||||
'details': {'library_id': library_id}
|
||||
},
|
||||
'meta': {'request_id': request_id}
|
||||
}), 404
|
||||
|
||||
from app.models_media_services import MediaItem
|
||||
|
||||
# Pagination parameters
|
||||
page = request.args.get('page', 1, type=int)
|
||||
page_size = request.args.get('page_size', 25, type=int)
|
||||
page_size = min(page_size, 100) # Max 100 items per page
|
||||
|
||||
# Search/filter parameters
|
||||
search = request.args.get('search', '').strip()
|
||||
item_type = request.args.get('item_type')
|
||||
sort_by = request.args.get('sort_by', 'title_asc')
|
||||
|
||||
current_app.logger.debug(f"GET /libraries/{library_id}/media - sort_by={sort_by}, page={page}, search='{search}'")
|
||||
|
||||
# Build query - exclude episodes as they belong to shows
|
||||
query = MediaItem.query.filter_by(library_id=library_id).filter(MediaItem.item_type != 'episode')
|
||||
|
||||
if search:
|
||||
query = query.filter(MediaItem.title.ilike(f'%{search}%'))
|
||||
|
||||
if item_type:
|
||||
query = query.filter_by(item_type=item_type)
|
||||
|
||||
# Apply sorting (but not for total_streams - we'll sort that after calculating counts)
|
||||
if sort_by.startswith('total_streams'):
|
||||
# Don't sort in DB query, we'll sort after adding stream counts
|
||||
query = query.order_by(MediaItem.title.asc())
|
||||
elif sort_by == 'title_desc':
|
||||
query = query.order_by(MediaItem.sort_title.desc().nullslast(), MediaItem.title.desc())
|
||||
elif sort_by == 'year_asc':
|
||||
query = query.order_by(MediaItem.year.asc().nullslast(), MediaItem.title.asc())
|
||||
elif sort_by == 'year_desc':
|
||||
query = query.order_by(MediaItem.year.desc().nullslast(), MediaItem.title.asc())
|
||||
else: # default title_asc
|
||||
query = query.order_by(MediaItem.sort_title.asc().nullsfirst(), MediaItem.title.asc())
|
||||
|
||||
# Get total count for pagination
|
||||
total_items = query.count()
|
||||
total_pages = (total_items + page_size - 1) // page_size
|
||||
|
||||
# For stream sorting, we need ALL items to properly paginate after sorting
|
||||
# For other sorts, we can paginate in the DB
|
||||
if sort_by.startswith('total_streams'):
|
||||
# Get all items (no pagination yet)
|
||||
current_app.logger.debug(f"Fetching ALL items for stream sorting (total: {total_items})")
|
||||
items = query.all()
|
||||
current_app.logger.debug(f"Fetched {len(items)} items")
|
||||
else:
|
||||
# Paginate normally
|
||||
items = query.offset((page - 1) * page_size).limit(page_size).all()
|
||||
|
||||
# Add stream counts to items
|
||||
from app.models_media_services import MediaStreamHistory
|
||||
items_data = []
|
||||
for item in items:
|
||||
item_dict = item.to_dict()
|
||||
|
||||
# Calculate stream count from history
|
||||
if item.item_type and item.item_type.lower() in ['show', 'series', 'tv']:
|
||||
# For TV shows, count all episodes of the show by matching grandparent_title
|
||||
from sqlalchemy import func
|
||||
|
||||
# Debug: Log what we're searching for
|
||||
current_app.logger.debug(f"Counting streams for TV show: '{item.title}' (type: {item.item_type})")
|
||||
|
||||
# Try case-insensitive match (more reliable)
|
||||
stream_count = MediaStreamHistory.query.filter(
|
||||
MediaStreamHistory.server_id == item.server_id,
|
||||
func.lower(MediaStreamHistory.grandparent_title) == func.lower(item.title)
|
||||
).count()
|
||||
|
||||
current_app.logger.debug(f"Found {stream_count} streams for '{item.title}'")
|
||||
else:
|
||||
# For movies and other content, count direct matches
|
||||
stream_count = MediaStreamHistory.query.filter(
|
||||
MediaStreamHistory.server_id == item.server_id,
|
||||
MediaStreamHistory.media_title == item.title
|
||||
).count()
|
||||
|
||||
# If no exact match, try case-insensitive match
|
||||
if stream_count == 0:
|
||||
from sqlalchemy import func
|
||||
stream_count = MediaStreamHistory.query.filter(
|
||||
MediaStreamHistory.server_id == item.server_id,
|
||||
func.lower(MediaStreamHistory.media_title) == func.lower(item.title)
|
||||
).count()
|
||||
|
||||
item_dict['stream_count'] = stream_count
|
||||
items_data.append(item_dict)
|
||||
|
||||
# Sort by stream count if requested (now that we have the counts)
|
||||
if sort_by == 'total_streams_desc':
|
||||
items_data.sort(key=lambda x: x.get('stream_count', 0), reverse=True)
|
||||
elif sort_by == 'total_streams_asc':
|
||||
items_data.sort(key=lambda x: x.get('stream_count', 0), reverse=False)
|
||||
|
||||
# Paginate stream-sorted results
|
||||
if sort_by.startswith('total_streams'):
|
||||
start_idx = (page - 1) * page_size
|
||||
end_idx = start_idx + page_size
|
||||
items_data = items_data[start_idx:end_idx]
|
||||
|
||||
return jsonify({
|
||||
'data': items_data,
|
||||
'meta': {
|
||||
'request_id': request_id,
|
||||
'deprecated': False,
|
||||
'pagination': {
|
||||
'page': page,
|
||||
'page_size': page_size,
|
||||
'total_items': total_items,
|
||||
'total_pages': total_pages
|
||||
},
|
||||
'filters': {
|
||||
'search': search,
|
||||
'item_type': item_type,
|
||||
'sort_by': sort_by
|
||||
},
|
||||
'generated_at': datetime.utcnow().isoformat() + 'Z'
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@bp.route('/libraries/<int:library_id>/media/<int:media_id>', methods=['GET'])
@login_required
@permission_required('view_servers')
def get_media_item(library_id, media_id):
    """Get a single media item by ID with full details.

    Returns 404 when the item does not exist or belongs to a different
    library.  When ``include_library=true`` (the default), the response
    embeds the owning library and, if linked, its server summary.
    """
    request_id = str(uuid4())

    from app.models_media_services import MediaItem

    # The item must exist *and* belong to the library named in the URL.
    item = MediaItem.query.filter_by(id=media_id, library_id=library_id).first()
    if item is None:
        return jsonify({
            'error': {
                'code': 'MEDIA_NOT_FOUND',
                'message': f'Media item with ID {media_id} not found in library {library_id}',
                'details': {'media_id': media_id, 'library_id': library_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    parent_library = MediaLibrary.query.get(library_id)
    want_library = request.args.get('include_library', 'true').lower() == 'true'

    payload = item.to_dict()

    if want_library and parent_library:
        library_info = {
            'id': parent_library.id,
            'name': parent_library.name,
            'library_type': parent_library.library_type,
            'server_id': parent_library.server_id
        }
        server = parent_library.server
        if server:
            library_info['server'] = {
                'id': server.id,
                'server_nickname': server.server_nickname,
                'server_name': server.server_name,
                'service_type': server.service_type.value
            }
        payload['library'] = library_info

    return jsonify({
        'data': payload,
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
|
||||
@bp.route('/libraries/<int:library_id>/stats', methods=['GET'])
@login_required
@permission_required('view_servers')
def get_library_stats(library_id):
    """Get statistics for a library.

    Query params:
        days (int, default 30): look-back window for aggregate stats and
            the daily chart data.

    Returns a JSON envelope with advanced library statistics, user
    engagement metrics, and a per-day plays/time chart series.
    Responds 404 when the library does not exist.
    """
    request_id = str(uuid4())
    library = MediaLibrary.query.get(library_id)

    if not library:
        return jsonify({
            'error': {
                'code': 'LIBRARY_NOT_FOUND',
                'message': f'Library with ID {library_id} not found',
                'details': {'library_id': library_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    days = request.args.get('days', 30, type=int)

    from app.routes.library_modules.statistics import get_advanced_library_statistics, get_library_user_engagement_metrics
    from app.models_media_services import MediaStreamHistory
    from datetime import timedelta, timezone
    from collections import defaultdict

    stats = get_advanced_library_statistics(library, days=days)
    user_metrics = get_library_user_engagement_metrics(library, days=days)

    # Generate daily chart data
    end_date = datetime.now(timezone.utc)
    start_date = end_date - timedelta(days=days)

    # Get all streams for chart
    # Streams are matched to the library by (server_id, library_name) —
    # there is no direct library-id foreign key on stream history.
    streams = MediaStreamHistory.query.filter(
        MediaStreamHistory.server_id == library.server_id,
        MediaStreamHistory.library_name == library.name,
        MediaStreamHistory.started_at >= start_date,
        MediaStreamHistory.started_at <= end_date
    ).all()

    # Group by date
    daily_data = defaultdict(lambda: {'plays': 0, 'time': 0})
    for stream in streams:
        date_key = stream.started_at.date().isoformat()
        daily_data[date_key]['plays'] += 1
        daily_data[date_key]['time'] += (stream.duration_seconds or 0) / 60  # Convert to minutes

    # Create chart data array
    # Walk every calendar day in the window so days without plays still
    # appear in the series with zero values (defaultdict supplies them).
    chart_data = []
    current_date = start_date.date()
    end = end_date.date()
    while current_date <= end:
        date_key = current_date.isoformat()
        chart_data.append({
            'date': date_key,
            'label': current_date.strftime('%b %d'),
            'plays': daily_data[date_key]['plays'],
            'time': round(daily_data[date_key]['time'], 1)
        })
        current_date += timedelta(days=1)

    return jsonify({
        'data': {
            'stats': stats,
            'user_metrics': user_metrics,
            'chart_data': chart_data
        },
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'filters': {'days': days},
            # NOTE(review): the window above is timezone-aware (UTC) while
            # generated_at uses naive utcnow(); presumably intentional for
            # display only — confirm consumers don't compare the two.
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/libraries/<int:library_id>/activity', methods=['GET'])
@login_required
@permission_required('view_servers')
def get_library_activity(library_id):
    """Get recent activity for a library.

    Query params:
        days (int, default 30): look-back window.
        page (int, default 1) / page_size (int, default 50, max 100):
            pagination over the stream-history rows, newest first.

    Each stream row is enriched with the viewing user's display name /
    avatar and, when a matching MediaItem can be found by title, a
    poster URL and media type.  Responds 404 for an unknown library.
    """
    request_id = str(uuid4())
    library = MediaLibrary.query.get(library_id)

    if not library:
        return jsonify({
            'error': {
                'code': 'LIBRARY_NOT_FOUND',
                'message': f'Library with ID {library_id} not found',
                'details': {'library_id': library_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    days = request.args.get('days', 30, type=int)
    page = request.args.get('page', 1, type=int)
    page_size = request.args.get('page_size', 50, type=int)
    page_size = min(page_size, 100)

    from app.models_media_services import MediaStreamHistory
    from datetime import timedelta, timezone

    end_date = datetime.now(timezone.utc)
    start_date = end_date - timedelta(days=days)

    # Query recent streams
    # Streams are matched to the library by (server_id, library_name).
    query = MediaStreamHistory.query.filter(
        MediaStreamHistory.server_id == library.server_id,
        MediaStreamHistory.library_name == library.name,
        MediaStreamHistory.started_at >= start_date,
        MediaStreamHistory.started_at <= end_date
    ).order_by(MediaStreamHistory.started_at.desc())

    total_items = query.count()
    total_pages = (total_items + page_size - 1) // page_size

    streams = query.offset((page - 1) * page_size).limit(page_size).all()

    # Serialize streams
    # NOTE(review): this loop issues one MediaItem lookup per stream row
    # (N+1 pattern); acceptable at page_size <= 100, but worth batching if
    # pages grow.
    streams_data = []
    for stream in streams:
        # Get user avatar using the same logic as get_avatar() method
        user_avatar_url = None
        if stream.user:
            user_avatar_url = stream.user.get_avatar(fallback=None)

        # Try to find the linked media item to get poster
        thumb_path = None
        media_type = None

        # Try to find the media item by matching title and library
        from app.models_media_services import MediaItem
        media_item = None

        if stream.grandparent_title:
            # This is likely a TV show episode; look up the parent show
            # by its title so the show's poster can be used.
            media_item = MediaItem.query.filter_by(
                library_id=library.id,
                title=stream.grandparent_title
            ).first()
            media_type = 'episode'
        elif stream.media_title:
            # This is likely a movie or other content
            media_item = MediaItem.query.filter_by(
                library_id=library.id,
                title=stream.media_title
            ).first()
            if media_item and media_item.item_type:
                media_type = media_item.item_type

        if media_item and media_item.thumb_path:
            # Convert thumb_path to proper proxy URL.  Stored paths come
            # in four historical shapes; normalize all of them to a URL
            # the SPA can fetch.
            if media_item.thumb_path.startswith('/admin/api/'):
                # Already a proxy URL with correct prefix
                thumb_path = media_item.thumb_path
            elif media_item.thumb_path.startswith('/api/'):
                # Legacy proxy URL without admin prefix - add it
                thumb_path = f"/admin{media_item.thumb_path}"
            elif media_item.thumb_path.startswith('http'):
                # Full URL - use as-is
                thumb_path = media_item.thumb_path
            else:
                # Plex format: regular path that needs proxy construction
                thumb_path = f"/admin/api/media/{library.server.service_type.value}/images/proxy?path={media_item.thumb_path.lstrip('/')}"

        streams_data.append({
            'id': stream.id,
            'media_title': stream.media_title,
            'grandparent_title': stream.grandparent_title,
            'parent_title': stream.parent_title,
            'media_type': media_type,
            'thumb_path': thumb_path,
            'user_display_name': stream.user.get_display_name() if stream.user else 'Unknown',
            'user_avatar_url': user_avatar_url,
            'started_at': stream.started_at.isoformat() if stream.started_at else None,
            'duration_seconds': stream.duration_seconds,
            'platform': stream.platform,
            'player': stream.player,
            'product': stream.product
        })

    return jsonify({
        'data': streams_data,
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'pagination': {
                'page': page,
                'page_size': page_size,
                'total_items': total_items,
                'total_pages': total_pages
            },
            'filters': {'days': days},
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/libraries/<int:library_id>/collections', methods=['GET'])
@login_required
@permission_required('view_servers')
def get_library_collections(library_id):
    """Get collections for a Plex library.

    Returns the Plex collections of the library identified by
    ``library_id``.  Responds 404 for an unknown library, 400 for a
    non-Plex library, 503 when the service cannot be reached, and 500
    on fetch failure.
    """
    request_id = str(uuid4())

    # Get library and verify it exists
    library = MediaLibrary.query.get(library_id)
    if not library:
        return jsonify({
            'error': {
                'code': 'LIBRARY_NOT_FOUND',
                'message': f'Library with ID {library_id} not found',
                'details': {'library_id': library_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    # BUG FIX: a library row may have no linked server; previously
    # library.server.service_type raised an unhandled AttributeError
    # (HTTP 500). Return a structured 503 instead.
    if library.server is None:
        return jsonify({
            'error': {
                'code': 'SERVICE_UNAVAILABLE',
                'message': 'Library has no associated media server',
                'details': {'library_id': library_id}
            },
            'meta': {'request_id': request_id}
        }), 503

    # Check if this is a Plex library — collections are Plex-only.
    if library.server.service_type.value.lower() != 'plex':
        return jsonify({
            'error': {
                'code': 'UNSUPPORTED_SERVICE',
                'message': 'Collections are only available for Plex libraries',
                'details': {'service_type': library.server.service_type.value}
            },
            'meta': {'request_id': request_id}
        }), 400

    try:
        from app.services.media_service_factory import MediaServiceFactory

        # Create Plex service instance
        service = MediaServiceFactory.create_service_from_db(library.server)
        if not service or not hasattr(service, 'get_library_collections'):
            return jsonify({
                'error': {
                    'code': 'SERVICE_UNAVAILABLE',
                    'message': 'Plex service is not available or does not support collections',
                    'details': {}
                },
                'meta': {'request_id': request_id}
            }), 503

        # Get collections using the library's external_id (UUID)
        collections_data = service.get_library_collections(library.external_id)

        if collections_data.get('success'):
            return jsonify({
                'data': {
                    'collections': collections_data.get('collections', []),
                    'library_name': collections_data.get('library_name', library.name),
                    'library_type': collections_data.get('library_type', 'unknown')
                },
                'meta': {
                    'request_id': request_id,
                    'deprecated': False,
                    'total_count': len(collections_data.get('collections', [])),
                    'generated_at': datetime.utcnow().isoformat() + 'Z'
                }
            })
        else:
            return jsonify({
                'error': {
                    'code': 'COLLECTION_FETCH_FAILED',
                    'message': collections_data.get('error', 'Failed to fetch collections'),
                    'details': {}
                },
                'meta': {'request_id': request_id}
            }), 500

    except Exception as e:
        # Catch-all boundary: convert any service/transport failure into a
        # structured API error rather than a bare 500.
        return jsonify({
            'error': {
                'code': 'INTERNAL_ERROR',
                'message': f'Error fetching collections: {str(e)}',
                'details': {}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
@bp.route('/libraries/<int:library_id>/media/<int:media_id>/episodes', methods=['GET'])
@login_required
@permission_required('view_servers')
def get_media_episodes(library_id, media_id):
    """Get episodes for a TV show.

    Query params:
        page / page_size (default 24, max 100): pagination.
        search: case-insensitive match against episode title or summary.
        sort_by: one of season_episode_{asc,desc} (default),
            total_streams_{asc,desc}, title_{asc,desc}, year_{asc,desc},
            added_at_{asc,desc}.

    season/episode and stream-count sorts are performed in Python after
    fetching all matching rows; the rest sort in the database.  The
    response also reports whether the show's episode list looks stale
    (``needs_sync``).  Responds 404 for unknown media/library and 400
    when the library is not a TV-show library.
    """
    request_id = str(uuid4())

    from app.models_media_services import MediaItem

    # Get the media item and verify it's in a TV show library
    media_item = MediaItem.query.filter_by(id=media_id, library_id=library_id).first()

    if not media_item:
        return jsonify({
            'error': {
                'code': 'MEDIA_NOT_FOUND',
                'message': f'Media item with ID {media_id} not found in library {library_id}',
                'details': {'media_id': media_id, 'library_id': library_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    # Get library and check if it's a TV show library
    library = MediaLibrary.query.get(library_id)
    if not library:
        return jsonify({
            'error': {
                'code': 'LIBRARY_NOT_FOUND',
                'message': f'Library with ID {library_id} not found',
                'details': {'library_id': library_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    library_type = library.library_type.lower() if library.library_type else ''
    if library_type not in ['show', 'tv', 'series', 'tvshows']:
        return jsonify({
            'error': {
                'code': 'NOT_A_TV_SHOW_LIBRARY',
                'message': 'This library is not a TV show library',
                'details': {'library_type': library.library_type}
            },
            'meta': {'request_id': request_id}
        }), 400

    # Pagination and filter parameters
    page = request.args.get('page', 1, type=int)
    page_size = request.args.get('page_size', 24, type=int)
    page_size = min(page_size, 100)  # Max 100 items per page
    search = request.args.get('search', '').strip()
    sort_by = request.args.get('sort_by', 'season_episode_asc')

    # Build query for episodes
    # Episodes may reference the show by either its external_id or its
    # rating_key depending on the service that synced them, so match both.
    from sqlalchemy import or_
    query = MediaItem.query.filter(
        MediaItem.library_id == library_id,
        MediaItem.item_type == 'episode',
        or_(
            MediaItem.parent_id == media_item.external_id,
            MediaItem.parent_id == media_item.rating_key
        )
    )

    # Apply search filter
    if search:
        search_term = f'%{search}%'
        query = query.filter(
            or_(
                MediaItem.title.ilike(search_term),
                MediaItem.summary.ilike(search_term)
            )
        )

    # Get total count before pagination
    total_items = query.count()
    total_pages = (total_items + page_size - 1) // page_size

    # For season/episode sorting or stream count sorting, we need to fetch all and sort in Python
    if sort_by.startswith('season_episode') or sort_by.startswith('total_streams'):
        # Get all episodes (no pagination yet)
        all_episodes = query.all()

        # Convert to dict
        episodes_data = [ep.to_dict() for ep in all_episodes]

        # Add stream counts for each episode
        # NOTE(review): one count query per episode (N+1); fine for small
        # shows but may need batching for very large ones.
        from app.models_media_services import MediaStreamHistory
        for ep_dict in episodes_data:
            # Find the original episode object to get server_id
            ep_obj = next((e for e in all_episodes if e.id == ep_dict['id']), None)
            if ep_obj:
                # Count streams for this specific episode
                stream_count = MediaStreamHistory.query.filter(
                    MediaStreamHistory.server_id == ep_obj.server_id,
                    MediaStreamHistory.media_title == ep_obj.title
                ).count()
                ep_dict['stream_count'] = stream_count
            else:
                ep_dict['stream_count'] = 0

        # Sort based on sort_by parameter
        if sort_by.startswith('season_episode'):
            # Sort by season and episode number
            reverse = sort_by.endswith('_desc')
            def season_episode_sort_key(episode):
                # Treat missing season/episode numbers as 0 so they sort first.
                season = episode.get('season_number', 0) or 0
                episode_num = episode.get('episode_number', 0) or 0
                return (season, episode_num)
            episodes_data.sort(key=season_episode_sort_key, reverse=reverse)
        elif sort_by == 'total_streams_desc':
            episodes_data.sort(key=lambda x: x.get('stream_count', 0), reverse=True)
        elif sort_by == 'total_streams_asc':
            episodes_data.sort(key=lambda x: x.get('stream_count', 0), reverse=False)

        # Now paginate the sorted data
        start_idx = (page - 1) * page_size
        end_idx = start_idx + page_size
        episodes_data = episodes_data[start_idx:end_idx]

    else:
        # Apply database-level sorting for other fields
        if sort_by == 'title_asc':
            query = query.order_by(MediaItem.sort_title.asc())
        elif sort_by == 'title_desc':
            query = query.order_by(MediaItem.sort_title.desc())
        elif sort_by == 'year_asc':
            query = query.order_by(MediaItem.year.asc())
        elif sort_by == 'year_desc':
            query = query.order_by(MediaItem.year.desc())
        elif sort_by == 'added_at_asc':
            query = query.order_by(MediaItem.added_at.asc())
        elif sort_by == 'added_at_desc':
            query = query.order_by(MediaItem.added_at.desc())
        else:
            # Unknown sort value falls back to title ascending.
            query = query.order_by(MediaItem.sort_title.asc())

        # Paginate
        episodes = query.offset((page - 1) * page_size).limit(page_size).all()
        episodes_data = [ep.to_dict() for ep in episodes]

    # Check if episodes need syncing
    # A show is considered stale after 24h without a sync (or if it has
    # never been synced at all).
    needs_sync = False
    if media_item.last_synced:
        from datetime import timedelta
        # NOTE(review): assumes last_synced is a naive UTC datetime —
        # subtracting an aware value from utcnow() would raise; confirm
        # against the column definition.
        sync_age = datetime.utcnow() - media_item.last_synced
        needs_sync = sync_age > timedelta(hours=24)
    else:
        needs_sync = True

    return jsonify({
        'data': {
            'episodes': episodes_data,
            'show_info': {
                'id': media_item.id,
                'title': media_item.title,
                'external_id': media_item.external_id,
                'rating_key': media_item.rating_key,
                'last_synced': media_item.last_synced.isoformat() if media_item.last_synced else None
            }
        },
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'pagination': {
                'page': page,
                'page_size': page_size,
                'total_items': total_items,
                'total_pages': total_pages
            },
            'filters': {
                'search': search,
                'sort_by': sort_by
            },
            'needs_sync': needs_sync,
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/libraries/<int:library_id>/media/<int:media_id>/episodes/sync', methods=['POST'])
@login_required
@permission_required('view_servers')
def sync_media_episodes(library_id, media_id):
    """Sync episodes for a TV show.

    Triggers MediaSyncService.sync_show_episodes for the show identified
    by ``media_id`` after validating that the media item exists, the
    library exists, and the library is a TV-show library.
    """
    request_id = str(uuid4())

    from app.models_media_services import MediaItem
    from app.services.media_sync_service import MediaSyncService

    def _error(status, code, message, details):
        # Uniform error envelope used by all failure paths below.
        return jsonify({
            'error': {'code': code, 'message': message, 'details': details},
            'meta': {'request_id': request_id}
        }), status

    show = MediaItem.query.filter_by(id=media_id, library_id=library_id).first()
    if show is None:
        return _error(
            404,
            'MEDIA_NOT_FOUND',
            f'Media item with ID {media_id} not found in library {library_id}',
            {'media_id': media_id, 'library_id': library_id},
        )

    library = MediaLibrary.query.get(library_id)
    if library is None:
        return _error(
            404,
            'LIBRARY_NOT_FOUND',
            f'Library with ID {library_id} not found',
            {'library_id': library_id},
        )

    kind = library.library_type.lower() if library.library_type else ''
    if kind not in ('show', 'tv', 'series', 'tvshows'):
        return _error(
            400,
            'NOT_A_TV_SHOW_LIBRARY',
            'This library is not a TV show library',
            {'library_type': library.library_type},
        )

    try:
        # Trigger episode sync and report the per-category counts back.
        outcome = MediaSyncService.sync_show_episodes(media_id)

        if outcome['success']:
            return jsonify({
                'success': True,
                'message': f"Episodes synced for {show.title}",
                'result': {
                    'added': outcome.get('added', 0),
                    'updated': outcome.get('updated', 0),
                    'removed': outcome.get('removed', 0),
                    'total': outcome.get('total', 0),
                },
                'meta': {
                    'request_id': request_id,
                    'generated_at': datetime.utcnow().isoformat() + 'Z',
                },
            })

        return _error(
            500,
            'SYNC_FAILED',
            outcome.get('error', 'Failed to sync episodes'),
            {},
        )

    except Exception as e:
        return _error(500, 'INTERNAL_ERROR', f'Error syncing episodes: {str(e)}', {})
|
||||
251
app/routes/api_v1/metrics.py
Normal file
251
app/routes/api_v1/metrics.py
Normal file
@@ -0,0 +1,251 @@
|
||||
|
||||
from uuid import uuid4
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import User, UserType, Invite, HistoryLog, EventType
|
||||
from app.models_media_services import MediaServer, MediaLibrary, MediaStreamHistory
|
||||
from app.extensions import db
|
||||
|
||||
|
||||
@bp.route('/metrics', methods=['GET'])
@login_required
def get_metrics():
    """
    Get aggregated metrics for dashboard KPIs.
    Returns counts for users, invites, servers, active sessions, and recent activity.
    """
    request_id = str(uuid4())

    # Time ranges for activity metrics (naive UTC timestamps, matching
    # the utcnow()-based columns queried below).
    now = datetime.utcnow()
    day_ago = now - timedelta(days=1)
    week_ago = now - timedelta(days=7)
    month_ago = now - timedelta(days=30)

    # User metrics: "users" here means local/owner accounts; service
    # accounts are reported separately.
    total_users = User.query.filter(User.userType.in_([UserType.LOCAL, UserType.OWNER])).count()
    active_users = User.query.filter(
        User.userType.in_([UserType.LOCAL, UserType.OWNER]),
        User.is_active == True
    ).count()

    service_users = User.query.filter_by(userType=UserType.SERVICE).count()
    active_service_users = User.query.filter_by(
        userType=UserType.SERVICE,
        is_active=True
    ).count()

    # Recent user activity
    users_created_this_month = User.query.filter(
        User.userType.in_([UserType.LOCAL, UserType.OWNER]),
        User.created_at >= month_ago
    ).count()

    # Invite metrics
    total_invites = Invite.query.count()
    active_invites = Invite.query.filter_by(is_active=True).count()
    used_invites = Invite.query.filter(Invite.current_uses > 0).count()

    # Recent invite activity: count successful-use events across all
    # invite acceptance flows in the last week.
    invites_used_this_week = HistoryLog.query.filter(
        HistoryLog.event_type.in_([
            EventType.INVITE_USED_SUCCESS_PLEX,
            EventType.INVITE_USED_SUCCESS_DISCORD,
            EventType.INVITE_USER_ACCEPTED_AND_SHARED
        ]),
        HistoryLog.timestamp >= week_ago
    ).count()

    # Server metrics
    total_servers = MediaServer.query.count()
    active_servers = MediaServer.query.filter_by(is_active=True).count()
    total_libraries = MediaLibrary.query.count()

    # Streaming metrics
    total_streams = MediaStreamHistory.query.count()
    streams_today = MediaStreamHistory.query.filter(
        MediaStreamHistory.started_at >= day_ago
    ).count()
    streams_this_week = MediaStreamHistory.query.filter(
        MediaStreamHistory.started_at >= week_ago
    ).count()

    # Active sessions (streams without stopped_at)
    active_sessions = MediaStreamHistory.query.filter(
        MediaStreamHistory.stopped_at.is_(None)
    ).count()

    # History/Activity metrics
    recent_events_today = HistoryLog.query.filter(
        HistoryLog.timestamp >= day_ago
    ).count()

    recent_events_week = HistoryLog.query.filter(
        HistoryLog.timestamp >= week_ago
    ).count()

    # Most active event types (last 7 days)
    from sqlalchemy import func
    top_events = db.session.query(
        HistoryLog.event_type,
        func.count(HistoryLog.id).label('count')
    ).filter(
        HistoryLog.timestamp >= week_ago
    ).group_by(HistoryLog.event_type).order_by(func.count(HistoryLog.id).desc()).limit(5).all()

    top_event_types = [{
        'event_type': event[0].value if event[0] else 'unknown',
        'count': event[1]
    } for event in top_events]

    # Build response
    metrics = {
        'users': {
            'total': total_users,
            'active': active_users,
            'service_accounts': service_users,
            'active_service_accounts': active_service_users,
            'created_this_month': users_created_this_month
        },
        'invites': {
            'total': total_invites,
            'active': active_invites,
            'used': used_invites,
            'used_this_week': invites_used_this_week
        },
        'servers': {
            'total': total_servers,
            'active': active_servers,
            'libraries': total_libraries
        },
        'streaming': {
            'total_streams': total_streams,
            'active_sessions': active_sessions,
            'streams_today': streams_today,
            'streams_this_week': streams_this_week
        },
        'activity': {
            'events_today': recent_events_today,
            'events_this_week': recent_events_week,
            'top_event_types': top_event_types
        },
        'generated_at': now.isoformat() + 'Z'
    }

    return jsonify({
        'data': metrics,
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'generated_at': datetime.utcnow().isoformat() + 'Z',
            'time_ranges': {
                'day_ago': day_ago.isoformat() + 'Z',
                'week_ago': week_ago.isoformat() + 'Z',
                'month_ago': month_ago.isoformat() + 'Z'
            }
        }
    })
|
||||
|
||||
|
||||
@bp.route('/metrics/users', methods=['GET'])
@login_required
def get_user_metrics():
    """Get detailed user metrics with breakdown by type and status.

    Returns per-type user counts, active/inactive totals, and counts of
    accounts already expired or expiring within the next 7 days.
    """
    request_id = str(uuid4())

    # BUG FIX: `func` was referenced below without being imported in this
    # module (it is only imported locally inside get_metrics), so this
    # endpoint raised a NameError on every request.
    from sqlalchemy import func

    # Query user counts by type
    users_by_type = db.session.query(
        User.userType,
        func.count(User.id).label('count')
    ).group_by(User.userType).all()

    type_breakdown = {
        user_type[0].value: user_type[1]
        for user_type in users_by_type
    }

    # Active vs inactive
    active_count = User.query.filter_by(is_active=True).count()
    inactive_count = User.query.filter_by(is_active=False).count()

    # Users with expiration dates
    expired_count = User.query.filter(
        User.access_expires_at < datetime.utcnow()
    ).count()

    # Expiring within the next 7 days (but not yet expired).
    upcoming_expirations = User.query.filter(
        User.access_expires_at > datetime.utcnow(),
        User.access_expires_at < datetime.utcnow() + timedelta(days=7)
    ).count()

    return jsonify({
        'data': {
            'by_type': type_breakdown,
            'active': active_count,
            'inactive': inactive_count,
            'expired': expired_count,
            'expiring_soon': upcoming_expirations
        },
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/metrics/streaming', methods=['GET'])
@login_required
def get_streaming_metrics():
    """Get detailed streaming metrics with time-series data.

    Query params:
        days (int, default 7, max 90): look-back window.

    Returns stream totals, average session duration, and a per-server
    breakdown for the window.
    """
    request_id = str(uuid4())

    # BUG FIX: was `int(request.args.get('days', 7))`, which raised an
    # unhandled ValueError (HTTP 500) on non-numeric input. Use Werkzeug's
    # type coercion (falls back to the default on bad input), consistent
    # with every other endpoint in this API.
    days = request.args.get('days', 7, type=int)
    days = min(days, 90)  # Max 90 days

    cutoff = datetime.utcnow() - timedelta(days=days)

    # Total streams in period
    total_streams = MediaStreamHistory.query.filter(
        MediaStreamHistory.started_at >= cutoff
    ).count()

    # Average session duration (only over rows that recorded a duration)
    from sqlalchemy import func as sql_func
    avg_duration = db.session.query(
        sql_func.avg(MediaStreamHistory.duration_seconds)
    ).filter(
        MediaStreamHistory.started_at >= cutoff,
        MediaStreamHistory.duration_seconds.isnot(None)
    ).scalar()

    # Streams by server
    streams_by_server = db.session.query(
        MediaServer.server_nickname,
        sql_func.count(MediaStreamHistory.id).label('count')
    ).join(MediaStreamHistory).filter(
        MediaStreamHistory.started_at >= cutoff
    ).group_by(MediaServer.server_nickname).all()

    server_breakdown = [{
        'server': server[0],
        'count': server[1]
    } for server in streams_by_server]

    return jsonify({
        'data': {
            'total_streams': total_streams,
            # avg() returns None when no rows matched; report 0 instead.
            'avg_duration_seconds': round(avg_duration, 2) if avg_duration else 0,
            'by_server': server_breakdown,
            'days': days
        },
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
165
app/routes/api_v1/plugins.py
Normal file
165
app/routes/api_v1/plugins.py
Normal file
@@ -0,0 +1,165 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models_plugins import Plugin, PluginRepository
|
||||
from app.services.plugin_manager import plugin_manager
|
||||
from app.extensions import db
|
||||
from app.utils.helpers import permission_required, log_event
|
||||
from app.models import EventType
|
||||
|
||||
|
||||
def _serialize_plugin(plugin: Plugin):
|
||||
return {
|
||||
'plugin_id': plugin.plugin_id,
|
||||
'name': plugin.name,
|
||||
'description': plugin.description,
|
||||
'version': plugin.version,
|
||||
'type': plugin.plugin_type.value if plugin.plugin_type else None,
|
||||
'status': plugin.status.value if plugin.status else None,
|
||||
'author': plugin.author,
|
||||
'homepage': plugin.homepage,
|
||||
'repository': plugin.repository,
|
||||
'license': plugin.license,
|
||||
'installed_at': plugin.installed_at.isoformat() if plugin.installed_at else None,
|
||||
'last_updated': plugin.last_updated.isoformat() if plugin.last_updated else None,
|
||||
'last_error': plugin.last_error,
|
||||
'servers_count': plugin.servers_count,
|
||||
'supported_features': plugin.supported_features,
|
||||
'config_schema': plugin.config_schema,
|
||||
'default_config': plugin.default_config
|
||||
}
|
||||
|
||||
|
||||
def _plugin_action(plugin_id: str, action: str):
    """Run a lifecycle *action* on the plugin identified by *plugin_id*.

    Supported actions: 'enable', 'disable', 'install', 'uninstall'.
    Returns the serialized plugin on success; a 400 envelope for an unknown
    action; a 500 envelope when the plugin manager reports failure.
    """
    request_id = str(uuid4())
    plugin = Plugin.query.filter_by(plugin_id=plugin_id).first_or_404()

    handlers = {
        'enable': plugin_manager.enable_plugin,
        'disable': plugin_manager.disable_plugin,
        'install': plugin_manager.install_plugin,
        'uninstall': plugin_manager.uninstall_plugin,
    }
    handler = handlers.get(action)
    if handler is None:
        return jsonify({'error': {'code': 'INVALID_ACTION', 'message': 'Unsupported plugin action.'}, 'meta': {'request_id': request_id}}), 400

    if not handler(plugin_id):
        return jsonify({'error': {'code': 'PLUGIN_ACTION_FAILED', 'message': plugin.last_error or 'Plugin action failed.'}, 'meta': {'request_id': request_id}}), 500

    db.session.commit()
    # Use a real past tense for the audit log: the old f"...{action}d." form
    # produced "installd." / "uninstalld.".
    past_tense = {
        'enable': 'enabled',
        'disable': 'disabled',
        'install': 'installed',
        'uninstall': 'uninstalled',
    }[action]
    log_event(EventType.SETTING_CHANGE, f"Plugin '{plugin_id}' {past_tense}.", admin_id=current_user.id)
    return jsonify({'data': _serialize_plugin(plugin), 'meta': {'request_id': request_id}})
|
||||
|
||||
|
||||
@bp.route('/plugins', methods=['GET'])
@login_required
@permission_required('manage_plugins')
def list_plugins():
    """List installed plugins, each annotated with its catalog entry (if any)."""
    request_id = str(uuid4())

    # Build a plugin_id -> catalog-entry map; the manager may yield either
    # Plugin model instances or plain dicts.
    catalog = {}
    for candidate in plugin_manager.get_available_plugins():
        if isinstance(candidate, Plugin):
            catalog[candidate.plugin_id] = _serialize_plugin(candidate)
        elif isinstance(candidate, dict):
            candidate_id = candidate.get('plugin_id')
            if candidate_id:
                catalog[candidate_id] = candidate

    data = []
    for installed in Plugin.query.order_by(Plugin.name.asc()).all():
        serialized = _serialize_plugin(installed)
        serialized['available'] = catalog.get(installed.plugin_id)
        data.append(serialized)

    return jsonify({'data': data, 'meta': {'request_id': request_id}})
|
||||
|
||||
|
||||
@bp.route('/plugins/<plugin_id>/enable', methods=['POST'])
@login_required
@permission_required('manage_plugins')
def enable_plugin(plugin_id):
    """Enable an installed plugin; delegates to the shared `_plugin_action` helper."""
    return _plugin_action(plugin_id, 'enable')
|
||||
|
||||
|
||||
@bp.route('/plugins/<plugin_id>/disable', methods=['POST'])
@login_required
@permission_required('manage_plugins')
def disable_plugin(plugin_id):
    """Disable a plugin; delegates to the shared `_plugin_action` helper."""
    return _plugin_action(plugin_id, 'disable')
|
||||
|
||||
|
||||
@bp.route('/plugins/<plugin_id>/install', methods=['POST'])
@login_required
@permission_required('manage_plugins')
def install_plugin(plugin_id):
    """Install a plugin; delegates to the shared `_plugin_action` helper."""
    return _plugin_action(plugin_id, 'install')
|
||||
|
||||
|
||||
@bp.route('/plugins/<plugin_id>/uninstall', methods=['POST'])
@login_required
@permission_required('manage_plugins')
def uninstall_plugin(plugin_id):
    """Uninstall a plugin; delegates to the shared `_plugin_action` helper."""
    return _plugin_action(plugin_id, 'uninstall')
|
||||
|
||||
|
||||
@bp.route('/plugin-repositories', methods=['GET'])
@login_required
@permission_required('manage_plugins')
def list_plugin_repositories():
    """Return every configured plugin repository, sorted by name."""
    request_id = str(uuid4())

    def _repo_payload(repo):
        # JSON-safe view of one repository row.
        return {
            'id': repo.id,
            'name': repo.name,
            'url': repo.url,
            'description': repo.description,
            'is_enabled': repo.is_enabled,
            'is_official': repo.is_official,
            'last_sync': repo.last_sync.isoformat() if repo.last_sync else None,
            'last_error': repo.last_error,
        }

    repositories = PluginRepository.query.order_by(PluginRepository.name.asc()).all()
    return jsonify({'data': [_repo_payload(repo) for repo in repositories], 'meta': {'request_id': request_id}})
|
||||
|
||||
|
||||
@bp.route('/plugin-repositories', methods=['POST'])
@login_required
@permission_required('manage_plugins')
def create_plugin_repository():
    """Register a plugin repository from a JSON payload (name and url required)."""
    request_id = str(uuid4())
    payload = request.get_json(silent=True) or {}

    name = (payload.get('name') or '').strip()
    url_value = (payload.get('url') or '').strip()
    if not (name and url_value):
        return jsonify({'error': {'code': 'INVALID_PAYLOAD', 'message': 'Name and URL are required.'}, 'meta': {'request_id': request_id}}), 400

    repo = PluginRepository(name=name, url=url_value, description=payload.get('description'))
    db.session.add(repo)
    db.session.commit()

    log_event(EventType.SETTING_CHANGE, f"Plugin repository '{name}' added.", admin_id=current_user.id)
    return jsonify({'data': {'id': repo.id}, 'meta': {'request_id': request_id}}), 201
|
||||
|
||||
|
||||
@bp.route('/plugin-repositories/<int:repo_id>', methods=['DELETE'])
@login_required
@permission_required('manage_plugins')
def delete_plugin_repository(repo_id):
    """Delete a plugin repository by primary key; 404 if it does not exist."""
    request_id = str(uuid4())
    repo = PluginRepository.query.get_or_404(repo_id)
    # Capture the name before committing the delete: with SQLAlchemy's
    # default expire_on_commit, attributes of the deleted instance can no
    # longer be loaded afterwards, so logging repo.name post-commit raises.
    repo_name = repo.name
    db.session.delete(repo)
    db.session.commit()
    log_event(EventType.SETTING_CHANGE, f"Plugin repository '{repo_name}' removed.", admin_id=current_user.id)
    return jsonify({'data': {'success': True}, 'meta': {'request_id': request_id}})
|
||||
663
app/routes/api_v1/servers.py
Normal file
663
app/routes/api_v1/servers.py
Normal file
@@ -0,0 +1,663 @@
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request, current_app
|
||||
from flask_login import login_required
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.services.media_service_manager import MediaServiceManager
|
||||
from app.services.media_service_factory import MediaServiceFactory
|
||||
from app.utils.helpers import permission_required
|
||||
from app.models_media_services import ServiceType, MediaServer, MediaLibrary
|
||||
from app.extensions import db
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
def _serialize_server(server, include_status=False):
|
||||
websocket_refresh_interval = 30
|
||||
if isinstance(server.config, dict):
|
||||
try:
|
||||
websocket_refresh_interval = int(server.config.get('websocket_refresh_interval', 30))
|
||||
except (TypeError, ValueError):
|
||||
websocket_refresh_interval = 30
|
||||
|
||||
data = {
|
||||
'id': server.id,
|
||||
'server_nickname': server.server_nickname,
|
||||
'server_name': server.server_name,
|
||||
'service_type': server.service_type.value,
|
||||
'url': server.url,
|
||||
'public_url': server.public_url,
|
||||
'is_active': server.is_active,
|
||||
'overseerr_enabled': server.overseerr_enabled,
|
||||
'overseerr_url': server.overseerr_url,
|
||||
'last_status': server.last_status,
|
||||
'last_status_check': server.last_status_check.isoformat() if server.last_status_check else None,
|
||||
'last_version': server.last_version,
|
||||
'last_sync_at': server.last_sync_at.isoformat() if server.last_sync_at else None,
|
||||
'websocket_refresh_interval': websocket_refresh_interval,
|
||||
}
|
||||
if include_status:
|
||||
status = {}
|
||||
try:
|
||||
service = MediaServiceFactory.create_service_from_db(server)
|
||||
if service:
|
||||
status = service.get_server_info() or {}
|
||||
except Exception as exc:
|
||||
status = {
|
||||
'online': False,
|
||||
'error': str(exc)
|
||||
}
|
||||
data['status'] = status
|
||||
return data
|
||||
|
||||
|
||||
@bp.route('/servers', methods=['GET'])
@login_required
@permission_required('view_servers')
def list_servers():
    """List media servers, with optional status probing and filtering."""
    request_id = str(uuid4())
    include_status = request.args.get('include_status', 'false').lower() == 'true'

    raw_active_only = request.args.get('active_only')
    # Absent parameter means "all servers"; any value other than "false" opts in.
    active_only = raw_active_only is not None and raw_active_only.lower() != 'false'

    service_type_param = request.args.get('service_type')
    servers = MediaServiceManager.get_all_servers(active_only=active_only)

    if service_type_param:
        try:
            wanted = ServiceType(service_type_param.lower())
        except ValueError:
            # Unknown type filters everything out rather than erroring.
            servers = []
        else:
            servers = [srv for srv in servers if srv.service_type == wanted]

    return jsonify({
        'data': [_serialize_server(srv, include_status) for srv in servers],
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'filters': {
                'include_status': include_status,
                'active_only': active_only,
                'service_type': service_type_param
            },
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/servers/<int:server_id>', methods=['GET'])
@login_required
@permission_required('view_servers')
def get_server(server_id):
    """Fetch one server; optionally include live status and its libraries."""
    request_id = str(uuid4())
    server = MediaServer.query.get(server_id)

    if server is None:
        return jsonify({
            'error': {
                'code': 'SERVER_NOT_FOUND',
                'message': f'Server with ID {server_id} not found',
                'details': {'server_id': server_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    include_status = request.args.get('include_status', 'false').lower() == 'true'
    include_libraries = request.args.get('include_libraries', 'false').lower() == 'true'

    data = _serialize_server(server, include_status)

    if include_libraries:
        data['libraries'] = [
            {
                'id': lib.id,
                'internal_id': lib.internal_id,
                'external_id': lib.external_id,
                'name': lib.name,
                'library_type': lib.library_type,
                'item_count': lib.item_count,
                'last_scanned': lib.last_scanned.isoformat() if lib.last_scanned else None
            }
            for lib in server.libraries
        ]

    return jsonify({
        'data': data,
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/servers', methods=['POST'])
@login_required
@permission_required('manage_servers')
def create_server():
    """Create a new media server.

    Expects a JSON body with at least ``server_nickname``, ``service_type``
    and ``url``. For Plex servers a ``websocket_refresh_interval`` (2-300
    seconds, default 30) is validated and stored in the server config; for
    other service types the key is stripped from the config. Returns 201 with
    the serialized server, or an error envelope (400/409/422/500).
    """
    request_id = str(uuid4())
    # silent=True: a missing or malformed JSON body must produce our own
    # INVALID_REQUEST envelope, not Flask's default 415/400 error response.
    data = request.get_json(silent=True)

    if not data:
        return jsonify({
            'error': {
                'code': 'INVALID_REQUEST',
                'message': 'Request body must be JSON',
                'hint': 'Ensure Content-Type header is application/json'
            },
            'meta': {'request_id': request_id}
        }), 400

    # Validate required fields
    required_fields = ['server_nickname', 'service_type', 'url']
    missing_fields = [field for field in required_fields if not data.get(field)]

    if missing_fields:
        return jsonify({
            'error': {
                'code': 'VALIDATION_ERROR',
                'message': 'Missing required fields',
                'details': {'missing_fields': missing_fields}
            },
            'meta': {'request_id': request_id}
        }), 422

    # Nickname is the human-facing key; reject duplicates up front.
    existing = MediaServer.query.filter_by(server_nickname=data['server_nickname']).first()
    if existing:
        return jsonify({
            'error': {
                'code': 'DUPLICATE_SERVER_NICKNAME',
                'message': f'Server with nickname "{data["server_nickname"]}" already exists',
                'details': {'server_nickname': data['server_nickname']},
                'hint': 'Choose a unique nickname for this server'
            },
            'meta': {'request_id': request_id}
        }), 409

    # Validate service_type (str() guards against non-string JSON values,
    # which would otherwise crash on .lower() and surface as a 500).
    try:
        service_type_enum = ServiceType(str(data['service_type']).lower())
    except ValueError:
        valid_types = [st.value for st in ServiceType]
        return jsonify({
            'error': {
                'code': 'INVALID_SERVICE_TYPE',
                'message': f'Invalid service_type: {data["service_type"]}',
                'details': {'valid_types': valid_types}
            },
            'meta': {'request_id': request_id}
        }), 422

    websocket_refresh_interval = data.get('websocket_refresh_interval', 30)
    if service_type_enum == ServiceType.PLEX:
        try:
            websocket_refresh_interval = int(websocket_refresh_interval)
        except (TypeError, ValueError):
            return jsonify({
                'error': {
                    'code': 'INVALID_WEBSOCKET_INTERVAL',
                    'message': 'WebSocket refresh interval must be an integer.'
                },
                'meta': {'request_id': request_id}
            }), 400

        if websocket_refresh_interval < 2 or websocket_refresh_interval > 300:
            return jsonify({
                'error': {
                    'code': 'WEBSOCKET_INTERVAL_OUT_OF_RANGE',
                    'message': 'WebSocket refresh interval must be between 2 and 300 seconds.'
                },
                'meta': {'request_id': request_id}
            }), 400
    else:
        # Only Plex uses the websocket interval.
        websocket_refresh_interval = None

    config_payload = data.get('config')
    if not isinstance(config_payload, dict):
        config_payload = {}

    if service_type_enum == ServiceType.PLEX and websocket_refresh_interval is not None:
        config_payload = {**config_payload, 'websocket_refresh_interval': websocket_refresh_interval}
    else:
        # Non-Plex servers must not carry the interval in their config.
        config_payload.pop('websocket_refresh_interval', None)

    # Create server
    server = MediaServer(
        server_nickname=data['server_nickname'],
        server_name=data.get('server_name'),
        service_type=service_type_enum,
        url=data['url'],
        api_key=data.get('api_key'),
        username=data.get('username'),
        password=data.get('password'),
        public_url=data.get('public_url'),
        overseerr_enabled=data.get('overseerr_enabled', False),
        overseerr_url=data.get('overseerr_url'),
        overseerr_api_key=data.get('overseerr_api_key'),
        config=config_payload,
        is_active=data.get('is_active', True)
    )

    try:
        db.session.add(server)
        db.session.commit()

        return jsonify({
            'data': _serialize_server(server, include_status=False),
            'meta': {
                'request_id': request_id,
                'deprecated': False,
                'generated_at': datetime.utcnow().isoformat() + 'Z'
            }
        }), 201
    except Exception as e:
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'SERVER_CREATION_FAILED',
                'message': 'Failed to create server',
                'details': {'error': str(e)}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/servers/<int:server_id>', methods=['PATCH'])
@login_required
@permission_required('manage_servers')
def update_server(server_id):
    """Update an existing media server.

    Accepts a partial JSON body; only the provided fields change. Validates
    nickname uniqueness, service type, and (for Plex) the websocket refresh
    interval. Returns the updated server or an error envelope.
    """
    request_id = str(uuid4())
    server = MediaServer.query.get(server_id)

    if not server:
        return jsonify({
            'error': {
                'code': 'SERVER_NOT_FOUND',
                'message': f'Server with ID {server_id} not found',
                'details': {'server_id': server_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    # silent=True: a missing or malformed JSON body must produce our own
    # INVALID_REQUEST envelope, not Flask's default 415/400 error response.
    data = request.get_json(silent=True)
    if not data:
        return jsonify({
            'error': {
                'code': 'INVALID_REQUEST',
                'message': 'Request body must be JSON',
                'hint': 'Ensure Content-Type header is application/json'
            },
            'meta': {'request_id': request_id}
        }), 400

    # Check for nickname conflicts if changing nickname
    if 'server_nickname' in data and data['server_nickname'] != server.server_nickname:
        existing = MediaServer.query.filter_by(server_nickname=data['server_nickname']).first()
        if existing:
            return jsonify({
                'error': {
                    'code': 'DUPLICATE_SERVER_NICKNAME',
                    'message': f'Server with nickname "{data["server_nickname"]}" already exists',
                    'details': {'server_nickname': data['server_nickname']}
                },
                'meta': {'request_id': request_id}
            }), 409

    # Copy over simple fields verbatim
    updatable_fields = [
        'server_nickname', 'server_name', 'url', 'api_key', 'username',
        'password', 'public_url', 'overseerr_enabled', 'overseerr_url',
        'overseerr_api_key', 'config', 'is_active'
    ]
    for field in updatable_fields:
        if field in data:
            setattr(server, field, data[field])

    # Handle service_type separately (needs enum conversion; str() guards
    # against non-string JSON values crashing .lower()).
    if 'service_type' in data:
        try:
            server.service_type = ServiceType(str(data['service_type']).lower())
        except ValueError:
            valid_types = [st.value for st in ServiceType]
            return jsonify({
                'error': {
                    'code': 'INVALID_SERVICE_TYPE',
                    'message': f'Invalid service_type: {data["service_type"]}',
                    'details': {'valid_types': valid_types}
                },
                'meta': {'request_id': request_id}
            }), 422

    # Ensure config is a dict we can mutate.
    # NOTE(review): in-place mutation of a plain JSON column may not be
    # change-tracked by SQLAlchemy unless the column uses MutableDict —
    # confirm against the MediaServer model definition.
    if not isinstance(server.config, dict):
        server.config = {}

    if 'websocket_refresh_interval' in data or server.service_type == ServiceType.PLEX:
        websocket_interval_value = data.get('websocket_refresh_interval')
        if websocket_interval_value is not None:
            try:
                websocket_interval_value = int(websocket_interval_value)
            except (TypeError, ValueError):
                return jsonify({
                    'error': {
                        'code': 'INVALID_WEBSOCKET_INTERVAL',
                        'message': 'WebSocket refresh interval must be an integer.'
                    },
                    'meta': {'request_id': request_id}
                }), 400

            if websocket_interval_value < 2 or websocket_interval_value > 300:
                return jsonify({
                    'error': {
                        'code': 'WEBSOCKET_INTERVAL_OUT_OF_RANGE',
                        'message': 'WebSocket refresh interval must be between 2 and 300 seconds.'
                    },
                    'meta': {'request_id': request_id}
                }), 400

            if server.service_type == ServiceType.PLEX:
                server.config['websocket_refresh_interval'] = websocket_interval_value
        elif 'websocket_refresh_interval' not in server.config and server.service_type == ServiceType.PLEX:
            # Plex servers always carry an interval; backfill the default.
            server.config['websocket_refresh_interval'] = 30

    if server.service_type != ServiceType.PLEX:
        # Only Plex uses the websocket interval; drop stale values.
        server.config.pop('websocket_refresh_interval', None)

    server.updated_at = datetime.utcnow()

    try:
        db.session.commit()

        return jsonify({
            'data': _serialize_server(server, include_status=False),
            'meta': {
                'request_id': request_id,
                'deprecated': False,
                'generated_at': datetime.utcnow().isoformat() + 'Z'
            }
        })
    except Exception as e:
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'SERVER_UPDATE_FAILED',
                'message': 'Failed to update server',
                'details': {'error': str(e)}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/servers/<int:server_id>', methods=['DELETE'])
@login_required
@permission_required('manage_servers')
def delete_server(server_id):
    """Delete a media server and its associated users.

    Cascades manually through dependent tables (stream history, media items,
    service users, optional Overseerr links) before removing the server row
    itself with raw SQL. Responds with counts of everything deleted, or an
    error envelope with traceback details on failure.
    """
    request_id = str(uuid4())
    server = MediaServer.query.get(server_id)

    if not server:
        return jsonify({
            'error': {
                'code': 'SERVER_NOT_FOUND',
                'message': f'Server with ID {server_id} not found',
                'details': {'server_id': server_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    # Serialize before deletion so the response can echo what was removed.
    server_data = _serialize_server(server, include_status=False)

    try:
        from app.models import User
        from app.models_media_services import MediaItem, MediaStreamHistory
        from sqlalchemy import inspect
        from sqlalchemy.exc import OperationalError

        # Check if overseerr_user_links table exists
        # (older installs may predate the Overseerr migration).
        inspector = inspect(db.engine)
        table_names = inspector.get_table_names()
        has_overseerr_table = 'overseerr_user_links' in table_names

        # Get counts for response
        users_count = User.query.filter_by(server_id=server_id).count()
        libraries_count = len(server.libraries)
        items_count = MediaItem.query.filter_by(server_id=server_id).count()
        stream_history_count = MediaStreamHistory.query.filter_by(server_id=server_id).count()

        # Delete Overseerr links if table exists
        overseerr_links_count = 0
        if has_overseerr_table:
            try:
                from app.models_overseerr import OverseerrUserLink
                overseerr_links_count = OverseerrUserLink.query.filter_by(server_id=server_id).count()
                OverseerrUserLink.query.filter_by(server_id=server_id).delete()
            except Exception as e:
                # Best-effort: a failure here should not block server removal.
                current_app.logger.warning(f"Failed to delete overseerr links: {str(e)}")
        else:
            current_app.logger.debug("overseerr_user_links table doesn't exist yet, skipping")

        # Delete all associated records in the correct order
        # 1. Delete stream history (depends on server)
        MediaStreamHistory.query.filter_by(server_id=server_id).delete()

        # 2. Delete media items (depends on server and library)
        MediaItem.query.filter_by(server_id=server_id).delete()

        # 3. Delete associated service users (depends on server)
        # Note: This only deletes service users, not local admins
        User.query.filter_by(server_id=server_id).delete()

        # 4. Delete the server (libraries will cascade delete automatically)
        # Use expunge to remove server from session to avoid lazy loading relationships
        db.session.expunge(server)

        # Delete server directly via raw SQL to avoid relationship loading
        db.session.execute(
            db.text("DELETE FROM media_servers WHERE id = :server_id"),
            {"server_id": server_id}
        )

        db.session.commit()

        return jsonify({
            'data': {
                'success': True,
                'deleted_server': server_data,
                'deleted_users_count': users_count,
                'deleted_libraries_count': libraries_count,
                'deleted_items_count': items_count,
                'deleted_stream_history_count': stream_history_count,
                'deleted_overseerr_links_count': overseerr_links_count
            },
            'meta': {
                'request_id': request_id,
                'deprecated': False,
                'generated_at': datetime.utcnow().isoformat() + 'Z'
            }
        })
    except Exception as e:
        db.session.rollback()
        import traceback
        error_trace = traceback.format_exc()
        current_app.logger.error(f"Failed to delete server {server_id}: {str(e)}")
        current_app.logger.error(f"Traceback: {error_trace}")
        return jsonify({
            'error': {
                'code': 'SERVER_DELETION_FAILED',
                'message': 'Failed to delete server',
                'details': {'error': str(e), 'traceback': error_trace}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/servers/<int:server_id>/sync-libraries', methods=['POST'])
@login_required
@permission_required('manage_servers')
def sync_server_libraries(server_id):
    """Trigger a library sync for one media server via the service manager."""
    request_id = str(uuid4())
    server = MediaServer.query.get(server_id)

    if server is None:
        return jsonify({
            'error': {
                'code': 'SERVER_NOT_FOUND',
                'message': f'Server with ID {server_id} not found',
                'details': {'server_id': server_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    try:
        # Delegate the actual sync work to the service manager.
        result = MediaServiceManager.sync_server_libraries(server_id)
        summary = {
            'success': True,
            'server_id': server_id,
            'libraries_synced': result.get('libraries_count', 0),
            'message': result.get('message', 'Libraries synced successfully')
        }
        return jsonify({
            'data': summary,
            'meta': {
                'request_id': request_id,
                'deprecated': False,
                'generated_at': datetime.utcnow().isoformat() + 'Z'
            }
        })
    except Exception as e:
        return jsonify({
            'error': {
                'code': 'LIBRARY_SYNC_FAILED',
                'message': 'Failed to sync libraries',
                'details': {'error': str(e)}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/servers/<int:server_id>/sync-users', methods=['POST'])
@login_required
@permission_required('manage_servers')
def sync_server_users(server_id):
    """Trigger a user sync for one media server via the service manager."""
    request_id = str(uuid4())
    server = MediaServer.query.get(server_id)

    if server is None:
        return jsonify({
            'error': {
                'code': 'SERVER_NOT_FOUND',
                'message': f'Server with ID {server_id} not found',
                'details': {'server_id': server_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    try:
        # Delegate the actual sync work to the service manager.
        result = MediaServiceManager.sync_server_users(server_id)
        summary = {
            'success': True,
            'server_id': server_id,
            'users_synced': result.get('users_count', 0),
            'message': result.get('message', 'Users synced successfully')
        }
        return jsonify({
            'data': summary,
            'meta': {
                'request_id': request_id,
                'deprecated': False,
                'generated_at': datetime.utcnow().isoformat() + 'Z'
            }
        })
    except Exception as e:
        return jsonify({
            'error': {
                'code': 'USER_SYNC_FAILED',
                'message': 'Failed to sync users',
                'details': {'error': str(e)}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/servers/<int:server_id>/test', methods=['POST'])
@login_required
@permission_required('manage_servers')
def test_server_connection(server_id):
    """Probe a media server once and report whether it is reachable."""
    request_id = str(uuid4())
    server = MediaServer.query.get(server_id)

    if server is None:
        return jsonify({
            'error': {
                'code': 'SERVER_NOT_FOUND',
                'message': f'Server with ID {server_id} not found',
                'details': {'server_id': server_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    try:
        service = MediaServiceFactory.create_service_from_db(server)
        if service is None:
            return jsonify({
                'error': {
                    'code': 'SERVICE_CREATION_FAILED',
                    'message': 'Failed to create service instance',
                    'details': {'server_id': server_id}
                },
                'meta': {'request_id': request_id}
            }), 500

        # A single info call doubles as the connectivity check.
        server_info = service.get_server_info()
        online = bool(server_info and server_info.get('online'))

        return jsonify({
            'data': {
                'success': online,
                'online': online,
                'server_info': server_info,
                'message': 'Connection successful' if online else 'Server is offline or unreachable'
            },
            'meta': {
                'request_id': request_id,
                'deprecated': False,
                'generated_at': datetime.utcnow().isoformat() + 'Z'
            }
        })
    except Exception as e:
        return jsonify({
            'error': {
                'code': 'CONNECTION_TEST_FAILED',
                'message': 'Failed to test server connection',
                'details': {'error': str(e)}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
62
app/routes/api_v1/settings_advanced.py
Normal file
62
app/routes/api_v1/settings_advanced.py
Normal file
@@ -0,0 +1,62 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request, current_app
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import Setting, SettingValueType, EventType
|
||||
from app.utils.helpers import permission_required, log_event
|
||||
|
||||
|
||||
def _serialize_advanced_settings():
    """Expose advanced settings in API form (CSRF timeout in whole minutes)."""
    raw_timeout = Setting.get('WTF_CSRF_TIME_LIMIT')
    # Unset, empty, or zero stored values all surface as 0 ("no expiry").
    timeout_minutes = int(raw_timeout) // 60 if raw_timeout else 0
    return {
        'csrf_token_timeout_minutes': timeout_minutes
    }
|
||||
|
||||
|
||||
@bp.route('/settings/advanced', methods=['GET'])
@login_required
@permission_required('manage_advanced_settings')
def get_advanced_settings():
    """Return the advanced settings payload (CSRF token timeout in minutes)."""
    request_id = str(uuid4())
    return jsonify({'data': _serialize_advanced_settings(), 'meta': {'request_id': request_id}})
|
||||
|
||||
|
||||
@bp.route('/settings/advanced', methods=['PATCH'])
@login_required
@permission_required('manage_advanced_settings')
def update_advanced_settings():
    """Update advanced settings; currently only the CSRF token timeout.

    Accepts ``csrf_token_timeout_minutes`` in 0-1440; 0 disables expiry.
    """
    request_id = str(uuid4())
    payload = request.get_json(silent=True) or {}

    try:
        timeout_minutes = int(payload.get('csrf_token_timeout_minutes'))
        if not 0 <= timeout_minutes <= 1440:
            raise ValueError
    except (TypeError, ValueError):
        return jsonify({
            'error': {
                'code': 'INVALID_CSRF_TIMEOUT',
                'message': 'CSRF token timeout must be between 0 and 1440 minutes.'
            },
            'meta': {'request_id': request_id}
        }), 400

    if timeout_minutes:
        timeout_seconds = timeout_minutes * 60
        Setting.set('WTF_CSRF_TIME_LIMIT', timeout_seconds, SettingValueType.INTEGER, "CSRF Token Timeout")
        current_app.config['WTF_CSRF_TIME_LIMIT'] = timeout_seconds
    else:
        # Zero disables CSRF token expiry entirely.
        Setting.set('WTF_CSRF_TIME_LIMIT', None, SettingValueType.STRING, "CSRF Token Timeout")
        current_app.config['WTF_CSRF_TIME_LIMIT'] = None

    log_event(EventType.SETTING_CHANGE, "Advanced settings updated via API.", admin_id=current_user.id)

    return jsonify({'data': _serialize_advanced_settings(), 'meta': {'request_id': request_id}}), 200
|
||||
|
||||
207
app/routes/api_v1/settings_discord.py
Normal file
207
app/routes/api_v1/settings_discord.py
Normal file
@@ -0,0 +1,207 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request, current_app, g
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import Setting, SettingValueType, EventType
|
||||
from app.utils.helpers import permission_required, log_event
|
||||
|
||||
|
||||
def _compute_redirects():
    """Derive Discord OAuth redirect URIs from the configured base URL.

    Returns a (invite_redirect, admin_redirect) pair, or (None, None)
    when no application base URL is configured.
    """
    configured = Setting.get('APP_BASE_URL') or current_app.config.get('APP_BASE_URL') or ''
    base = (configured or '').rstrip('/')
    if not base:
        return None, None
    return base + '/invites/discord_callback', base + '/auth/discord_callback_admin'
|
||||
|
||||
|
||||
def _serialize_discord_settings():
    """Build the Discord settings payload for API responses.

    Secrets are never echoed back; only boolean "is set" flags are
    exposed for the client secret and bot token.
    """
    invite_uri, admin_uri = _compute_redirects()

    # Only include the linked-admin profile when a Discord ID is present.
    admin_profile = None
    if current_user.discord_user_id:
        admin_profile = {
            'username': current_user.discord_username,
            'id': current_user.discord_user_id,
            'avatar': current_user.discord_avatar_hash
        }

    return {
        'enable_oauth': Setting.get_bool('DISCORD_OAUTH_ENABLED', False),
        'client_id': Setting.get('DISCORD_CLIENT_ID'),
        'client_secret_set': bool(Setting.get('DISCORD_CLIENT_SECRET')),
        'oauth_auth_url': Setting.get('DISCORD_OAUTH_AUTH_URL'),
        'redirect_uri_invite': invite_uri,
        'redirect_uri_admin': admin_uri,
        'enable_membership_requirement': Setting.get_bool('ENABLE_DISCORD_MEMBERSHIP_REQUIREMENT', False),
        'guild_id': Setting.get('DISCORD_GUILD_ID'),
        'server_invite_url': Setting.get('DISCORD_SERVER_INVITE_URL'),
        'enable_bot': Setting.get_bool('DISCORD_BOT_ENABLED', False),
        'bot_token_set': bool(Setting.get('DISCORD_BOT_TOKEN')),
        'monitored_role_id': Setting.get('DISCORD_MONITORED_ROLE_ID'),
        'thread_channel_id': Setting.get('DISCORD_THREAD_CHANNEL_ID'),
        'bot_log_channel_id': Setting.get('DISCORD_BOT_LOG_CHANNEL_ID'),
        'whitelist_sharers': Setting.get_bool('DISCORD_BOT_WHITELIST_SHARERS', False),
        'admin_linked': admin_profile is not None,
        'admin_user': admin_profile
    }
|
||||
|
||||
|
||||
@bp.route('/settings/discord', methods=['GET'])
@login_required
@permission_required('manage_discord_settings')
def get_discord_settings():
    """Return the current Discord integration settings."""
    return jsonify({
        'data': _serialize_discord_settings(),
        'meta': {'request_id': str(uuid4())}
    })
|
||||
|
||||
|
||||
@bp.route('/settings/discord', methods=['PATCH'])
@login_required
@permission_required('manage_discord_settings')
def update_discord_settings():
    """Validate and persist Discord OAuth / bot / membership settings.

    Dependency rules enforced here:
      - enabling the bot or the membership requirement implies OAuth;
      - disabling OAuth force-disables both dependent features and wipes
        all Discord credentials from storage;
      - secrets (client secret, bot token) are only overwritten when a
        new value is supplied, otherwise the stored value is kept.

    Returns the refreshed serialized settings, or a 400 error payload
    when a required field is missing for the requested feature set.
    """
    request_id = str(uuid4())
    payload = request.get_json(silent=True) or {}

    enable_oauth = bool(payload.get('enable_oauth', False))
    enable_bot = bool(payload.get('enable_bot', False))
    require_membership = bool(payload.get('enable_membership_requirement', False))

    # Bot features and the membership requirement both depend on OAuth.
    if (enable_bot or require_membership) and not enable_oauth:
        enable_oauth = True

    client_id = payload.get('client_id')
    client_secret = payload.get('client_secret')
    oauth_auth_url = payload.get('oauth_auth_url')
    guild_id = payload.get('guild_id')
    server_invite_url = payload.get('server_invite_url')
    bot_token = payload.get('bot_token')
    monitored_role_id = payload.get('monitored_role_id')
    thread_channel_id = payload.get('thread_channel_id')
    bot_log_channel_id = payload.get('bot_log_channel_id')
    whitelist_sharers = bool(payload.get('whitelist_sharers', False))

    if enable_oauth:
        # Client ID / secret may come from this request or from storage.
        existing_client_id = Setting.get('DISCORD_CLIENT_ID')
        if not (client_id or existing_client_id):
            return jsonify({
                'error': {'code': 'CLIENT_ID_REQUIRED', 'message': 'Discord client ID is required when OAuth is enabled.'},
                'meta': {'request_id': request_id}
            }), 400
        existing_secret = Setting.get('DISCORD_CLIENT_SECRET')
        if not (client_secret or existing_secret):
            return jsonify({
                'error': {'code': 'CLIENT_SECRET_REQUIRED', 'message': 'Discord client secret is required when OAuth is enabled.'},
                'meta': {'request_id': request_id}
            }), 400
        if not oauth_auth_url:
            # Keep the previously stored authorization URL when omitted.
            oauth_auth_url = Setting.get('DISCORD_OAUTH_AUTH_URL')
    else:
        # OAuth off -> dependent features cannot stay on.
        enable_bot = False
        require_membership = False

    if enable_bot or require_membership:
        # A guild ID is mandatory for either dependent feature.
        final_guild_id = guild_id or Setting.get('DISCORD_GUILD_ID')
        if not final_guild_id:
            return jsonify({
                'error': {'code': 'GUILD_ID_REQUIRED', 'message': 'Discord guild ID is required when bot features or membership requirement are enabled.'},
                'meta': {'request_id': request_id}
            }), 400
        guild_id = final_guild_id
        if require_membership:
            # Membership gating needs a server invite URL to show users.
            final_invite = server_invite_url or Setting.get('DISCORD_SERVER_INVITE_URL')
            if not final_invite:
                return jsonify({
                    'error': {'code': 'INVITE_URL_REQUIRED', 'message': 'Server invite URL is required when membership requirement is enabled.'},
                    'meta': {'request_id': request_id}
                }), 400
            server_invite_url = final_invite

    if enable_bot:
        # The bot needs a token, either new or previously stored.
        final_token = bot_token or Setting.get('DISCORD_BOT_TOKEN')
        if not final_token:
            return jsonify({
                'error': {'code': 'BOT_TOKEN_REQUIRED', 'message': 'Bot token is required when bot features are enabled.'},
                'meta': {'request_id': request_id}
            }), 400
        bot_token = final_token

    # --- Validation passed: persist everything below this point. ---
    Setting.set('DISCORD_OAUTH_ENABLED', enable_oauth, SettingValueType.BOOLEAN)
    current_app.config['DISCORD_OAUTH_ENABLED'] = enable_oauth
    if hasattr(g, 'discord_oauth_enabled_for_invite'):
        # Keep the per-request flag coherent for the rest of this request.
        g.discord_oauth_enabled_for_invite = enable_oauth

    if enable_oauth:
        # Secrets are written only when a new value was provided.
        if client_id:
            Setting.set('DISCORD_CLIENT_ID', client_id, SettingValueType.STRING)
        if client_secret:
            Setting.set('DISCORD_CLIENT_SECRET', client_secret, SettingValueType.SECRET)
        if oauth_auth_url:
            Setting.set('DISCORD_OAUTH_AUTH_URL', oauth_auth_url, SettingValueType.STRING)
        # Recompute redirect URIs from the (possibly updated) base URL.
        invite_redirect, admin_redirect = _compute_redirects()
        if invite_redirect:
            Setting.set('DISCORD_REDIRECT_URI_INVITE', invite_redirect, SettingValueType.STRING)
            Setting.set('DISCORD_REDIRECT_URI_ADMIN_LINK', admin_redirect, SettingValueType.STRING)
        Setting.set('ENABLE_DISCORD_MEMBERSHIP_REQUIREMENT', require_membership, SettingValueType.BOOLEAN)
        Setting.set('DISCORD_REQUIRE_GUILD_MEMBERSHIP', require_membership, SettingValueType.BOOLEAN)
        if enable_bot or require_membership:
            Setting.set('DISCORD_GUILD_ID', guild_id or '', SettingValueType.STRING)
            if require_membership:
                Setting.set('DISCORD_SERVER_INVITE_URL', server_invite_url or '', SettingValueType.STRING)
            elif not enable_bot:
                # Membership off and bot off: clear the invite URL.
                Setting.set('DISCORD_SERVER_INVITE_URL', '', SettingValueType.STRING)
        else:
            # Neither dependent feature enabled: clear guild data.
            Setting.set('DISCORD_GUILD_ID', '', SettingValueType.STRING)
            Setting.set('DISCORD_SERVER_INVITE_URL', '', SettingValueType.STRING)
    else:
        # OAuth disabled entirely: wipe every stored OAuth-related value.
        for key, value_type in [
            ('DISCORD_CLIENT_ID', SettingValueType.STRING),
            ('DISCORD_CLIENT_SECRET', SettingValueType.SECRET),
            ('DISCORD_OAUTH_AUTH_URL', SettingValueType.STRING),
            ('DISCORD_REDIRECT_URI_INVITE', SettingValueType.STRING),
            ('DISCORD_REDIRECT_URI_ADMIN_LINK', SettingValueType.STRING),
            ('DISCORD_GUILD_ID', SettingValueType.STRING),
            ('DISCORD_SERVER_INVITE_URL', SettingValueType.STRING)
        ]:
            Setting.set(key, '', value_type)
        Setting.set('ENABLE_DISCORD_MEMBERSHIP_REQUIREMENT', False, SettingValueType.BOOLEAN)
        Setting.set('DISCORD_REQUIRE_GUILD_MEMBERSHIP', False, SettingValueType.BOOLEAN)

    Setting.set('DISCORD_BOT_ENABLED', enable_bot, SettingValueType.BOOLEAN)
    if enable_bot:
        if bot_token:
            Setting.set('DISCORD_BOT_TOKEN', bot_token, SettingValueType.SECRET)
        Setting.set('DISCORD_MONITORED_ROLE_ID', monitored_role_id or '', SettingValueType.STRING)
        Setting.set('DISCORD_THREAD_CHANNEL_ID', thread_channel_id or '', SettingValueType.STRING)
        Setting.set('DISCORD_BOT_LOG_CHANNEL_ID', bot_log_channel_id or '', SettingValueType.STRING)
        if not require_membership and server_invite_url:
            # Bot-only mode may still carry an optional invite URL.
            Setting.set('DISCORD_SERVER_INVITE_URL', server_invite_url, SettingValueType.STRING)
        Setting.set('DISCORD_BOT_WHITELIST_SHARERS', whitelist_sharers, SettingValueType.BOOLEAN)
    else:
        # Bot disabled: clear bot credentials and channel/role bindings.
        # NOTE(review): whitelist_sharers is still persisted from the payload
        # here even though the bot is off — confirm this is intentional.
        Setting.set('DISCORD_BOT_TOKEN', '', SettingValueType.SECRET)
        Setting.set('DISCORD_MONITORED_ROLE_ID', '', SettingValueType.STRING)
        Setting.set('DISCORD_THREAD_CHANNEL_ID', '', SettingValueType.STRING)
        Setting.set('DISCORD_BOT_LOG_CHANNEL_ID', '', SettingValueType.STRING)
        Setting.set('DISCORD_BOT_WHITELIST_SHARERS', whitelist_sharers, SettingValueType.BOOLEAN)

    log_event(EventType.DISCORD_CONFIG_SAVE, "Discord settings updated via API.", admin_id=current_user.id)

    return jsonify({'data': _serialize_discord_settings(), 'meta': {'request_id': request_id}}), 200
|
||||
|
||||
|
||||
@bp.route('/settings/discord/test', methods=['POST'])
@login_required
@permission_required('manage_discord_settings')
def test_discord_settings():
    """Check whether Discord credentials are configured.

    Body field 'type' selects the check: 'bot' verifies a bot token is
    stored; anything else (default 'oauth') verifies client ID + secret.
    Only checks presence of stored values; no network call is made.
    """
    request_id = str(uuid4())
    body = request.get_json(silent=True) or {}
    check = (body.get('type') or 'oauth').lower()

    if check == 'bot':
        if Setting.get('DISCORD_BOT_TOKEN'):
            return jsonify({'data': {'success': True, 'message': 'Bot token is configured.'}, 'meta': {'request_id': request_id}})
        return jsonify({'error': {'code': 'BOT_TOKEN_MISSING', 'message': 'Discord bot token has not been configured.'}, 'meta': {'request_id': request_id}}), 400

    oauth_ready = Setting.get('DISCORD_CLIENT_ID') and Setting.get('DISCORD_CLIENT_SECRET')
    if oauth_ready:
        return jsonify({'data': {'success': True, 'message': 'Discord OAuth credentials are configured.'}, 'meta': {'request_id': request_id}})

    return jsonify({'error': {'code': 'OAUTH_NOT_CONFIGURED', 'message': 'Discord OAuth credentials are not fully configured.'}, 'meta': {'request_id': request_id}}), 400
|
||||
126
app/routes/api_v1/settings_general.py
Normal file
126
app/routes/api_v1/settings_general.py
Normal file
@@ -0,0 +1,126 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request, current_app, g
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import Setting, SettingValueType, EventType
|
||||
from app.utils.helpers import permission_required, log_event
|
||||
|
||||
|
||||
def _serialize_general_settings():
    """Assemble the general settings payload, preferring stored values."""
    def _stored_or_config(key):
        # DB-stored value wins; fall back to the Flask config default.
        return Setting.get(key) or current_app.config.get(key)

    return {
        'app_name': _stored_or_config('APP_NAME'),
        'app_base_url': _stored_or_config('APP_BASE_URL'),
        'app_local_url': _stored_or_config('APP_LOCAL_URL'),
        'enable_navbar_stream_badge': Setting.get_bool('ENABLE_NAVBAR_STREAM_BADGE', False),
        'session_monitoring_interval': Setting.get('SESSION_MONITORING_INTERVAL_SECONDS', 30),
        'api_timeout_seconds': Setting.get('API_TIMEOUT_SECONDS', current_app.config.get('API_TIMEOUT_SECONDS', 3))
    }
|
||||
|
||||
|
||||
@bp.route('/settings/general', methods=['GET'])
@login_required
@permission_required('manage_general_settings')
def get_general_settings():
    """Return the current general application settings."""
    return jsonify({
        'data': _serialize_general_settings(),
        'meta': {'request_id': str(uuid4())}
    })
|
||||
|
||||
|
||||
@bp.route('/settings/general', methods=['PATCH'])
@login_required
@permission_required('manage_general_settings')
def update_general_settings():
    """Validate and persist the general application settings.

    Required: 'app_base_url' (http/https). Optional: 'app_name',
    'app_local_url' (http/https when present). 'session_monitoring_interval'
    (10-300 s) and 'api_timeout_seconds' (1-60 s) are required integers —
    omitting either yields a 400, so callers must send the full form.
    On success the values are written to storage AND the live Flask
    config (and the current request's `g` mirrors), then the refreshed
    payload is returned.
    """
    request_id = str(uuid4())
    payload = request.get_json(silent=True) or {}

    app_name = (payload.get('app_name') or '').strip()
    app_base_url = (payload.get('app_base_url') or '').strip()
    app_local_url = (payload.get('app_local_url') or '').strip()
    enable_badge = bool(payload.get('enable_navbar_stream_badge', False))
    session_interval = payload.get('session_monitoring_interval')
    api_timeout = payload.get('api_timeout_seconds')

    if not app_base_url:
        return jsonify({
            'error': {
                'code': 'BASE_URL_REQUIRED',
                'message': 'Application base URL is required.'
            },
            'meta': {'request_id': request_id}
        }), 400

    if not app_base_url.startswith(('http://', 'https://')):
        return jsonify({
            'error': {
                'code': 'BASE_URL_INVALID',
                'message': 'Application base URL must start with http:// or https://.'
            },
            'meta': {'request_id': request_id}
        }), 400

    # Local URL is optional, but must be a valid scheme when supplied.
    if app_local_url and not app_local_url.startswith(('http://', 'https://')):
        return jsonify({
            'error': {
                'code': 'LOCAL_URL_INVALID',
                'message': 'Local application URL must start with http:// or https://.'
            },
            'meta': {'request_id': request_id}
        }), 400

    # Both numeric fields are mandatory; None is rejected via ValueError.
    try:
        if session_interval is None:
            raise ValueError
        session_interval = int(session_interval)
        if session_interval < 10 or session_interval > 300:
            raise ValueError
    except (TypeError, ValueError):
        return jsonify({
            'error': {
                'code': 'INVALID_MONITOR_INTERVAL',
                'message': 'Session monitoring interval must be between 10 and 300 seconds.'
            },
            'meta': {'request_id': request_id}
        }), 400

    try:
        if api_timeout is None:
            raise ValueError
        api_timeout = int(api_timeout)
        if api_timeout < 1 or api_timeout > 60:
            raise ValueError
    except (TypeError, ValueError):
        return jsonify({
            'error': {
                'code': 'INVALID_API_TIMEOUT',
                'message': 'API timeout must be between 1 and 60 seconds.'
            },
            'meta': {'request_id': request_id}
        }), 400

    # Persist; empty app_name falls back to the configured default.
    Setting.set('APP_NAME', app_name or current_app.config.get('APP_NAME'), SettingValueType.STRING, "Application Name")
    Setting.set('APP_BASE_URL', app_base_url.rstrip('/'), SettingValueType.STRING, "Application Base URL")
    Setting.set('APP_LOCAL_URL', app_local_url.rstrip('/') if app_local_url else '', SettingValueType.STRING, "Application Local URL")
    Setting.set('ENABLE_NAVBAR_STREAM_BADGE', enable_badge, SettingValueType.BOOLEAN, "Enable Nav Bar Stream Badge")
    Setting.set('SESSION_MONITORING_INTERVAL_SECONDS', session_interval, SettingValueType.INTEGER, "Session Monitoring Interval")
    Setting.set('API_TIMEOUT_SECONDS', api_timeout, SettingValueType.INTEGER, "API Request Timeout")

    # Mirror the stored values into the live app config so the change
    # takes effect without a restart.
    current_app.config['APP_NAME'] = app_name or current_app.config.get('APP_NAME')
    current_app.config['APP_BASE_URL'] = app_base_url.rstrip('/')
    current_app.config['APP_LOCAL_URL'] = app_local_url.rstrip('/') if app_local_url else None
    current_app.config['SESSION_MONITORING_INTERVAL_SECONDS'] = session_interval
    current_app.config['API_TIMEOUT_SECONDS'] = api_timeout
    # Keep request-scoped mirrors coherent for the remainder of this request.
    if hasattr(g, 'app_name'):
        g.app_name = current_app.config['APP_NAME']
    if hasattr(g, 'app_base_url'):
        g.app_base_url = current_app.config['APP_BASE_URL']
    if hasattr(g, 'app_local_url'):
        g.app_local_url = current_app.config['APP_LOCAL_URL']

    log_event(EventType.SETTING_CHANGE, "General application settings updated via API.", admin_id=current_user.id)

    return jsonify({
        'data': _serialize_general_settings(),
        'meta': {'request_id': request_id}
    }), 200
|
||||
|
||||
100
app/routes/api_v1/settings_logs.py
Normal file
100
app/routes/api_v1/settings_logs.py
Normal file
@@ -0,0 +1,100 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required
|
||||
from sqlalchemy import or_, func
|
||||
from sqlalchemy.orm import aliased
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import HistoryLog, EventType, User
|
||||
from app.utils.helpers import permission_required
|
||||
|
||||
|
||||
def _serialize_user(user: User | None):
    """Reduce a User row to a minimal dict for log payloads.

    Falsy input (None) passes through as None.
    """
    if not user:
        return None
    # get_display_name may not exist on every user-like object.
    display = user.get_display_name() if hasattr(user, 'get_display_name') else None
    return {
        'id': user.id,
        'uuid': getattr(user, 'uuid', None),
        'username': user.localUsername or user.external_username,
        'display_name': display
    }
|
||||
|
||||
|
||||
def _serialize_log(log: HistoryLog):
    """Flatten a HistoryLog row into a JSON-safe dict."""
    stamp = log.timestamp
    event = log.event_type
    return {
        'id': log.id,
        'timestamp': stamp.isoformat() if stamp else None,
        'event_type': event.name if event else None,
        'message': log.message,
        'details': log.details,
        'owner': _serialize_user(log.owner),
        'local_user': _serialize_user(log.affected_local_user),
        'invite_id': log.invite_id
    }
|
||||
|
||||
|
||||
@bp.route('/settings/logs', methods=['GET'])
@login_required
@permission_required('view_logs')
def list_history_logs():
    """List history log entries with pagination and optional filters.

    Query params: 'page' (>=1), 'page_size' (clamped to 1-100, default 25),
    'search_message' (case-insensitive substring of the message),
    'event_type' (EventType name, case-insensitive), and 'related_user'
    (substring match on owner or affected local user usernames).
    Returns paginated entries plus pagination/filter metadata.
    """
    request_id = str(uuid4())
    page = max(1, request.args.get('page', type=int) or 1)
    # Clamp page size to [1, 100] with a default of 25.
    page_size = max(1, min(request.args.get('page_size', type=int) or 25, 100))
    search_message = (request.args.get('search_message') or '').strip()
    event_type_str = (request.args.get('event_type') or '').strip().upper()
    related_user = (request.args.get('related_user') or '').strip()

    # Two aliases: the log's owner vs. the affected local user, which
    # may be different User rows joined from the same table.
    owner_alias = aliased(User)
    local_alias = aliased(User)

    query = HistoryLog.query.outerjoin(owner_alias, HistoryLog.owner).outerjoin(local_alias, HistoryLog.affected_local_user)

    if search_message:
        term = f"%{search_message}%"
        query = query.filter(HistoryLog.message.ilike(term))

    if event_type_str:
        try:
            # EventType[...] raises KeyError for unknown names.
            event_enum = EventType[event_type_str]
            query = query.filter(HistoryLog.event_type == event_enum)
        except KeyError:
            return jsonify({
                'error': {
                    'code': 'INVALID_EVENT_TYPE',
                    'message': f'Unknown event type: {event_type_str}'
                },
                'meta': {'request_id': request_id}
            }), 400

    if related_user:
        # Match either username field on either joined user alias.
        term = f"%{related_user}%"
        query = query.filter(
            or_(
                func.lower(owner_alias.localUsername).like(func.lower(term)),
                func.lower(owner_alias.external_username).like(func.lower(term)),
                func.lower(local_alias.localUsername).like(func.lower(term)),
                func.lower(local_alias.external_username).like(func.lower(term))
            )
        )

    # Newest first; error_out=False returns an empty page instead of 404.
    pagination = query.order_by(HistoryLog.timestamp.desc()).paginate(page=page, per_page=page_size, error_out=False)

    return jsonify({
        'data': [_serialize_log(item) for item in pagination.items],
        'meta': {
            'request_id': request_id,
            'pagination': {
                'page': pagination.page,
                'page_size': pagination.per_page,
                'total_items': pagination.total,
                'total_pages': pagination.pages or 1
            },
            'filters': {
                'search_message': search_message,
                'event_type': event_type_str,
                'related_user': related_user
            }
        }
    }), 200
|
||||
|
||||
281
app/routes/api_v1/settings_streaming.py
Normal file
281
app/routes/api_v1/settings_streaming.py
Normal file
@@ -0,0 +1,281 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request, current_app
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import Setting, EventType
|
||||
from app.utils.helpers import permission_required, log_event
|
||||
from app.services.media_service_manager import MediaServiceManager
|
||||
from app.services.media_service_factory import MediaServiceFactory
|
||||
from app.extensions import db
|
||||
|
||||
|
||||
def _load_streaming_settings():
    """Read streaming settings, coercing stored string values to ints.

    Unparseable interval values fall back to 30 seconds.
    """
    def _as_int(raw, fallback=30):
        try:
            return int(raw)
        except (TypeError, ValueError):
            return fallback

    # NOTE(review): this module stores booleans as 'true'/'false' strings,
    # unlike the Setting.get_bool style used elsewhere — confirm intended.
    badge_enabled = Setting.get('ENABLE_NAVBAR_STREAM_BADGE', 'false').lower() == 'true'

    return {
        'enable_navbar_stream_badge': badge_enabled,
        'session_monitoring_interval': _as_int(Setting.get('SESSION_MONITORING_INTERVAL_SECONDS', '30')),
        'websocket_refresh_interval': _as_int(Setting.get('STREAMING_WEBSOCKET_REFRESH_INTERVAL_SECONDS', '30'))
    }
|
||||
|
||||
|
||||
@bp.route('/settings/streaming', methods=['GET'])
@login_required
@permission_required('view_settings')
def api_get_streaming_settings():
    """Return the current streaming-related settings."""
    return jsonify({
        'data': _load_streaming_settings(),
        'meta': {
            'request_id': str(uuid4()),
            'deprecated': False
        }
    })
|
||||
|
||||
|
||||
@bp.route('/settings/streaming', methods=['PATCH'])
@login_required
@permission_required('edit_settings')
def api_update_streaming_settings():
    """Validate and persist the streaming settings.

    'session_monitoring_interval' (5-300 s) and 'websocket_refresh_interval'
    (2-300 s) default to their currently stored values when omitted, so
    partial updates are allowed for the intervals. Values are stored as
    strings (this module's convention). Returns the refreshed payload,
    or a 400 validation error / 500 on persistence failure.
    """
    request_id = str(uuid4())
    payload = request.get_json(silent=True) or {}

    enable_badge = bool(payload.get('enable_navbar_stream_badge', False))
    interval_value = payload.get('session_monitoring_interval')
    websocket_interval_value = payload.get('websocket_refresh_interval')

    # Omitted intervals keep their currently stored values.
    if interval_value is None:
        interval_value = _load_streaming_settings()['session_monitoring_interval']
    if websocket_interval_value is None:
        websocket_interval_value = _load_streaming_settings()['websocket_refresh_interval']

    try:
        interval_value = int(interval_value)
    except (TypeError, ValueError):
        return jsonify({
            'error': {
                'code': 'INVALID_INTERVAL',
                'message': 'Session monitoring interval must be an integer.'
            },
            'meta': {'request_id': request_id}
        }), 400

    if interval_value < 5 or interval_value > 300:
        return jsonify({
            'error': {
                'code': 'INTERVAL_OUT_OF_RANGE',
                'message': 'Session monitoring interval must be between 5 and 300 seconds.'
            },
            'meta': {'request_id': request_id}
        }), 400

    try:
        websocket_interval_value = int(websocket_interval_value)
    except (TypeError, ValueError):
        return jsonify({
            'error': {
                'code': 'INVALID_WEBSOCKET_INTERVAL',
                'message': 'WebSocket refresh interval must be an integer.'
            },
            'meta': {'request_id': request_id}
        }), 400

    if websocket_interval_value < 2 or websocket_interval_value > 300:
        return jsonify({
            'error': {
                'code': 'WEBSOCKET_INTERVAL_OUT_OF_RANGE',
                'message': 'WebSocket refresh interval must be between 2 and 300 seconds.'
            },
            'meta': {'request_id': request_id}
        }), 400

    try:
        # This module persists booleans/ints as strings.
        Setting.set('ENABLE_NAVBAR_STREAM_BADGE', 'true' if enable_badge else 'false')
        Setting.set('SESSION_MONITORING_INTERVAL_SECONDS', str(interval_value))
        Setting.set('STREAMING_WEBSOCKET_REFRESH_INTERVAL_SECONDS', str(websocket_interval_value))

        log_event(
            EventType.SETTING_CHANGE,
            f"Streaming settings updated: badge={'on' if enable_badge else 'off'}, interval={interval_value}, websocket_refresh={websocket_interval_value}",
            admin_id=getattr(current_user, 'id', None)
        )

        # Re-read from storage so the response reflects persisted state.
        return jsonify({
            'data': _load_streaming_settings(),
            'meta': {
                'request_id': request_id,
                'deprecated': False
            }
        })
    except Exception as exc:
        current_app.logger.error(f"Failed to update streaming settings: {exc}")
        return jsonify({
            'error': {
                'code': 'UPDATE_FAILED',
                'message': 'Failed to update streaming settings.'
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/streaming/active', methods=['GET'])
@login_required
@permission_required('view_streaming')
def api_get_active_sessions():
    """Get currently active streaming sessions from all servers.

    Iterates every configured media server, collects its formatted
    sessions, and returns them flat plus grouped by server nickname and
    by service type. Per-server failures are logged and skipped so one
    unreachable server does not fail the whole response.
    """
    request_id = str(uuid4())

    try:
        # Get formatted sessions from all services (similar to streaming.py route)
        all_servers = MediaServiceManager.get_all_servers()
        sessions = []
        by_server = {}
        by_service = {}

        for server in all_servers:
            service = MediaServiceFactory.create_service_from_db(server)
            if service:
                try:
                    server_nickname = server.server_nickname
                    # Use the service's get_formatted_sessions method which returns properly formatted dicts
                    formatted_sessions = service.get_formatted_sessions()
                    sessions.extend(formatted_sessions)

                    # Group by server
                    if formatted_sessions:
                        server_name = server.server_nickname
                        if server_name not in by_server:
                            by_server[server_name] = []
                        by_server[server_name].extend(formatted_sessions)

                        # Group by service
                        service_type = server.service_type.value
                        if service_type not in by_service:
                            by_service[service_type] = []
                        by_service[service_type].extend(formatted_sessions)

                except Exception as e:
                    # Best-effort: roll back any failed DB work and move on
                    # to the next server.
                    # NOTE(review): if server.server_nickname itself raises on
                    # the first iteration, server_nickname is unbound here.
                    db.session.rollback()
                    current_app.logger.error(f"Error getting formatted sessions from {server_nickname}: {e}")

        return jsonify({
            'sessions': sessions,
            'total_count': len(sessions),
            'by_server': by_server,
            'by_service': by_service,
            'meta': {
                'request_id': request_id
            }
        })

    except Exception as exc:
        current_app.logger.error(f"Failed to get active sessions: {exc}", exc_info=True)
        return jsonify({
            'error': {
                'code': 'FETCH_FAILED',
                'message': 'Failed to retrieve active sessions.'
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/streaming/terminate', methods=['POST'])
@login_required
@permission_required('terminate_stream')
def api_terminate_session():
    """Terminate an active streaming session.

    JSON body requires 'session_key', 'service_type', and 'server_name'
    (the server nickname); 'message' is optionally shown to the viewer.
    Returns 400 on missing fields, 404 when the server or its service
    cannot be resolved, 500 when termination fails.
    """
    request_id = str(uuid4())
    payload = request.get_json(silent=True) or {}

    session_key = payload.get('session_key')
    service_type = payload.get('service_type')
    server_name = payload.get('server_name')
    message = payload.get('message', '')

    if not session_key or not service_type or not server_name:
        return jsonify({
            'error': {
                'code': 'INVALID_PAYLOAD',
                'message': 'session_key, service_type, and server_name are required.'
            },
            'meta': {'request_id': request_id}
        }), 400

    try:
        # Get the specific server
        from app.models_media_services import MediaServer
        server = MediaServer.query.filter_by(
            server_nickname=server_name,
            is_active=True
        ).first()

        if not server:
            return jsonify({
                'error': {
                    'code': 'SERVER_NOT_FOUND',
                    'message': f'Server {server_name} not found or inactive.'
                },
                'meta': {'request_id': request_id}
            }), 404

        # Get the service for this server using MediaServiceFactory
        service = MediaServiceFactory.create_service_from_db(server)

        if not service:
            return jsonify({
                'error': {
                    'code': 'SERVICE_NOT_AVAILABLE',
                    'message': f'Service not available for server {server_name}.'
                },
                'meta': {'request_id': request_id}
            }), 404

        # Terminate the session
        success = service.terminate_session(session_key, message)

        if success:
            # Record the termination in the history log before responding.
            log_event(
                EventType.STREAMING_SESSION_TERMINATED,
                f"Session {session_key} terminated on {server_name}",
                admin_id=getattr(current_user, 'id', None)
            )

            return jsonify({
                'data': {
                    'success': True,
                    'message': f'Session terminated successfully'
                },
                'meta': {'request_id': request_id}
            })
        else:
            return jsonify({
                'error': {
                    'code': 'TERMINATION_FAILED',
                    'message': 'Failed to terminate session.'
                },
                'meta': {'request_id': request_id}
            }), 500

    except Exception as exc:
        current_app.logger.error(f"Failed to terminate session: {exc}", exc_info=True)
        return jsonify({
            'error': {
                'code': 'TERMINATION_ERROR',
                'message': str(exc)
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
38
app/routes/api_v1/settings_user_accounts.py
Normal file
38
app/routes/api_v1/settings_user_accounts.py
Normal file
@@ -0,0 +1,38 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import Setting, SettingValueType, EventType
|
||||
from app.utils.helpers import permission_required, log_event
|
||||
|
||||
|
||||
def _serialize_user_account_settings():
    """Build the user-account settings payload."""
    allow = Setting.get_bool('ALLOW_USER_ACCOUNTS', False)
    return {'allow_user_accounts': allow}
|
||||
|
||||
|
||||
@bp.route('/settings/user-accounts', methods=['GET'])
@login_required
@permission_required('manage_users_general')
def get_user_account_settings():
    """Return the current user-account settings."""
    return jsonify({
        'data': _serialize_user_account_settings(),
        'meta': {'request_id': str(uuid4())}
    })
|
||||
|
||||
|
||||
@bp.route('/settings/user-accounts', methods=['PATCH'])
@login_required
@permission_required('manage_users_general')
def update_user_account_settings():
    """Toggle whether end-user accounts are allowed."""
    request_id = str(uuid4())
    body = request.get_json(silent=True) or {}
    allow = bool(body.get('allow_user_accounts', False))

    Setting.set('ALLOW_USER_ACCOUNTS', allow, SettingValueType.BOOLEAN, "Allow User Accounts")
    log_event(EventType.SETTING_CHANGE, "User account settings updated via API.", admin_id=current_user.id)

    return jsonify({'data': _serialize_user_account_settings(), 'meta': {'request_id': request_id}}), 200
|
||||
|
||||
62
app/routes/api_v1/setup.py
Normal file
62
app/routes/api_v1/setup.py
Normal file
@@ -0,0 +1,62 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify
|
||||
from flask_login import login_required
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.routes.setup import get_completed_steps
|
||||
from app.models_plugins import Plugin
|
||||
from app.models_media_services import ServiceType, MediaServer
|
||||
from app.routes.media_servers_modules.setup import test_connection_setup
|
||||
|
||||
|
||||
def _serialize_setup_status():
    """Report which first-run setup steps have been completed."""
    done = get_completed_steps()
    status = {
        f'{step}_complete': step in done
        for step in ('account', 'app', 'plugins', 'discord')
    }
    status['completed_steps'] = sorted(done)
    return status
|
||||
|
||||
|
||||
@bp.route('/setup/status', methods=['GET'])
@login_required
def setup_status():
    """Return the setup-wizard completion status."""
    return jsonify({
        'data': _serialize_setup_status(),
        'meta': {'request_id': str(uuid4())}
    })
|
||||
|
||||
|
||||
@bp.route('/setup/plugins/<plugin_id>/servers', methods=['GET'])
@login_required
def setup_plugin_servers(plugin_id):
    """List configured media servers for one plugin (service type).

    404s with code UNKNOWN_PLUGIN when *plugin_id* does not name a
    ServiceType member.
    """
    meta = {'request_id': str(uuid4())}
    try:
        svc = ServiceType[plugin_id.upper()]
    except KeyError:
        err = {'code': 'UNKNOWN_PLUGIN', 'message': 'Plugin not recognized.'}
        return jsonify({'error': err, 'meta': meta}), 404

    rows = MediaServer.query.filter_by(service_type=svc).all()
    payload = [
        {
            'id': row.id,
            'name': row.server_nickname,
            'url': row.url,
            'is_active': row.is_active,
        }
        for row in rows
    ]
    return jsonify({'data': payload, 'meta': meta})
|
||||
|
||||
|
||||
@bp.route('/setup/plugins/<plugin_id>/test-connection', methods=['POST'])
def setup_test_connection(plugin_id):
    """Proxy a setup-flow connection test to the shared helper.

    The helper already returns a Flask response (optionally a
    ``(response, status)`` tuple), so it is passed straight through; the
    original ``isinstance(response, tuple)`` branch returned the same value
    on both paths and has been removed as dead code.

    NOTE(review): intentionally not @login_required — presumably callable
    during pre-auth setup; confirm this is the desired exposure.
    """
    request_id = str(uuid4())
    try:
        return test_connection_setup(plugin_id)
    except Exception as exc:
        return jsonify({'error': {'code': 'SETUP_TEST_FAILED', 'message': str(exc)}, 'meta': {'request_id': request_id}}), 500
|
||||
234
app/routes/api_v1/statistics.py
Normal file
234
app/routes/api_v1/statistics.py
Normal file
@@ -0,0 +1,234 @@
|
||||
from uuid import uuid4
|
||||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required
|
||||
from sqlalchemy import func, desc
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models_media_services import MediaStreamHistory, MediaServer, ServiceType, MediaItem
|
||||
from app.utils.helpers import format_duration
|
||||
|
||||
|
||||
def _parse_days_param():
    """Read the ``days`` query parameter; fall back to 7 on bad input."""
    raw = request.args.get('days', 7)
    try:
        return int(raw)
    except (TypeError, ValueError):
        return 7
|
||||
|
||||
|
||||
def _parse_services_param():
    """Parse ``services`` as a comma-separated list of lowercase names.

    Blank entries are dropped; a missing parameter yields an empty list.
    """
    raw = request.args.get('services')
    if not raw:
        return []
    cleaned = (chunk.strip().lower() for chunk in raw.split(','))
    return [name for name in cleaned if name]
|
||||
|
||||
|
||||
def _construct_poster_url(thumb_path, service_type):
|
||||
"""
|
||||
Construct a poster URL from a MediaItem's thumb_path.
|
||||
Mirrors the logic in MediaItem.to_dict()
|
||||
"""
|
||||
if not thumb_path:
|
||||
return None
|
||||
|
||||
if thumb_path.startswith('/admin/api/'):
|
||||
# Already a proxy URL with correct prefix (Jellyfin or other services)
|
||||
return thumb_path
|
||||
elif thumb_path.startswith('/api/'):
|
||||
# Legacy proxy URL without admin prefix - add it
|
||||
return f"/admin{thumb_path}"
|
||||
elif thumb_path.startswith('http'):
|
||||
# Full URL (like RomM) - use as-is
|
||||
return thumb_path
|
||||
else:
|
||||
# Plex format: regular path that needs proxy construction
|
||||
return f"/admin/api/media/{service_type}/images/proxy?path={thumb_path.lstrip('/')}"
|
||||
|
||||
|
||||
@bp.route('/statistics/watch', methods=['GET'])
@login_required
def get_watch_statistics():
    """
    Returns aggregated watch statistics (movies, shows, platforms, totals)
    mirroring the legacy dashboard widget.

    Query params: ``days`` (int, -1 = all history) and ``services``
    (comma-separated service names). All duration sums fall back to
    view_offset_at_end_seconds, then a 60-second floor, when
    duration_seconds is NULL.
    """
    request_id = str(uuid4())
    days = _parse_days_param()
    service_filters = _parse_services_param()

    end_date = datetime.now(timezone.utc)
    if days == -1:
        # "All time": anchor the window at the oldest recorded stream,
        # defaulting to a 7-day window when there is no history at all.
        earliest_stream = MediaStreamHistory.query.order_by(MediaStreamHistory.started_at.asc()).first()
        start_date = earliest_stream.started_at if earliest_stream else end_date - timedelta(days=7)
    else:
        # days-1 so that "7 days" spans today plus the six previous days.
        start_date = end_date - timedelta(days=days - 1)

    base_query = MediaStreamHistory.query.filter(
        MediaStreamHistory.started_at >= start_date,
        MediaStreamHistory.started_at <= end_date
    )

    if service_filters:
        # Restrict to the requested services; unknown names are skipped.
        # NOTE(review): this joins MediaServer onto base_query, and the
        # top_movies/top_shows queries below join MediaServer again —
        # confirm SQLAlchemy does not emit a duplicate-join error here.
        base_query = base_query.join(MediaServer, MediaStreamHistory.server_id == MediaServer.id)
        accepted = []
        for service in service_filters:
            try:
                accepted.append(ServiceType(service))
            except ValueError:
                continue
        if accepted:
            base_query = base_query.filter(MediaServer.service_type.in_(accepted))

    # Top movies, shows, platforms
    # Join with MediaItem to get poster URLs
    # For movies: use external_media_item_id to match MediaItem.external_id
    top_movies = base_query.filter(
        MediaStreamHistory.media_type.in_(['movie', 'film'])
    ).join(
        MediaServer, MediaStreamHistory.server_id == MediaServer.id
    ).outerjoin(
        MediaItem,
        (MediaStreamHistory.external_media_item_id == MediaItem.external_id) &
        (MediaStreamHistory.server_id == MediaItem.server_id)
    ).with_entities(
        MediaStreamHistory.media_title,
        func.count(MediaStreamHistory.id).label('play_count'),
        func.sum(
            func.coalesce(
                MediaStreamHistory.duration_seconds,
                MediaStreamHistory.view_offset_at_end_seconds,
                60
            )
        ).label('total_duration'),
        # max() collapses the per-title group to a single thumb/service.
        func.max(MediaItem.thumb_path).label('thumb_path'),
        func.max(MediaServer.service_type).label('service_type')
    ).group_by(MediaStreamHistory.media_title).order_by(desc('play_count')).limit(5).all()

    # For shows: group by grandparent_title (show name) and find the show poster
    # We need to join with MediaItem where item_type='show' and title matches grandparent_title
    top_shows = base_query.filter(
        MediaStreamHistory.media_type.in_(['show', 'episode', 'tv', 'series']),
        MediaStreamHistory.grandparent_title.isnot(None)
    ).join(
        MediaServer, MediaStreamHistory.server_id == MediaServer.id
    ).outerjoin(
        MediaItem,
        (MediaStreamHistory.grandparent_title == MediaItem.title) &
        (MediaStreamHistory.server_id == MediaItem.server_id) &
        (MediaItem.item_type == 'show')
    ).with_entities(
        MediaStreamHistory.grandparent_title.label('show_title'),
        func.count(MediaStreamHistory.id).label('play_count'),
        func.sum(
            func.coalesce(
                MediaStreamHistory.duration_seconds,
                MediaStreamHistory.view_offset_at_end_seconds,
                60
            )
        ).label('total_duration'),
        func.max(MediaItem.thumb_path).label('thumb_path'),
        func.max(MediaServer.service_type).label('service_type')
    ).group_by(MediaStreamHistory.grandparent_title).order_by(desc('play_count')).limit(5).all()

    top_platforms = base_query.with_entities(
        MediaStreamHistory.platform,
        func.count(MediaStreamHistory.id).label('play_count'),
        func.sum(
            func.coalesce(
                MediaStreamHistory.duration_seconds,
                MediaStreamHistory.view_offset_at_end_seconds,
                60
            )
        ).label('total_duration')
    ).group_by(MediaStreamHistory.platform).order_by(desc('play_count')).limit(5).all()

    # Overall totals across the filtered window.
    total_stats = base_query.with_entities(
        func.count(MediaStreamHistory.id).label('total_plays'),
        func.sum(
            func.coalesce(
                MediaStreamHistory.duration_seconds,
                MediaStreamHistory.view_offset_at_end_seconds,
                60
            )
        ).label('total_duration'),
        func.count(func.distinct(MediaStreamHistory.media_title)).label('unique_titles'),
        func.count(func.distinct(MediaStreamHistory.user_uuid)).label('unique_users')
    ).first()

    # Busiest single day: order day-buckets by count desc and take the first.
    daily_stream_counts = base_query.with_entities(
        func.date(MediaStreamHistory.started_at).label('stream_date'),
        func.count(MediaStreamHistory.id).label('daily_count')
    ).group_by(func.date(MediaStreamHistory.started_at)).order_by(desc('daily_count')).first()

    avg_session = base_query.with_entities(
        func.avg(
            func.coalesce(
                MediaStreamHistory.duration_seconds,
                MediaStreamHistory.view_offset_at_end_seconds,
                60
            )
        ).label('avg_duration')
    ).first()

    payload = {
        'top_movies': [
            {
                'title': movie.media_title or 'Unknown Movie',
                'plays': movie.play_count,
                'duration': format_duration(int(movie.total_duration or 0)),
                'poster_url': _construct_poster_url(
                    movie.thumb_path,
                    movie.service_type.value if movie.service_type else 'plex'
                ) if hasattr(movie, 'thumb_path') else None
            }
            for movie in top_movies
        ],
        'top_shows': [
            {
                'title': show.show_title or 'Unknown Show',
                'plays': show.play_count,
                'duration': format_duration(int(show.total_duration or 0)),
                'poster_url': _construct_poster_url(
                    show.thumb_path,
                    show.service_type.value if show.service_type else 'plex'
                ) if hasattr(show, 'thumb_path') else None
            }
            for show in top_shows
        ],
        'top_platforms': [
            {
                'name': platform.platform or 'Unknown Platform',
                'plays': platform.play_count,
                'duration': format_duration(int(platform.total_duration or 0))
            }
            for platform in top_platforms
        ],
        'totals': {
            'total_plays': total_stats.total_plays if total_stats and total_stats.total_plays else 0,
            'total_duration': format_duration(int(total_stats.total_duration or 0)) if total_stats else '0 min',
            'unique_titles': total_stats.unique_titles if total_stats else 0,
            'unique_users': total_stats.unique_users if total_stats else 0,
            'avg_session_length': format_duration(int(avg_session.avg_duration or 0)) if avg_session and avg_session.avg_duration else '0 min',
            'peak_day_streams': daily_stream_counts.daily_count if daily_stream_counts else 0
        },
        'filters': {
            'days': days,
            'services': service_filters
        }
    }

    response = {
        'data': payload,
        'meta': {
            'request_id': request_id,
            'generated_at': datetime.utcnow().isoformat() + 'Z',
            'deprecated': False
        }
    }
    return jsonify(response), 200
|
||||
67
app/routes/api_v1/streams.py
Normal file
67
app/routes/api_v1/streams.py
Normal file
@@ -0,0 +1,67 @@
|
||||
from uuid import uuid4
|
||||
from datetime import datetime
|
||||
|
||||
from flask import jsonify, current_app
|
||||
from flask_login import login_required
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.services.media_service_manager import MediaServiceManager
|
||||
|
||||
|
||||
def _serialize_session(session_dict):
|
||||
return {
|
||||
'id': session_dict.get('session_id') or session_dict.get('uuid'),
|
||||
'rating_key': session_dict.get('rating_key'),
|
||||
'title': session_dict.get('title') or session_dict.get('media_title'),
|
||||
'grandparent_title': session_dict.get('grandparent_title'),
|
||||
'parent_title': session_dict.get('parent_title'),
|
||||
'user': session_dict.get('user', {}),
|
||||
'started_at': session_dict.get('started_at'),
|
||||
'progress_percent': session_dict.get('progress_percent'),
|
||||
'state': session_dict.get('state'),
|
||||
'server': {
|
||||
'id': session_dict.get('server_id'),
|
||||
'name': session_dict.get('server_name'),
|
||||
'service_type': session_dict.get('service_type')
|
||||
},
|
||||
'raw': session_dict
|
||||
}
|
||||
|
||||
|
||||
@bp.route('/streams/active', methods=['GET'])
@login_required
def get_active_streams():
    """
    Returns active stream count and session previews for dashboard widgets.

    Any provider failure is logged and degraded to an empty result rather
    than surfaced as an error.
    """
    rid = str(uuid4())
    serialized = []
    count = 0
    try:
        raw_sessions = MediaServiceManager.get_all_active_sessions() or []
        serialized = [_serialize_session(s) for s in raw_sessions if isinstance(s, dict)]
        count = len(serialized)
    except Exception as exc:
        current_app.logger.error(f"/admin/api/v1/streams/active failed: {exc}", exc_info=True)
        serialized = []
        count = 0

    # Dashboard widgets only need a small preview.
    preview = serialized[:5]
    return jsonify({
        'data': {
            'count': count,
            'sessions': preview,
        },
        'meta': {
            'request_id': rid,
            'generated_at': datetime.utcnow().isoformat() + 'Z',
            'deprecated': False,
            'pagination': {
                'page': 1,
                'page_size': len(preview),
                'total_items': count,
                'total_pages': 1,
            },
        },
    }), 200
|
||||
302
app/routes/api_v1/streams_api.py
Normal file
302
app/routes/api_v1/streams_api.py
Normal file
@@ -0,0 +1,302 @@
|
||||
|
||||
from uuid import uuid4
|
||||
from datetime import datetime
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required, current_user
|
||||
from sqlalchemy import desc, func
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models_media_services import MediaStreamHistory, MediaServer, ServiceType
|
||||
from app.models import User, EventType
|
||||
from app.utils.helpers import permission_required, log_event
|
||||
from app.services.media_service_factory import MediaServiceFactory
|
||||
|
||||
|
||||
def _serialize_stream(stream: MediaStreamHistory):
    """Flatten a MediaStreamHistory row into the API stream shape."""
    started = stream.started_at
    stopped = stream.stopped_at
    server = stream.server
    return {
        'id': stream.id,
        'user_uuid': stream.user_uuid,
        'media_title': stream.media_title,
        'media_type': stream.media_type,
        'server_id': stream.server_id,
        'server_name': server.server_nickname if server else None,
        'started_at': started.isoformat() if started else None,
        'stopped_at': stopped.isoformat() if stopped else None,
        'duration_seconds': stream.duration_seconds,
        'platform': stream.platform,
        'grandparent_title': stream.grandparent_title,
        'parent_title': stream.parent_title,
        'library_name': stream.library_name,
    }
|
||||
|
||||
|
||||
def _apply_filters(query, user_uuid=None, service_type=None, status=None, start_date=None, end_date=None):
    """Chain optional stream-history filters onto *query*.

    Args:
        query: base SQLAlchemy query over MediaStreamHistory.
        user_uuid: restrict to streams by this user.
        service_type: service name matching a ServiceType value; an unknown
            name makes the query match nothing.
        status: 'active' (no stopped_at) or 'completed' (stopped_at set);
            any other value leaves status unfiltered.
        start_date / end_date: inclusive bounds on started_at.

    Returns:
        The query with the requested filters applied.
    """
    if user_uuid:
        query = query.filter(MediaStreamHistory.user_uuid == user_uuid)
    if service_type:
        try:
            service_enum = ServiceType(service_type)
            query = query.filter(MediaStreamHistory.server.has(service_type=service_enum))
        except ValueError:
            # Unknown service name should match no rows. Use an explicit SQL
            # FALSE clause — passing the Python literal False to filter() is
            # deprecated and rejected by SQLAlchemy 2.x.
            from sqlalchemy import false
            query = query.filter(false())
    if status == 'active':
        query = query.filter(MediaStreamHistory.stopped_at.is_(None))
    elif status == 'completed':
        query = query.filter(MediaStreamHistory.stopped_at.isnot(None))
    if start_date:
        query = query.filter(MediaStreamHistory.started_at >= start_date)
    if end_date:
        query = query.filter(MediaStreamHistory.started_at <= end_date)
    return query
|
||||
|
||||
|
||||
@bp.route('/streams', methods=['GET'])
@login_required
@permission_required('view_streaming')
def list_streams():
    """List stream history, newest first, with pagination and filters.

    Query params: ``page``, ``page_size`` (clamped to 1..100),
    ``user_uuid``, ``service_type``, ``status`` ('active'|'completed'),
    ``start`` and ``end`` (ISO-8601; a date-only ``end`` is extended to
    cover the whole day). Malformed dates are silently ignored.
    """
    request_id = str(uuid4())
    # Clamp pagination inputs: page >= 1, 1 <= page_size <= 100.
    page = max(1, request.args.get('page', type=int) or 1)
    page_size = max(1, min(request.args.get('page_size', type=int) or 25, 100))
    user_uuid = request.args.get('user_uuid')
    service_type = request.args.get('service_type')
    status = request.args.get('status')
    start_date_str = request.args.get('start')
    end_date_str = request.args.get('end')

    start_dt = None
    end_dt = None
    if start_date_str:
        try:
            start_dt = datetime.fromisoformat(start_date_str)
        except ValueError:
            pass  # invalid start date: treat as unfiltered
    if end_date_str:
        try:
            from datetime import timedelta
            end_dt = datetime.fromisoformat(end_date_str)
            # Include entire day if date provided without time
            if end_dt.time().isoformat() == '00:00:00':
                end_dt = end_dt + timedelta(days=1)
        except ValueError:
            pass  # invalid end date: treat as unfiltered

    query = MediaStreamHistory.query.order_by(desc(MediaStreamHistory.started_at))
    query = _apply_filters(query, user_uuid, service_type, status, start_dt, end_dt)

    # error_out=False: an out-of-range page yields an empty list, not a 404.
    pagination = query.paginate(page=page, per_page=page_size, error_out=False)

    return jsonify({
        'data': [_serialize_stream(stream) for stream in pagination.items],
        'meta': {
            'request_id': request_id,
            'pagination': {
                'page': pagination.page,
                'page_size': pagination.per_page,
                'total_items': pagination.total,
                'total_pages': pagination.pages or 1
            },
            # Echo the raw filter strings so the client can reflect state.
            'filters': {
                'user_uuid': user_uuid,
                'service_type': service_type,
                'status': status,
                'start': start_date_str,
                'end': end_date_str
            }
        }
    })
|
||||
|
||||
|
||||
@bp.route('/streams/<int:stream_id>', methods=['GET'])
@login_required
@permission_required('view_streaming')
def get_stream(stream_id):
    """Fetch one stream-history record by id; 404 when it does not exist."""
    meta = {'request_id': str(uuid4()), 'deprecated': False}
    record = MediaStreamHistory.query.get_or_404(stream_id)
    return jsonify({'data': _serialize_stream(record), 'meta': meta})
|
||||
|
||||
|
||||
@bp.route('/streams/summary', methods=['GET'])
@login_required
@permission_required('view_streaming')
def streams_summary():
    """Aggregate stream-history counts, durations, daily and per-service
    breakdowns for the optional ``start``/``end``/``service_type``/
    ``user_uuid`` filters. Malformed dates are ignored.
    """
    request_id = str(uuid4())
    start_date_str = request.args.get('start')
    end_date_str = request.args.get('end')
    service_type = request.args.get('service_type')
    user_uuid = request.args.get('user_uuid')

    start_dt = None
    end_dt = None
    if start_date_str:
        try:
            start_dt = datetime.fromisoformat(start_date_str)
        except ValueError:
            start_dt = None
    if end_date_str:
        try:
            from datetime import timedelta
            end_dt = datetime.fromisoformat(end_date_str)
            # A date-only 'end' is extended to cover the whole day.
            if end_dt.time().isoformat() == '00:00:00':
                end_dt = end_dt + timedelta(days=1)
        except ValueError:
            end_dt = None

    base_query = _apply_filters(MediaStreamHistory.query, user_uuid, service_type, None, start_dt, end_dt)

    # Counts: total plus the active/completed split (fresh queries so the
    # status filter does not leak between them).
    total_streams = base_query.count()
    active_streams = _apply_filters(MediaStreamHistory.query, user_uuid, service_type, 'active', start_dt, end_dt).count()
    completed_streams = _apply_filters(MediaStreamHistory.query, user_uuid, service_type, 'completed', start_dt, end_dt).count()

    # NULL duration_seconds rows contribute 0 here (no 60s fallback).
    total_duration = base_query.with_entities(func.coalesce(func.sum(MediaStreamHistory.duration_seconds), 0)).scalar() or 0
    average_duration = base_query.with_entities(func.coalesce(func.avg(MediaStreamHistory.duration_seconds), 0)).scalar() or 0

    daily_counts = (
        _apply_filters(MediaStreamHistory.query, user_uuid, service_type, None, start_dt, end_dt)
        .with_entities(func.date(MediaStreamHistory.started_at).label('day'), func.count(MediaStreamHistory.id))
        .group_by('day')
        .order_by('day')
        .all()
    )

    per_service = (
        _apply_filters(
            MediaStreamHistory.query.join(MediaServer),
            user_uuid,
            service_type,
            None,
            start_dt,
            end_dt
        )
        .with_entities(MediaServer.service_type, func.count(MediaStreamHistory.id))
        .group_by(MediaServer.service_type)
        .order_by(MediaServer.service_type)
        .all()
    )

    return jsonify({
        'data': {
            'counts': {
                'total': total_streams,
                'active': active_streams,
                'completed': completed_streams
            },
            'duration': {
                'total_seconds': int(total_duration),
                'average_seconds': int(average_duration)
            },
            'daily': [
                # func.date may yield a date object or a string depending on
                # the database backend, hence the isoformat/str fallback.
                {'date': day.isoformat() if hasattr(day, 'isoformat') else str(day), 'count': count}
                for day, count in daily_counts
            ],
            'by_service': [
                {'service_type': svc.value if isinstance(svc, ServiceType) else str(svc), 'count': count}
                for svc, count in per_service
            ]
        },
        'meta': {
            'request_id': request_id,
            'filters': {
                'start': start_date_str,
                'end': end_date_str,
                'service_type': service_type,
                'user_uuid': user_uuid
            },
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    })
|
||||
|
||||
|
||||
@bp.route('/streams/<int:stream_id>/terminate', methods=['POST'])
@login_required
@permission_required('kill_stream')
def terminate_stream(stream_id):
    """Ask the owning media server to terminate an active stream.

    JSON body may include ``message`` (shown to the viewer, when the
    service supports it). Returns 400 if the stream already ended, has no
    stored session key, or the service cannot terminate sessions; 502 if
    the server rejects the command; 500 on transport errors.
    """
    request_id = str(uuid4())
    payload = request.get_json(silent=True) or {}
    message = payload.get('message')

    stream = MediaStreamHistory.query.get_or_404(stream_id)

    # A stopped_at timestamp means the session is no longer live.
    if stream.stopped_at:
        return jsonify({
            'error': {
                'code': 'STREAM_NOT_ACTIVE',
                'message': 'The stream has already ended.'
            },
            'meta': {
                'request_id': request_id
            }
        }), 400

    # Prefer the native session key; fall back to the external session id.
    session_key = stream.session_key or stream.external_session_id
    if not session_key:
        return jsonify({
            'error': {
                'code': 'SESSION_KEY_MISSING',
                'message': 'No session key is stored for this stream; it cannot be terminated.'
            },
            'meta': {
                'request_id': request_id
            }
        }), 400

    server = stream.server
    service = MediaServiceFactory.create_service_from_db(server)

    # Not every backend implements remote termination; probe for the hook.
    if not service or not hasattr(service, 'terminate_session'):
        return jsonify({
            'error': {
                'code': 'SERVICE_NOT_SUPPORTED',
                'message': f"{server.service_type.value.capitalize()} does not support remote termination."
            },
            'meta': {
                'request_id': request_id
            }
        }), 400

    try:
        success = service.terminate_session(session_key, message)
    except Exception as exc:
        return jsonify({
            'error': {
                'code': 'TERMINATION_FAILED',
                'message': f'Failed to terminate session: {exc}'
            },
            'meta': {
                'request_id': request_id
            }
        }), 500

    # The call succeeded but the server declined: upstream failure (502).
    if not success:
        return jsonify({
            'error': {
                'code': 'TERMINATION_FAILED',
                'message': 'The media server did not accept the termination command.'
            },
            'meta': {
                'request_id': request_id
            }
        }), 502

    # NOTE(review): logged as SETTING_CHANGE — confirm whether a dedicated
    # stream-termination event type exists or should be added.
    log_event(
        EventType.SETTING_CHANGE,
        f"Terminated {server.service_type.value} session {session_key} on {server.server_nickname}",
        admin_id=getattr(current_user, 'id', None),
        server_id=server.id
    )

    return jsonify({
        'data': {
            'success': True,
            'message': f'Termination command sent for session {session_key}.'
        },
        'meta': {
            'request_id': request_id
        }
    }), 200
|
||||
337
app/routes/api_v1/user_roles.py
Normal file
337
app/routes/api_v1/user_roles.py
Normal file
@@ -0,0 +1,337 @@
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.utils.helpers import permission_required
|
||||
from app.models import UserRole, User
|
||||
from app.extensions import db
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
def _serialize_user_role(role, include_users=False):
|
||||
"""Serialize a UserRole object to JSON"""
|
||||
data = {
|
||||
'id': role.id,
|
||||
'name': role.name,
|
||||
'description': role.description,
|
||||
'color': role.color,
|
||||
'icon': role.icon,
|
||||
'created_at': role.created_at.isoformat() if role.created_at else None,
|
||||
'updated_at': role.updated_at.isoformat() if role.updated_at else None
|
||||
}
|
||||
|
||||
if include_users:
|
||||
users = UserRole.get_users_with_role(role.id)
|
||||
data['users'] = [{
|
||||
'uuid': user.uuid,
|
||||
'username': user.get_display_name(),
|
||||
'user_type': user.userType.value
|
||||
} for user in users]
|
||||
data['user_count'] = len(users)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@bp.route('/user-roles', methods=['GET'])
@login_required
@permission_required('view_users')
def list_user_roles():
    """List all user (visual) roles, optionally embedding member lists."""
    rid = str(uuid4())
    with_users = request.args.get('include_users', 'false').lower() == 'true'

    # Alphabetical by role name for stable display ordering.
    roles = UserRole.query.order_by(UserRole.name).all()

    return jsonify({
        'data': [_serialize_user_role(role, with_users) for role in roles],
        'meta': {
            'request_id': rid,
            'deprecated': False,
            'total_count': len(roles),
            'generated_at': datetime.utcnow().isoformat() + 'Z',
        },
    })
|
||||
|
||||
|
||||
@bp.route('/user-roles/<role_id>', methods=['GET'])
@login_required
@permission_required('view_users')
def get_user_role(role_id):
    """Fetch one user role; members are embedded unless include_users=false."""
    rid = str(uuid4())
    role = UserRole.query.get(role_id)

    if role is None:
        err = {
            'code': 'ROLE_NOT_FOUND',
            'message': f'User role with ID {role_id} not found',
            'details': {'role_id': role_id},
        }
        return jsonify({'error': err, 'meta': {'request_id': rid}}), 404

    # Unlike the list endpoint, users are included by default here.
    with_users = request.args.get('include_users', 'true').lower() == 'true'

    return jsonify({
        'data': _serialize_user_role(role, with_users),
        'meta': {
            'request_id': rid,
            'deprecated': False,
            'generated_at': datetime.utcnow().isoformat() + 'Z',
        },
    })
|
||||
|
||||
|
||||
@bp.route('/user-roles', methods=['POST'])
@login_required
@permission_required('manage_users')
def create_user_role():
    """Create a new user (visual) role.

    Requires a JSON body with ``name``; accepts optional ``description``,
    ``color`` (defaults to grey) and ``icon``. Returns 400 for a non-JSON
    body, 422 when ``name`` is missing, 409 on a duplicate name, 201 with
    the created role on success.
    """
    request_id = str(uuid4())
    data = request.get_json()

    if not data:
        return jsonify({
            'error': {
                'code': 'INVALID_REQUEST',
                'message': 'Request body must be JSON',
                'hint': 'Ensure Content-Type header is application/json'
            },
            'meta': {'request_id': request_id}
        }), 400

    # Validate required fields
    if not data.get('name'):
        return jsonify({
            'error': {
                'code': 'VALIDATION_ERROR',
                'message': 'Missing required field: name',
                'details': {'missing_fields': ['name']}
            },
            'meta': {'request_id': request_id}
        }), 422

    # Check if role name already exists
    # NOTE(review): check-then-insert can race with a concurrent create —
    # confirm whether a unique constraint backs this and whether the commit
    # below should also map IntegrityError to DUPLICATE_ROLE_NAME.
    existing = UserRole.query.filter_by(name=data['name']).first()
    if existing:
        return jsonify({
            'error': {
                'code': 'DUPLICATE_ROLE_NAME',
                'message': f'User role with name "{data["name"]}" already exists',
                'details': {'name': data['name']},
                'hint': 'Choose a unique name for this role'
            },
            'meta': {'request_id': request_id}
        }), 409

    # Create role
    role = UserRole(
        name=data['name'],
        description=data.get('description'),
        color=data.get('color', '#808080'),  # neutral grey default
        icon=data.get('icon')
    )

    try:
        db.session.add(role)
        db.session.commit()

        return jsonify({
            'data': _serialize_user_role(role),
            'meta': {
                'request_id': request_id,
                'deprecated': False,
                'generated_at': datetime.utcnow().isoformat() + 'Z'
            }
        }), 201
    except Exception as e:
        # Roll back so the session stays usable for subsequent requests.
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'ROLE_CREATION_FAILED',
                'message': 'Failed to create user role',
                'details': {'error': str(e)}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/user-roles/<role_id>', methods=['PATCH'])
@login_required
@permission_required('manage_users')
def update_user_role(role_id):
    """Partially update an existing user role.

    Accepts any subset of ``name``, ``description``, ``color``, ``icon``.
    Returns 404 for an unknown role, 400 for a non-JSON body, 409 when the
    new name collides with another role.
    """
    request_id = str(uuid4())
    role = UserRole.query.get(role_id)

    if not role:
        return jsonify({
            'error': {
                'code': 'ROLE_NOT_FOUND',
                'message': f'User role with ID {role_id} not found',
                'details': {'role_id': role_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    data = request.get_json()
    if not data:
        return jsonify({
            'error': {
                'code': 'INVALID_REQUEST',
                'message': 'Request body must be JSON',
                'hint': 'Ensure Content-Type header is application/json'
            },
            'meta': {'request_id': request_id}
        }), 400

    # Check for name conflicts if changing name
    if 'name' in data and data['name'] != role.name:
        existing = UserRole.query.filter_by(name=data['name']).first()
        if existing:
            return jsonify({
                'error': {
                    'code': 'DUPLICATE_ROLE_NAME',
                    'message': f'User role with name "{data["name"]}" already exists',
                    'details': {'name': data['name']}
                },
                'meta': {'request_id': request_id}
            }), 409

    # Update fields — only keys present in the body are touched (PATCH).
    updatable_fields = ['name', 'description', 'color', 'icon']
    for field in updatable_fields:
        if field in data:
            setattr(role, field, data[field])

    role.updated_at = datetime.utcnow()

    try:
        db.session.commit()

        return jsonify({
            'data': _serialize_user_role(role),
            'meta': {
                'request_id': request_id,
                'deprecated': False,
                'generated_at': datetime.utcnow().isoformat() + 'Z'
            }
        })
    except Exception as e:
        # Roll back so the session stays usable for subsequent requests.
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'ROLE_UPDATE_FAILED',
                'message': 'Failed to update user role',
                'details': {'error': str(e)}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/user-roles/<role_id>', methods=['DELETE'])
@login_required
@permission_required('manage_users')
def delete_user_role(role_id):
    """Delete a user role.

    Refuses (409) while any user still holds the role; returns the deleted
    role's serialized snapshot on success so the client can show an undo
    hint or confirmation.
    """
    request_id = str(uuid4())
    role = UserRole.query.get(role_id)

    if not role:
        return jsonify({
            'error': {
                'code': 'ROLE_NOT_FOUND',
                'message': f'User role with ID {role_id} not found',
                'details': {'role_id': role_id}
            },
            'meta': {'request_id': request_id}
        }), 404

    # Check if role has users assigned
    users_with_role = UserRole.get_users_with_role(role.id)
    if users_with_role:
        return jsonify({
            'error': {
                'code': 'ROLE_HAS_USERS',
                'message': f'Cannot delete role that is assigned to {len(users_with_role)} user(s)',
                'details': {'user_count': len(users_with_role)},
                'hint': 'Remove this role from all users before deleting'
            },
            'meta': {'request_id': request_id}
        }), 409

    # Snapshot before deletion — the ORM object is unusable after commit.
    role_data = _serialize_user_role(role)

    try:
        db.session.delete(role)
        db.session.commit()

        return jsonify({
            'data': {
                'success': True,
                'deleted_role': role_data
            },
            'meta': {
                'request_id': request_id,
                'deprecated': False,
                'generated_at': datetime.utcnow().isoformat() + 'Z'
            }
        })
    except Exception as e:
        # Roll back so the session stays usable for subsequent requests.
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'ROLE_DELETION_FAILED',
                'message': 'Failed to delete user role',
                'details': {'error': str(e)}
            },
            'meta': {'request_id': request_id}
        }), 500
|
||||
|
||||
|
||||
@bp.route('/user-roles/<role_id>/users', methods=['GET'])
@login_required
@permission_required('view_users')
def get_user_role_users(role_id):
    """Return every user currently assigned to the given user role.

    404s when the role id is unknown; otherwise responds with a list of
    lightweight user summaries plus role metadata and a total count.
    """
    request_id = str(uuid4())
    role = UserRole.query.get(role_id)

    if role is None:
        not_found = {
            'error': {
                'code': 'ROLE_NOT_FOUND',
                'message': f'User role with ID {role_id} not found',
                'details': {'role_id': role_id}
            },
            'meta': {'request_id': request_id}
        }
        return jsonify(not_found), 404

    members = UserRole.get_users_with_role(role.id)

    # Lightweight projection of each member — enough for a role-membership
    # listing without exposing full user records.
    member_rows = []
    for member in members:
        member_rows.append({
            'uuid': member.uuid,
            'username': member.get_display_name(),
            'user_type': member.userType.value,
            'email': member.get_email(),
            'is_active': member.is_active
        })

    payload = {
        'data': member_rows,
        'meta': {
            'request_id': request_id,
            'deprecated': False,
            'role': {
                'id': role.id,
                'name': role.name
            },
            'total_count': len(members),
            'generated_at': datetime.utcnow().isoformat() + 'Z'
        }
    }
    return jsonify(payload)
|
||||
170
app/routes/api_v1/users.py
Normal file
170
app/routes/api_v1/users.py
Normal file
@@ -0,0 +1,170 @@
|
||||
|
||||
from uuid import uuid4
|
||||
from datetime import datetime
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required
|
||||
from sqlalchemy import or_, func
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import User, UserType
|
||||
from app.models_media_services import MediaLibrary
|
||||
|
||||
|
||||
def _get_avatar_url(user: User):
    """Generate avatar URL for a user based on their type and settings.

    Resolution order:
      * OWNER: Plex thumbnail first, then (via the LOCAL/OWNER branch below)
        Discord CDN avatar, then any stored external avatar URL.
      * LOCAL: Discord CDN avatar, then external avatar URL.
      * SERVICE: external avatar URL, then the 'thumb' from service_settings
        (made absolute against the server's public/base URL when relative).
    Returns None when nothing applies.
    """
    if user.userType == UserType.OWNER:
        if user.plex_thumb:
            return user.plex_thumb
    # Deliberate fall-through: an OWNER without a plex_thumb also gets the
    # Discord/external lookups in this branch.
    if user.userType in {UserType.LOCAL, UserType.OWNER}:
        if user.discord_avatar_hash and user.discord_user_id:
            return f"https://cdn.discordapp.com/avatars/{user.discord_user_id}/{user.discord_avatar_hash}.png?size=256"
        if user.external_avatar_url:
            return user.external_avatar_url
    if user.userType == UserType.SERVICE:
        if user.external_avatar_url:
            return user.external_avatar_url
        service_thumb = None
        if user.service_settings:
            # NOTE(review): service_settings is treated as a dict with an
            # optional 'thumb' key — confirm against the model definition.
            service_thumb = user.service_settings.get('thumb')
        if service_thumb and user.server:
            base_url = user.server.public_url or user.server.url
            if service_thumb.startswith('/'):
                # Relative path: anchor it to the server's base URL.
                return f"{base_url.rstrip('/')}{service_thumb}"
            return service_thumb
    # A SERVICE user with a thumb but no server record falls through to None.
    return None
|
||||
|
||||
|
||||
@bp.route('/users', methods=['GET'])
@login_required
def list_users():
    """List users with pagination, search, filtering and sorting.

    Query parameters:
        page, page_size: pagination (page_size clamped to 1..100).
        search: case-insensitive substring match on local username and
            both email columns.
        user_type: one of owner|local|service.
        role: admin role name (case-insensitive exact match).
        sort: username_asc|username_desc|created_asc; default created_desc.

    Returns the standard envelope: data (user summaries) + meta
    (pagination, applied filters, request id).
    """
    request_id = str(uuid4())
    # Clamp pagination inputs to sane bounds.
    page = max(1, request.args.get('page', type=int) or 1)
    page_size = request.args.get('page_size', type=int) or 25
    page_size = max(1, min(page_size, 100))

    search = (request.args.get('search') or '').strip()
    user_type = (request.args.get('user_type') or '').strip().lower()
    role_filter = request.args.get('role')

    query = User.query

    if user_type in {'owner', 'local', 'service'}:
        try:
            query = query.filter(User.userType == UserType(user_type))
        except ValueError:
            # Unknown enum value: silently ignore the filter rather than 400.
            pass

    if search:
        term = f"%{search}%"
        query = query.filter(
            or_(
                func.lower(User.localUsername).like(func.lower(term)),
                func.lower(User.email).like(func.lower(term)),
                func.lower(User.discord_email).like(func.lower(term))
            )
        )

    if role_filter:
        # BUG FIX: the previous expression `User.admin_roles.any().name`
        # dereferenced `.name` on the EXISTS clause produced by .any(),
        # which raises AttributeError as soon as a role filter is supplied.
        # Join the roles relationship and compare on the AdminRole.name
        # column instead (case-insensitive).
        from app.models import AdminRole  # local import; not in this module's header
        query = query.join(User.admin_roles).filter(
            func.lower(AdminRole.name) == role_filter.lower()
        )

    sort = (request.args.get('sort') or '').lower()

    if sort == 'username_asc':
        query = query.order_by(func.lower(User.localUsername).asc())
    elif sort == 'username_desc':
        query = query.order_by(func.lower(User.localUsername).desc())
    elif sort == 'created_asc':
        query = query.order_by(User.created_at.asc())
    else:
        # Default: newest accounts first.
        query = query.order_by(User.created_at.desc())

    pagination = query.paginate(page=page, per_page=page_size, error_out=False)

    items = []
    for user in pagination.items:
        user_data = {
            'id': user.id,
            'uuid': user.uuid,
            'username': user.localUsername or user.external_username,
            'email': user.email or user.discord_email,
            'user_type': user.userType.value,
            'display_name': user.get_display_name() if hasattr(user, 'get_display_name') else None,
            'avatar_url': _get_avatar_url(user),
            'created_at': user.created_at.isoformat() if user.created_at else None,
            'last_login_at': user.last_login_at.isoformat() if user.last_login_at else None,
            'is_active': user.is_active,
            'admin_roles': [role.name for role in user.admin_roles],
            'user_roles': [
                {
                    'name': role.name,
                    'color': getattr(role, 'color', None),
                    'icon': getattr(role, 'icon', None),
                    'description': getattr(role, 'description', None)
                }
                for role in getattr(user, 'user_roles', [])
            ],
            'linked_service_count': len(getattr(user, 'linked_children', []))
        }

        # For service accounts, surface the local account they link back to.
        if user.userType == UserType.SERVICE and user.linkedUserId:
            linked_parent = User.query.filter_by(uuid=user.linkedUserId).first()
            if linked_parent:
                user_data['linked_local_user'] = {
                    'uuid': linked_parent.uuid,
                    'username': linked_parent.localUsername,
                    'display_name': linked_parent.get_display_name() if hasattr(linked_parent, 'get_display_name') else linked_parent.localUsername
                }
            else:
                user_data['linked_local_user'] = None

        # Add server info and libraries for service users
        if user.userType == UserType.SERVICE and user.server:
            user_data['server_nickname'] = user.server.server_nickname
            user_data['service_type'] = user.server.service_type.value
            user_data['service_join_date'] = user.service_join_date.isoformat() if user.service_join_date else None
            user_data['last_streamed_at'] = user.last_activity_at.isoformat() if user.last_activity_at else None

            libraries = []
            # An empty allowed_library_ids list means unrestricted access.
            has_all_libraries = False

            if user.allowed_library_ids:
                # IDs may be stored as either external or internal library
                # ids, so match against both columns.
                allowed_ids = [str(value) for value in user.allowed_library_ids]
                libs = MediaLibrary.query.filter(
                    MediaLibrary.server_id == user.server_id,
                    or_(
                        MediaLibrary.external_id.in_(allowed_ids),
                        MediaLibrary.internal_id.in_(allowed_ids)
                    )
                ).all()
                libraries = [library.name for library in libs]
            else:
                has_all_libraries = True

            user_data['libraries'] = libraries
            user_data['has_all_libraries'] = has_all_libraries

        items.append(user_data)

    response = {
        'data': items,
        'meta': {
            'request_id': request_id,
            'generated_at': datetime.utcnow().isoformat() + 'Z',
            'deprecated': False,
            'pagination': {
                'page': pagination.page,
                'page_size': pagination.per_page,
                'total_items': pagination.total,
                'total_pages': pagination.pages or 1
            },
            'filters': {
                'search': search,
                'user_type': user_type,
                'role': role_filter
            }
        }
    }
    return jsonify(response), 200
|
||||
49
app/routes/api_v1/users_actions.py
Normal file
49
app/routes/api_v1/users_actions.py
Normal file
@@ -0,0 +1,49 @@
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import User, EventType
|
||||
from app.utils.helpers import log_event, permission_required
|
||||
from app.extensions import db
|
||||
|
||||
|
||||
@bp.route('/users/<string:user_uuid>/reset-password', methods=['POST'])
@login_required
@permission_required('edit_user')
def reset_user_password(user_uuid):
    """Flag a local/owner account so the user must set a new password.

    Does not change the password itself — only sets force_password_change,
    which the login flow is expected to honor. Returns 400 for service
    accounts, 404 for unknown UUIDs.
    """
    request_id = str(uuid4())
    user = User.query.filter_by(uuid=user_uuid).first_or_404()

    # BUG FIX: the original checked `User.UserType.LOCAL` — but UserType is a
    # top-level enum in app.models (sibling api_v1 modules import it
    # directly), not an attribute of User, so every request raised
    # AttributeError. Import the enum and compare against it.
    from app.models import UserType  # local import; this module's header only imports User, EventType
    if user.userType not in {UserType.LOCAL, UserType.OWNER}:
        return jsonify({
            'error': {
                'code': 'UNSUPPORTED_USER_TYPE',
                'message': 'Password reset is only supported for local or owner accounts.'
            },
            'meta': {
                'request_id': request_id
            }
        }), 400

    user.force_password_change = True
    db.session.commit()

    log_event(
        EventType.ADMIN_PASSWORD_CHANGE,
        f"Password reset initiated for user '{user.localUsername}'.",
        admin_id=current_user.id
    )

    response = {
        'data': {
            'success': True,
            'message': 'Password reset flagged. User will be prompted to set a new password on next login.'
        },
        'meta': {
            'request_id': request_id
        }
    }
    return jsonify(response), 200
|
||||
248
app/routes/api_v1/users_bulk.py
Normal file
248
app/routes/api_v1/users_bulk.py
Normal file
@@ -0,0 +1,248 @@
|
||||
from datetime import datetime, timedelta
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request, current_app
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.extensions import db
|
||||
from app.models import User, UserType, EventType
|
||||
from app.routes.api_v1 import bp
|
||||
from app.utils.helpers import permission_required, log_event
|
||||
|
||||
|
||||
def _status_entry(user: User, action: str, status: str, message: str | None = None):
    """Build one per-user result row for the bulk-operation response.

    The 'message' key is included only when a non-empty message is given.
    """
    row = {
        'user_uuid': user.uuid,
        'username': user.localUsername or user.external_username,
        'action': action,
        'status': status,
    }
    return {**row, 'message': message} if message else row
|
||||
|
||||
|
||||
@bp.route('/users/bulk', methods=['POST'])
@login_required
@permission_required('mass_edit_users')
def bulk_user_operations():
    """Apply a list of operations to a list of users in one request.

    Payload: {"user_uuids": [...], "operations": [{"action": ..., ...}, ...]}.
    Every operation is attempted against every user; per-user/per-operation
    outcomes are collected in `results` ('updated'/'deleted'/'skipped'/'error')
    and tallied in `stats`. All changes are committed in a single transaction
    at the end — a commit failure rolls everything back and returns 500.
    """
    request_id = str(uuid4())
    payload = request.get_json(silent=True) or {}
    # Accept both 'user_uuids' and the legacy 'users' key.
    user_uuids = payload.get('user_uuids') or payload.get('users') or []
    operations = payload.get('operations') or []

    if not isinstance(user_uuids, list) or not user_uuids:
        return jsonify({
            'error': {
                'code': 'INVALID_PAYLOAD',
                'message': 'user_uuids must be a non-empty list.'
            },
            'meta': {'request_id': request_id}
        }), 400

    if not isinstance(operations, list) or not operations:
        return jsonify({
            'error': {
                'code': 'INVALID_PAYLOAD',
                'message': 'operations must be a non-empty list.'
            },
            'meta': {'request_id': request_id}
        }), 400

    users = User.query.filter(User.uuid.in_(user_uuids)).all()
    if not users:
        return jsonify({
            'error': {
                'code': 'USERS_NOT_FOUND',
                'message': 'No matching users were found.'
            },
            'meta': {'request_id': request_id}
        }), 404

    results = []
    stats = {
        'updated': 0,
        'deleted': 0,
        'skipped': 0,
        'errors': 0
    }

    # Action names recorded for the audit log entry below.
    actions_executed = [op.get('action') for op in operations if isinstance(op, dict)]

    for user in users:
        # Once a 'delete_users' op removed this user, remaining ops are skipped.
        deleted = False

        for operation in operations:
            if deleted:
                stats['skipped'] += 1
                # NOTE(review): this runs before the isinstance check below,
                # so a non-dict list element after a delete would raise on
                # operation.get() — consider checking isinstance first.
                results.append(_status_entry(user, operation.get('action', 'unknown'), 'skipped', 'User already deleted in this batch.'))
                continue

            if not isinstance(operation, dict):
                stats['errors'] += 1
                results.append(_status_entry(user, 'unknown', 'error', 'Operation must be a JSON object.'))
                continue

            action = operation.get('action')
            if not action:
                stats['errors'] += 1
                results.append(_status_entry(user, 'unknown', 'error', 'Operation missing "action".'))
                continue

            # Each action only mutates the in-session object; nothing is
            # flushed to the DB until the single commit after the loops.
            try:
                if action == 'set_is_active':
                    value = bool(operation.get('value', True))
                    if user.userType == UserType.OWNER and not value:
                        stats['skipped'] += 1
                        results.append(_status_entry(user, action, 'skipped', 'Owner account cannot be deactivated.'))
                        continue
                    if user.is_active == value:
                        stats['skipped'] += 1
                        results.append(_status_entry(user, action, 'skipped', 'No change required.'))
                        continue
                    user.is_active = value
                    stats['updated'] += 1
                    results.append(_status_entry(user, action, 'updated'))

                elif action == 'update_libraries':
                    library_ids = operation.get('library_ids') or []
                    if user.userType != UserType.SERVICE:
                        stats['skipped'] += 1
                        results.append(_status_entry(user, action, 'skipped', 'Libraries can only be set for service accounts.'))
                        continue
                    if not isinstance(library_ids, list):
                        raise ValueError('library_ids must be a list.')
                    user.allowed_library_ids = library_ids
                    stats['updated'] += 1
                    results.append(_status_entry(user, action, 'updated'))

                elif action == 'extend_access':
                    days = operation.get('days')
                    if not isinstance(days, int) or days <= 0:
                        raise ValueError('days must be a positive integer.')
                    # Extend from the current expiry if set, otherwise from now.
                    base = user.access_expires_at or datetime.utcnow()
                    user.access_expires_at = base + timedelta(days=days)
                    stats['updated'] += 1
                    results.append(_status_entry(user, action, 'updated'))

                elif action == 'set_expiration':
                    expires_at = operation.get('expires_at')
                    if not expires_at:
                        raise ValueError('expires_at is required.')
                    try:
                        parsed = datetime.fromisoformat(expires_at)
                    except ValueError as exc:
                        raise ValueError(f'Invalid ISO datetime: {exc}') from exc
                    user.access_expires_at = parsed
                    stats['updated'] += 1
                    results.append(_status_entry(user, action, 'updated'))

                elif action == 'clear_expiration':
                    user.access_expires_at = None
                    stats['updated'] += 1
                    results.append(_status_entry(user, action, 'updated'))

                elif action == 'add_to_purge_whitelist':
                    if not user.is_purge_whitelisted:
                        user.is_purge_whitelisted = True
                        stats['updated'] += 1
                        results.append(_status_entry(user, action, 'updated'))
                    else:
                        stats['skipped'] += 1
                        results.append(_status_entry(user, action, 'skipped', 'User already purged whitelist.'))

                elif action == 'remove_from_purge_whitelist':
                    if user.is_purge_whitelisted:
                        user.is_purge_whitelisted = False
                        stats['updated'] += 1
                        results.append(_status_entry(user, action, 'updated'))
                    else:
                        stats['skipped'] += 1
                        results.append(_status_entry(user, action, 'skipped', 'User not in purge whitelist.'))

                elif action == 'add_to_bot_whitelist':
                    if not user.is_discord_bot_whitelisted:
                        user.is_discord_bot_whitelisted = True
                        stats['updated'] += 1
                        results.append(_status_entry(user, action, 'updated'))
                    else:
                        stats['skipped'] += 1
                        results.append(_status_entry(user, action, 'skipped', 'User already in bot whitelist.'))

                elif action == 'remove_from_bot_whitelist':
                    if user.is_discord_bot_whitelisted:
                        user.is_discord_bot_whitelisted = False
                        stats['updated'] += 1
                        results.append(_status_entry(user, action, 'updated'))
                    else:
                        stats['skipped'] += 1
                        results.append(_status_entry(user, action, 'skipped', 'User not in bot whitelist.'))

                elif action == 'allow_downloads':
                    value = bool(operation.get('value', True))
                    if user.userType != UserType.SERVICE:
                        stats['skipped'] += 1
                        results.append(_status_entry(user, action, 'skipped', 'Downloads flag applies to service users only.'))
                        continue
                    if user.allow_downloads == value:
                        stats['skipped'] += 1
                        results.append(_status_entry(user, action, 'skipped', 'No change required.'))
                        continue
                    user.allow_downloads = value
                    stats['updated'] += 1
                    results.append(_status_entry(user, action, 'updated'))

                elif action == 'delete_users':
                    if user.userType == UserType.OWNER:
                        stats['skipped'] += 1
                        results.append(_status_entry(user, action, 'skipped', 'Owner account cannot be deleted.'))
                        continue

                    # Detach service children before deleting a local account
                    # so they are orphaned rather than cascade-deleted.
                    if user.userType == UserType.LOCAL:
                        for child in getattr(user, 'linked_children', []) or []:
                            child.linkedUserId = None

                    db.session.delete(user)
                    deleted = True
                    stats['deleted'] += 1
                    results.append(_status_entry(user, action, 'deleted'))

                else:
                    stats['skipped'] += 1
                    results.append(_status_entry(user, action, 'skipped', 'Unsupported action.'))

            except Exception as exc:
                # A failing action is recorded but does not abort the batch.
                current_app.logger.error(f"Bulk user action '{action}' failed for {user.uuid}: {exc}", exc_info=True)
                stats['errors'] += 1
                results.append(_status_entry(user, action, 'error', str(exc)))

    try:
        db.session.commit()
    except Exception as exc:
        current_app.logger.error(f"Failed to commit bulk user operations: {exc}", exc_info=True)
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'BULK_UPDATE_FAILED',
                'message': 'Database error while applying bulk operations.'
            },
            'meta': {'request_id': request_id}
        }), 500

    log_event(
        EventType.SETTING_CHANGE,
        f"Bulk user operations executed ({', '.join(actions_executed)}). "
        f"Updated: {stats['updated']}, Deleted: {stats['deleted']}, Skipped: {stats['skipped']}, Errors: {stats['errors']}.",
        admin_id=getattr(current_user, 'id', None)
    )

    return jsonify({
        'data': {
            'summary': stats,
            'results': results
        },
        'meta': {
            'request_id': request_id,
            'deprecated': False
        }
    }), 200
|
||||
300
app/routes/api_v1/users_detail.py
Normal file
300
app/routes/api_v1/users_detail.py
Normal file
@@ -0,0 +1,300 @@
|
||||
|
||||
from uuid import uuid4
|
||||
from datetime import datetime
|
||||
|
||||
from flask import jsonify, request, current_app
|
||||
from flask_login import login_required, current_user
|
||||
from sqlalchemy import or_
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import User, UserType, AdminRole, UserRole, EventType
|
||||
from app.models_media_services import MediaLibrary
|
||||
from app.extensions import db
|
||||
from app.utils.helpers import permission_required, log_event
|
||||
from app.services import user_service
|
||||
|
||||
|
||||
def _serialize_roles(user):
|
||||
return {
|
||||
'admin_roles': [role.name for role in getattr(user, 'admin_roles', [])],
|
||||
'user_roles': [role.name for role in getattr(user, 'user_roles', [])]
|
||||
}
|
||||
|
||||
|
||||
def _serialize_service_accounts(user):
    """Serialize the SERVICE-type child accounts linked to *user*."""
    def _entry(child):
        # Server may be absent; its attributes are then reported as None.
        server = child.server
        created = child.created_at
        return {
            'uuid': child.uuid,
            'service_type': server.service_type.value if server else None,
            'server_name': server.server_nickname if server else None,
            'external_username': child.external_username,
            'external_email': child.external_email,
            'linked_at': created.isoformat() if created else None
        }

    children = getattr(user, 'linked_children', []) or []
    return [_entry(child) for child in children if child.userType == UserType.SERVICE]
|
||||
|
||||
|
||||
def _serialize_history_entry(entry):
|
||||
return {
|
||||
'id': entry.id,
|
||||
'timestamp': entry.timestamp.isoformat() if entry.timestamp else None,
|
||||
'event_type': entry.event_type.value if entry.event_type else None,
|
||||
'message': entry.message,
|
||||
'details': entry.details or {}
|
||||
}
|
||||
|
||||
|
||||
def _get_avatar_url(user: User):
    """Resolve an avatar URL for *user* by account type.

    Mirrors the helper of the same name in app/routes/api_v1/users.py:
    OWNER prefers the Plex thumb; LOCAL/OWNER fall back to Discord CDN then
    an external URL; SERVICE uses the external URL or a service thumbnail
    resolved against the server's base URL. Returns None if nothing matches.
    """
    if user.userType == UserType.OWNER:
        if user.plex_thumb:
            return user.plex_thumb
    # OWNER without a plex_thumb intentionally falls into this branch too.
    if user.userType in {UserType.LOCAL, UserType.OWNER}:
        if user.discord_avatar_hash and user.discord_user_id:
            return f"https://cdn.discordapp.com/avatars/{user.discord_user_id}/{user.discord_avatar_hash}.png?size=256"
        if user.external_avatar_url:
            return user.external_avatar_url
    if user.userType == UserType.SERVICE:
        if user.external_avatar_url:
            return user.external_avatar_url
        service_thumb = None
        if user.service_settings:
            # NOTE(review): assumes service_settings is a dict-like with an
            # optional 'thumb' key — verify against the model.
            service_thumb = user.service_settings.get('thumb')
        if service_thumb and user.server:
            base_url = user.server.public_url or user.server.url
            if service_thumb.startswith('/'):
                # Relative thumbnail paths are anchored to the server URL.
                return f"{base_url.rstrip('/')}{service_thumb}"
            return service_thumb
    return None
|
||||
|
||||
|
||||
def _collect_service_context(user: User):
    """Collect the media-service types and server nicknames tied to *user*.

    For a SERVICE account this is its own server; for any other account it
    is gathered from linked SERVICE children. Returns a dict with the
    de-duplicated (order-preserving) lists plus the first entry of each as
    the 'primary' value, or None when empty.
    """
    service_types = []
    server_names = []

    if user.userType == UserType.SERVICE:
        if user.server and user.server.service_type:
            service_types.append(user.server.service_type.value)
            server_names.append(user.server.server_nickname)
    else:
        for child in getattr(user, 'linked_children', []) or []:
            if child.userType == UserType.SERVICE and child.server and child.server.service_type:
                service_types.append(child.server.service_type.value)
                server_names.append(child.server.server_nickname)

    # dict.fromkeys de-duplicates while preserving first-seen order —
    # replaces the two manual accumulation loops.
    unique_service_types = list(dict.fromkeys(service_types))
    unique_server_names = list(dict.fromkeys(server_names))

    return {
        'service_types': unique_service_types,
        'server_names': unique_server_names,
        'primary_service_type': unique_service_types[0] if unique_service_types else None,
        'primary_server_name': unique_server_names[0] if unique_server_names else None
    }
|
||||
|
||||
|
||||
def _serialize_linked_local_user(user: User):
    """Summarize the local account a SERVICE user links back to, or None."""
    if user.userType != UserType.SERVICE:
        return None
    parent = user.linked_parent
    if not parent:
        return None

    if hasattr(parent, 'get_display_name'):
        display_name = parent.get_display_name()
    else:
        display_name = parent.localUsername

    return {
        'uuid': parent.uuid,
        'username': parent.localUsername,
        'display_name': display_name,
        'email': parent.email
    }
|
||||
|
||||
|
||||
@bp.route('/users/<string:user_uuid>', methods=['GET'])
@login_required
def get_user_detail(user_uuid):
    """Return the full detail payload for a single user.

    Aggregates identity fields, roles, linked service accounts, library
    access, recent history and streaming statistics into one envelope.
    404s (via first_or_404) when the UUID is unknown.
    """
    request_id = str(uuid4())
    user = User.query.filter_by(uuid=user_uuid).first_or_404()

    service_context = _collect_service_context(user)
    avatar_url = _get_avatar_url(user)
    linked_local_user = _serialize_linked_local_user(user)

    # Library access: only SERVICE accounts are restricted; an empty
    # allowed_library_ids list means access to everything.
    libraries = []
    has_all_libraries = True
    if user.userType == UserType.SERVICE:
        has_all_libraries = not bool(user.allowed_library_ids)
        if user.allowed_library_ids and user.server_id:
            # Stored ids may be external or internal library ids — match both.
            allowed_ids = [str(value) for value in user.allowed_library_ids]
            libs = MediaLibrary.query.filter(
                MediaLibrary.server_id == user.server_id,
                or_(
                    MediaLibrary.external_id.in_(allowed_ids),
                    MediaLibrary.internal_id.in_(allowed_ids)
                )
            ).all()
            libraries = [library.name for library in libs]

    # Stream stats are best-effort: failures are logged and an empty
    # structure is returned instead of failing the whole request.
    stream_stats = {'global': {}, 'players': []}
    try:
        stream_stats = user_service.get_user_stream_stats(user.uuid) or {'global': {}, 'players': []}
    except Exception as exc:
        current_app.logger.warning(f"Failed to load stream stats for user {user_uuid}: {exc}", exc_info=True)

    global_stats = stream_stats.get('global', {}) if isinstance(stream_stats, dict) else {}
    total_plays = global_stats.get('all_time_plays', 0)
    total_duration_seconds = global_stats.get('all_time_duration_seconds', 0)

    data = {
        'uuid': user.uuid,
        'username': user.localUsername or user.external_username,
        'email': user.email or user.discord_email,
        'user_type': user.userType.value,
        'display_name': user.get_display_name() if hasattr(user, 'get_display_name') else None,
        'created_at': user.created_at.isoformat() if user.created_at else None,
        'last_login_at': user.last_login_at.isoformat() if user.last_login_at else None,
        'is_active': user.is_active,
        'notes': user.notes,
        'roles': _serialize_roles(user),
        'user_roles_detail': [
            {
                'name': role.name,
                'color': getattr(role, 'color', None),
                'icon': getattr(role, 'icon', None),
                'description': getattr(role, 'description', None)
            }
            for role in getattr(user, 'user_roles', [])
        ],
        'service_accounts': _serialize_service_accounts(user),
        # Last 10 history rows, newest first; the mapper-class dance resolves
        # the related model's timestamp column without importing it here.
        'history': [
            _serialize_history_entry(entry)
            for entry in user.history_logs.order_by(User.history_logs.property.mapper.class_.timestamp.desc()).limit(10)
        ] if hasattr(user, 'history_logs') else []
    }

    # Second batch of flatter fields merged into the same payload.
    data.update({
        'local_username': user.localUsername,
        'external_username': user.external_username,
        'external_email': user.external_email,
        'external_user_id': user.external_user_id,
        'external_user_alt_id': user.external_user_alt_id,
        'discord_username': user.discord_username,
        'discord_user_id': user.discord_user_id,
        'discord_email': user.discord_email,
        'discord_avatar_url': f"https://cdn.discordapp.com/avatars/{user.discord_user_id}/{user.discord_avatar_hash}.png?size=256"
        if user.discord_avatar_hash and user.discord_user_id else None,
        'avatar_url': avatar_url,
        'service_type': service_context.get('primary_service_type'),
        'service_types': service_context.get('service_types'),
        'server_nickname': service_context.get('primary_server_name'),
        'server_names': service_context.get('server_names'),
        'linked_local_user': linked_local_user,
        'libraries': libraries,
        'has_all_libraries': has_all_libraries,
        'last_activity_at': user.last_activity_at.isoformat() if user.last_activity_at else None,
        'service_join_date': user.service_join_date.isoformat() if user.service_join_date else None,
        'access_expires_at': user.access_expires_at.isoformat() if user.access_expires_at else None,
        'allow_downloads': bool(getattr(user, 'allow_downloads', False)),
        'allow_4k_transcode': bool(getattr(user, 'allow_4k_transcode', False)),
        'is_purge_whitelisted': bool(getattr(user, 'is_purge_whitelisted', False)),
        'is_discord_bot_whitelisted': bool(getattr(user, 'is_discord_bot_whitelisted', False)),
        'is_home_user': bool(getattr(user, 'is_home_user', False)),
        'shares_back': bool(getattr(user, 'shares_back', False)),
        'has_password': bool(user.password_hash),
        'used_invite': bool(user.used_invite_id),
        'force_password_change': bool(getattr(user, 'force_password_change', False)),
        'stream_stats': stream_stats,
        'total_plays': total_plays,
        'total_duration_seconds': total_duration_seconds
    })

    response = {
        'data': data,
        'meta': {
            'request_id': request_id,
            'generated_at': datetime.utcnow().isoformat() + 'Z',
            'deprecated': False
        }
    }
    return jsonify(response), 200
|
||||
|
||||
|
||||
@bp.route('/users/<string:user_uuid>', methods=['PATCH'])
@login_required
@permission_required('edit_user')
def update_user_detail(user_uuid):
    """Partially update a user: notes, is_active, admin/user role sets.

    Only keys present in the JSON body are applied. Rejects deactivating the
    owner and assigning admin roles to non-local accounts (400); any other
    failure rolls back and returns 500. Changes are committed atomically.
    """
    request_id = str(uuid4())
    user = User.query.filter_by(uuid=user_uuid).first_or_404()
    payload = request.get_json(silent=True) or {}

    notes = payload.get('notes')
    is_active = payload.get('is_active')
    admin_role_ids = payload.get('admin_role_ids')
    user_role_ids = payload.get('user_role_ids')

    try:
        if notes is not None:
            user.notes = notes

        if is_active is not None:
            if user.userType == UserType.OWNER and not is_active:
                # NOTE(review): returning from inside the try leaves earlier
                # field changes (e.g. notes) pending but uncommitted.
                return jsonify({
                    'error': {
                        'code': 'CANNOT_DEACTIVATE_OWNER',
                        'message': 'Owner account cannot be deactivated.'
                    },
                    'meta': {'request_id': request_id}
                }), 400
            user.is_active = bool(is_active)

        if admin_role_ids is not None:
            if user.userType != UserType.LOCAL:
                return jsonify({
                    'error': {
                        'code': 'INVALID_USER_TYPE',
                        'message': 'Admin roles can only be managed for local accounts.'
                    },
                    'meta': {'request_id': request_id}
                }), 400
            # Unknown ids are silently dropped by the IN filter.
            roles = AdminRole.query.filter(AdminRole.id.in_(admin_role_ids)).all()
            user.set_admin_roles(roles)

        if user_role_ids is not None:
            roles = UserRole.query.filter(UserRole.id.in_(user_role_ids)).all()
            user.user_roles = roles

        db.session.commit()
    except Exception as exc:
        current_app.logger.error(f"Failed to update user {user_uuid}: {exc}", exc_info=True)
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'UPDATE_FAILED',
                'message': 'Failed to update user.'
            },
            'meta': {'request_id': request_id}
        }), 500

    # Audit trail entry recording who performed the change.
    log_event(
        EventType.SETTING_CHANGE,
        f"User '{user.localUsername or user.external_username}' updated via API.",
        admin_id=getattr(current_user, 'id', None)
    )

    return jsonify({
        'data': {
            'uuid': user.uuid,
            'notes': user.notes,
            'is_active': user.is_active,
            'roles': _serialize_roles(user)
        },
        'meta': {
            'request_id': request_id,
            'deprecated': False
        }
    }), 200
|
||||
80
app/routes/api_v1/users_history.py
Normal file
80
app/routes/api_v1/users_history.py
Normal file
@@ -0,0 +1,80 @@
|
||||
|
||||
from uuid import uuid4
|
||||
from datetime import datetime
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required
|
||||
from sqlalchemy import desc
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import User, UserType, HistoryLog, EventType
|
||||
|
||||
|
||||
def _serialize_history(log):
|
||||
return {
|
||||
'id': log.id,
|
||||
'timestamp': log.timestamp.isoformat() if log.timestamp else None,
|
||||
'event_type': log.event_type.value if log.event_type else None,
|
||||
'message': log.message,
|
||||
'details': log.details or {}
|
||||
}
|
||||
|
||||
|
||||
def _apply_user_filter(query, user: User):
    """Restrict a HistoryLog query to rows belonging to *user*.

    OWNER rows are keyed by owner_id, LOCAL by local_user_id, and SERVICE
    accounts inherit their linked local parent's history. Users that match
    none of these (e.g. an unlinked SERVICE account) get an always-false
    filter so they see no history at all.
    """
    if user.userType == UserType.OWNER:
        return query.filter(HistoryLog.owner_id == user.id)
    if user.userType == UserType.LOCAL:
        return query.filter(HistoryLog.local_user_id == user.id)
    if user.userType == UserType.SERVICE and user.linked_parent:
        return query.filter(HistoryLog.local_user_id == user.linked_parent.id)
    # SQLAlchemy coerces the bare False to an always-false SQL predicate.
    return query.filter(False)
|
||||
|
||||
|
||||
@bp.route('/users/<string:user_uuid>/history', methods=['GET'])
@login_required
def get_user_history(user_uuid):
    """Return a user's history log, paginated and optionally filtered.

    Query parameters:
        page, page_size: pagination (page_size clamped to 1..100).
        event_types: comma-separated EventType names; unknown names are
            silently ignored.
    """
    request_id = str(uuid4())
    user = User.query.filter_by(uuid=user_uuid).first_or_404()

    # Clamp pagination inputs to sane bounds.
    page = max(1, request.args.get('page', type=int) or 1)
    page_size = request.args.get('page_size', type=int) or 25
    page_size = max(1, min(page_size, 100))

    event_types_param = request.args.get('event_types')
    event_filters = []
    if event_types_param:
        for token in event_types_param.split(','):
            key = token.strip().upper()
            if not key:
                continue
            try:
                event_filters.append(EventType[key])
            except KeyError:
                # Unknown event names are dropped rather than rejected.
                continue

    query = HistoryLog.query.order_by(desc(HistoryLog.timestamp))
    query = _apply_user_filter(query, user)

    if event_filters:
        query = query.filter(HistoryLog.event_type.in_(event_filters))

    pagination = query.paginate(page=page, per_page=page_size, error_out=False)

    response = {
        'data': [_serialize_history(log) for log in pagination.items],
        'meta': {
            'request_id': request_id,
            'generated_at': datetime.utcnow().isoformat() + 'Z',
            'deprecated': False,
            'pagination': {
                'page': pagination.page,
                'page_size': pagination.per_page,
                'total_items': pagination.total,
                'total_pages': pagination.pages or 1
            },
            'filters': {
                # Replaces the fragile `x and [...] or []` idiom: a
                # comprehension over an empty list already yields [].
                'event_types': [event.value for event in event_filters]
            }
        }
    }
    return jsonify(response), 200
|
||||
53
app/routes/api_v1/users_overseerr.py
Normal file
53
app/routes/api_v1/users_overseerr.py
Normal file
@@ -0,0 +1,53 @@
|
||||
|
||||
from uuid import uuid4
|
||||
from datetime import datetime
|
||||
|
||||
from flask import jsonify
|
||||
from flask_login import login_required
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models_overseerr import OverseerrUserLink
|
||||
from app.models import User, UserType
|
||||
|
||||
|
||||
def _serialize_overseerr_link(link: OverseerrUserLink):
    """Flatten an OverseerrUserLink row into a JSON-safe dict."""
    server = link.server
    synced = link.last_sync_at
    return {
        'server_id': link.server_id,
        'server_name': server.server_nickname if server else None,
        'overseerr_user_id': link.overseerr_user_id,
        'overseerr_username': link.overseerr_username,
        'overseerr_email': link.overseerr_email,
        'is_linked': link.is_linked,
        'last_sync_at': synced.isoformat() if synced else None,
    }
|
||||
|
||||
|
||||
@bp.route('/users/<string:user_uuid>/overseerr', methods=['GET'])
@login_required
def get_user_overseerr(user_uuid):
    """List the Overseerr account links associated with the given user."""
    request_id = str(uuid4())
    user = User.query.filter_by(uuid=user_uuid).first_or_404()

    # Service accounts resolve through their linked parent (local) user.
    if user.userType == UserType.SERVICE:
        target_uuid = user.linkedUserId
    else:
        target_uuid = user.uuid
    local_user = User.query.filter_by(uuid=target_uuid).first()

    meta = {'request_id': request_id, 'deprecated': False}
    if not local_user:
        # No resolvable local account: an empty link list, not an error.
        return jsonify({'data': [], 'meta': meta}), 200

    links = OverseerrUserLink.query.filter_by(plex_user_id=local_user.plex_uuid).all()
    return jsonify({
        'data': [_serialize_overseerr_link(link) for link in links],
        'meta': meta,
    }), 200
|
||||
215
app/routes/api_v1/users_purge.py
Normal file
215
app/routes/api_v1/users_purge.py
Normal file
@@ -0,0 +1,215 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request, current_app
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import User, EventType
|
||||
from app.services import user_service
|
||||
from app.utils.helpers import permission_required, log_event
|
||||
|
||||
|
||||
def _bool_arg(name: str, default: str) -> bool:
    """Read a boolean query parameter serialized as 'true'/'false' (case-insensitive)."""
    return request.args.get(name, default).lower() == 'true'


def _iso_or_none(value):
    """Render a datetime as ISO-8601, passing None through unchanged."""
    return value.isoformat() if value else None


@bp.route('/users/eligible-for-purge', methods=['GET'])
@login_required
@permission_required('purge_users')
def get_eligible_for_purge():
    """Get users eligible for purge based on inactivity criteria.

    Query params: inactive_days (int, default 180), exclude_sharers,
    exclude_whitelisted and ignore_creation_date (booleans as 'true'/'false').
    """
    request_id = str(uuid4())
    meta = {'request_id': request_id, 'deprecated': False}

    inactive_days = request.args.get('inactive_days', 180, type=int)
    exclude_sharers = _bool_arg('exclude_sharers', 'true')
    exclude_whitelisted = _bool_arg('exclude_whitelisted', 'true')
    ignore_creation_date = _bool_arg('ignore_creation_date', 'false')

    try:
        current_app.logger.info(
            f"Getting users eligible for purge: inactive_days={inactive_days}, "
            f"exclude_sharers={exclude_sharers}, exclude_whitelisted={exclude_whitelisted}, "
            f"ignore_creation_date={ignore_creation_date}"
        )

        eligible_users = user_service.get_users_eligible_for_purge(
            inactive_days_threshold=inactive_days,
            exclude_sharers=exclude_sharers,
            exclude_whitelisted=exclude_whitelisted,
            ignore_creation_date_for_never_streamed=ignore_creation_date
        )

        # Serialize for the API: datetimes become ISO strings, missing keys None.
        users_data = [{
            'uuid': u.get('uuid'),
            'username': u.get('username'),
            'email': u.get('email'),
            'server_name': u.get('server_name'),
            'service_type': u.get('service_type'),
            'avatar_url': u.get('avatar_url'),
            'created_at': _iso_or_none(u.get('created_at')),
            'last_streamed_at': _iso_or_none(u.get('last_streamed_at')),
            'last_login_at': _iso_or_none(u.get('last_login_at')),
            'days_since_activity': u.get('days_since_activity'),
            'is_sharer': u.get('is_sharer', False),
            'is_whitelisted': u.get('is_whitelisted', False),
        } for u in eligible_users]

        return jsonify({
            'data': {
                'users': users_data,
                # Echo the criteria so the client can display what was applied.
                'criteria': {
                    'inactive_days': inactive_days,
                    'exclude_sharers': exclude_sharers,
                    'exclude_whitelisted': exclude_whitelisted,
                    'ignore_creation_date': ignore_creation_date
                }
            },
            'meta': meta
        }), 200

    except Exception as exc:
        # Boundary handler: log with traceback and return a structured 500.
        current_app.logger.error(f"Error getting eligible users for purge: {exc}", exc_info=True)
        return jsonify({
            'error': {
                'message': f'Failed to get eligible users: {str(exc)}',
                'type': 'ServerError'
            },
            'meta': meta
        }), 500
|
||||
|
||||
|
||||
@bp.route('/users/purge', methods=['POST'])
@login_required
@permission_required('purge_users')
def purge_users():
    """Purge the selected users.

    Body: {"user_uuids": [...], "criteria": {...}} where criteria echoes the
    thresholds used by /users/eligible-for-purge (passed through to the
    service and used for logging).
    """
    request_id = str(uuid4())
    meta = {'request_id': request_id, 'deprecated': False}

    try:
        data = request.get_json()
        if not data:
            return jsonify({
                'error': {'message': 'Request body is required', 'type': 'ValidationError'},
                'meta': meta
            }), 400

        user_uuids = data.get('user_uuids', [])
        if not user_uuids:
            return jsonify({
                'error': {'message': 'user_uuids is required and must not be empty', 'type': 'ValidationError'},
                'meta': meta
            }), 400

        criteria = data.get('criteria', {})
        inactive_days = criteria.get('inactive_days')
        exclude_sharers = criteria.get('exclude_sharers')
        exclude_whitelisted = criteria.get('exclude_whitelisted')
        ignore_creation_date = criteria.get('ignore_creation_date', False)

        current_app.logger.info(
            f"Purging {len(user_uuids)} users with criteria: "
            f"inactive_days={inactive_days}, exclude_sharers={exclude_sharers}, "
            f"exclude_whitelisted={exclude_whitelisted}"
        )

        # Fixed: this import previously ran on every loop iteration; hoisted.
        from app.utils.helpers import get_user_by_uuid

        # Resolve UUIDs to internal ids; keep a map for per-user reporting.
        user_ids_to_purge = []
        uuid_to_user_map = {}
        for user_uuid in user_uuids:
            user_obj, user_type = get_user_by_uuid(user_uuid)
            if not user_obj:
                current_app.logger.warning(f"User not found for UUID: {user_uuid}")
                continue
            user_ids_to_purge.append(user_obj.id)
            uuid_to_user_map[user_obj.id] = {
                'uuid': user_uuid,
                'username': user_obj.external_username if user_type == 'user_media_access' else user_obj.localUsername,
                'user_type': user_type
            }

        if not user_ids_to_purge:
            return jsonify({
                'error': {'message': 'No valid users found for the provided UUIDs', 'type': 'ValidationError'},
                'meta': meta
            }), 400

        results = user_service.purge_inactive_users(
            user_ids_to_purge=user_ids_to_purge,
            admin_id=current_user.id,
            inactive_days_threshold=inactive_days,
            exclude_sharers=exclude_sharers,
            exclude_whitelisted=exclude_whitelisted,
            ignore_creation_date_for_never_streamed=ignore_creation_date
        )

        # NOTE(review): the service returns only aggregate counts, so per-user
        # success is assumed here — confirm once purge_inactive_users exposes
        # per-user outcomes.
        detailed_results = [{
            'user_uuid': uuid_to_user_map.get(user_id, {}).get('uuid', 'unknown'),
            'username': uuid_to_user_map.get(user_id, {}).get('username', 'Unknown'),
            'success': True,
            'message': 'User purged successfully'
        } for user_id in user_ids_to_purge]

        log_event(
            EventType.SETTING_CHANGE,
            f"Purged {results.get('deleted', 0)} inactive users. Criteria: {inactive_days} inactive days.",
            admin_id=getattr(current_user, 'id', None)
        )

        return jsonify({
            'data': {
                'deleted': results.get('deleted', 0),
                'failed': results.get('errors', 0),
                'results': detailed_results,
                'message': results.get('message', 'Purge completed')
            },
            'meta': meta
        }), 200

    except Exception as exc:
        current_app.logger.error(f"Error purging users: {exc}", exc_info=True)
        return jsonify({
            'error': {'message': f'Failed to purge users: {str(exc)}', 'type': 'ServerError'},
            'meta': meta
        }), 500
|
||||
157
app/routes/api_v1/users_service_accounts.py
Normal file
157
app/routes/api_v1/users_service_accounts.py
Normal file
@@ -0,0 +1,157 @@
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import User, UserType, EventType
|
||||
from app.extensions import db
|
||||
from app.utils.helpers import log_event, permission_required
|
||||
|
||||
|
||||
def _serialize_service_account(account: User):
    """Flatten a SERVICE-type User row into a JSON-safe dict."""
    server = account.server
    created = account.created_at
    return {
        'uuid': account.uuid,
        'service_type': server.service_type.value if server else None,
        'server_name': server.server_nickname if server else None,
        'external_username': account.external_username,
        'external_email': account.external_email,
        'linked_at': created.isoformat() if created else None,
    }
|
||||
|
||||
|
||||
def _get_local_user(uuid: str):
    """Fetch a user by uuid: 404 if absent, 400 unless LOCAL or OWNER."""
    user = User.query.filter_by(uuid=uuid).first_or_404()
    if user.userType in {UserType.LOCAL, UserType.OWNER}:
        return user
    from flask import abort
    abort(400, description='Service accounts can only be linked to local/admin users.')
|
||||
|
||||
|
||||
@bp.route('/users/<string:user_uuid>/service-accounts', methods=['GET'])
@login_required
@permission_required('edit_user')
def list_service_accounts(user_uuid):
    """List the SERVICE accounts linked to a local/owner user."""
    request_id = str(uuid4())
    user = _get_local_user(user_uuid)
    linked = getattr(user, 'linked_children', []) or []
    payload = [
        _serialize_service_account(child)
        for child in linked
        if child.userType == UserType.SERVICE
    ]
    return jsonify({
        'data': payload,
        'meta': {'request_id': request_id, 'deprecated': False},
    }), 200
|
||||
|
||||
|
||||
@bp.route('/users/<string:user_uuid>/service-accounts', methods=['POST'])
@login_required
@permission_required('edit_user')
def link_service_account(user_uuid):
    """Link an unattached SERVICE account to a local/owner user.

    Body: {"service_uuid": "<uuid>"}. Returns 400 when the uuid is missing,
    404 when unknown, 409 when the account is already linked elsewhere.
    """
    request_id = str(uuid4())
    user = _get_local_user(user_uuid)
    payload = request.get_json() or {}
    service_uuid = payload.get('service_uuid')
    if not service_uuid:
        return jsonify({
            'error': {
                'code': 'MISSING_SERVICE_UUID',
                'message': 'service_uuid is required.'
            },
            'meta': {'request_id': request_id}
        }), 400

    service_user = User.query.filter_by(uuid=service_uuid, userType=UserType.SERVICE).first()
    if not service_user:
        return jsonify({
            'error': {
                'code': 'SERVICE_USER_NOT_FOUND',
                'message': 'Service account not found.'
            },
            'meta': {'request_id': request_id}
        }), 404

    if service_user.linkedUserId:
        return jsonify({
            'error': {
                'code': 'SERVICE_USER_ALREADY_LINKED',
                'message': 'Service account is already linked to another user.'
            },
            'meta': {'request_id': request_id}
        }), 409

    service_user.linkedUserId = user.uuid
    try:
        db.session.commit()
    except Exception:
        # Fixed: a failed commit previously left the session dirty and bubbled
        # a raw 500; roll back and return a structured error instead.
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'LINK_FAILED',
                'message': 'Failed to link service account.'
            },
            'meta': {'request_id': request_id}
        }), 500

    log_event(
        EventType.SETTING_CHANGE,
        f"Service account '{service_user.external_username}' linked to user '{user.localUsername}'."
    )

    return jsonify({
        'data': _serialize_service_account(service_user),
        'meta': {'request_id': request_id}
    }), 200
|
||||
|
||||
|
||||
@bp.route('/users/<string:user_uuid>/service-accounts/<string:service_uuid>', methods=['DELETE'])
@login_required
@permission_required('edit_user')
def unlink_service_account(user_uuid, service_uuid):
    """Detach a SERVICE account from the given local/owner user."""
    request_id = str(uuid4())
    user = _get_local_user(user_uuid)

    service_user = User.query.filter_by(uuid=service_uuid, userType=UserType.SERVICE).first()
    linked_here = service_user is not None and service_user.linkedUserId == user.uuid
    if not linked_here:
        return jsonify({
            'error': {
                'code': 'SERVICE_USER_NOT_LINKED',
                'message': 'Service account is not linked to this user.'
            },
            'meta': {'request_id': request_id}
        }), 404

    service_user.linkedUserId = None
    db.session.commit()

    log_event(
        EventType.SETTING_CHANGE,
        f"Service account '{service_user.external_username}' unlinked from user '{user.localUsername}'."
    )

    return jsonify({
        'data': {'success': True},
        'meta': {'request_id': request_id}
    }), 200
|
||||
|
||||
|
||||
@bp.route('/users/<string:user_uuid>/available-service-accounts', methods=['GET'])
@login_required
@permission_required('edit_user')
def list_available_service_accounts(user_uuid):
    """List SERVICE accounts that are not yet linked to any local user."""
    request_id = str(uuid4())
    # Validates the target user exists and is LOCAL/OWNER (404/400 otherwise).
    _get_local_user(user_uuid)

    standalone = (
        User.query
        .filter_by(userType=UserType.SERVICE)
        .filter(User.linkedUserId.is_(None))
        .all()
    )

    data = [{
        'uuid': su.uuid,
        'service_type': su.server.service_type.value if su.server else None,
        'server_name': su.server.server_nickname if su.server else None,
        'external_username': su.external_username,
        'external_email': su.external_email,
        'avatar_url': su.external_avatar_url,
    } for su in standalone]

    return jsonify({
        'data': data,
        'meta': {'request_id': request_id, 'deprecated': False},
    }), 200
|
||||
71
app/routes/api_v1/users_settings.py
Normal file
71
app/routes/api_v1/users_settings.py
Normal file
@@ -0,0 +1,71 @@
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_required
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import User, UserType
|
||||
from app.extensions import db
|
||||
from app.utils.helpers import permission_required
|
||||
|
||||
|
||||
@bp.route('/users/<string:user_uuid>/settings', methods=['GET'])
@login_required
@permission_required('edit_user')
def get_user_settings(user_uuid):
    """Return the editable per-user settings (notes, active flag)."""
    request_id = str(uuid4())
    user = User.query.filter_by(uuid=user_uuid).first_or_404()

    return jsonify({
        'data': {
            'uuid': user.uuid,
            'notes': user.notes,
            'is_active': user.is_active,
        },
        'meta': {'request_id': request_id, 'deprecated': False},
    }), 200
|
||||
|
||||
|
||||
@bp.route('/users/<string:user_uuid>/settings', methods=['PATCH'])
@login_required
@permission_required('edit_user')
def update_user_settings(user_uuid):
    """Partially update a user's settings.

    Accepts {"notes": ..., "is_active": ...}; fields absent from the payload
    are left untouched. Deactivating the OWNER account is rejected with 400.
    """
    request_id = str(uuid4())
    user = User.query.filter_by(uuid=user_uuid).first_or_404()

    payload = request.get_json() or {}
    notes = payload.get('notes')
    is_active = payload.get('is_active')

    if notes is not None:
        user.notes = notes
    if is_active is not None:
        if user.userType == UserType.OWNER and not is_active:
            return jsonify({
                'error': {
                    'code': 'CANNOT_DEACTIVATE_OWNER',
                    'message': 'Owner account cannot be deactivated.'
                },
                'meta': {'request_id': request_id}
            }), 400
        user.is_active = bool(is_active)

    try:
        db.session.commit()
    except Exception:
        # Fixed: a failed commit previously left the session dirty and surfaced
        # as an unhandled 500; roll back and return a structured error.
        db.session.rollback()
        return jsonify({
            'error': {
                'code': 'UPDATE_FAILED',
                'message': 'Failed to update user settings.'
            },
            'meta': {'request_id': request_id}
        }), 500

    return jsonify({
        'data': {
            'success': True,
            'notes': user.notes,
            'is_active': user.is_active
        },
        'meta': {'request_id': request_id, 'deprecated': False}
    }), 200
|
||||
190
app/routes/api_v1/users_sync.py
Normal file
190
app/routes/api_v1/users_sync.py
Normal file
@@ -0,0 +1,190 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, current_app
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.routes.api_v1 import bp
|
||||
from app.models import User, UserType, EventType
|
||||
from app.models_media_services import MediaServer
|
||||
from app.services.media_service_manager import MediaServiceManager
|
||||
from app.utils.helpers import permission_required, log_event
|
||||
|
||||
|
||||
def _serialize_basic_user(user: User):
    """Summarize a user for sync responses: name, type, linked service count."""
    kind = user.userType
    children = getattr(user, 'linked_children', []) or []
    service_children = [c for c in children if c.userType == UserType.SERVICE]
    return {
        'uuid': user.uuid,
        'username': user.localUsername or user.external_username,
        'user_type': kind.value if hasattr(kind, 'value') else str(kind),
        'service_account_count': len(service_children),
    }
|
||||
|
||||
|
||||
@bp.route('/users/sync-all', methods=['POST'])
@login_required
@permission_required('edit_user')
def sync_all_users():
    """Sync users from all enabled media servers and report per-server results."""
    request_id = str(uuid4())
    meta = {'request_id': request_id, 'deprecated': False}

    servers = MediaServer.query.filter_by(is_active=True).all()
    if not servers:
        return jsonify({
            'data': {'results': [], 'message': 'No active media servers found.'},
            'meta': meta,
        }), 200

    results = []
    totals = {'added': 0, 'updated': 0, 'removed': 0}

    for server in servers:
        service_type = (
            server.service_type.value
            if hasattr(server.service_type, 'value')
            else str(server.service_type)
        )
        entry = {
            'server_id': server.id,
            'server_name': server.server_nickname,
            'service_type': service_type,
        }
        try:
            current_app.logger.info(f"Syncing users for server: {server.server_nickname}")
            outcome = MediaServiceManager.sync_server_users(server.id)
            ok = bool(outcome.get('success'))
            counts = {key: outcome.get(key, 0) for key in ('added', 'updated', 'removed')}
            for key, value in counts.items():
                totals[key] += value
            entry.update(counts)
            entry['success'] = ok
            entry['message'] = outcome.get('message', 'Sync completed.' if ok else 'Sync failed.')
        except Exception as exc:
            # One failing server must not abort the whole run.
            current_app.logger.error(
                f"User sync failed for server {server.id} ({server.server_nickname}): {exc}",
                exc_info=True
            )
            entry.update({
                'success': False,
                'added': 0,
                'updated': 0,
                'removed': 0,
                'message': str(exc),
            })
        results.append(entry)

    log_event(
        EventType.SETTING_CHANGE,
        f"Manual sync triggered for all servers. Results: {totals['added']} added, "
        f"{totals['updated']} updated, {totals['removed']} removed.",
        admin_id=getattr(current_user, 'id', None)
    )

    success_count = sum(1 for entry in results if entry['success'])
    return jsonify({
        'data': {
            'results': results,
            'summary': {
                'total_servers': len(servers),
                'successful': success_count,
                'failed': len(results) - success_count,
                'total_added': totals['added'],
                'total_updated': totals['updated'],
                'total_removed': totals['removed'],
            }
        },
        'meta': meta,
    }), 200
|
||||
|
||||
|
||||
@bp.route('/users/<string:user_uuid>/sync', methods=['POST'])
@login_required
@permission_required('edit_user')
def sync_user_accounts(user_uuid):
    """Re-sync the media servers associated with a single user."""
    request_id = str(uuid4())
    meta = {'request_id': request_id, 'deprecated': False}
    user = User.query.filter_by(uuid=user_uuid).first_or_404()

    # Collect the servers to refresh: a service account's own server, or the
    # servers of every linked service child for a local/owner user.
    server_ids = set()
    if user.userType == UserType.SERVICE:
        if user.server_id:
            server_ids.add(user.server_id)
    else:
        server_ids.update(
            child.server_id
            for child in (getattr(user, 'linked_children', []) or [])
            if child.server_id
        )

    if not server_ids:
        return jsonify({
            'data': {'results': [], 'message': 'No associated media servers to sync.'},
            'meta': meta,
        }), 200

    results = []
    for server_id in server_ids:
        server = MediaServer.query.get(server_id)
        if not server:
            results.append({
                'server_id': server_id,
                'success': False,
                'message': 'Server not found.',
            })
            continue

        try:
            outcome = MediaServiceManager.sync_server_users(server.id)
            ok = bool(outcome.get('success'))
            results.append({
                'server_id': server.id,
                'server_name': server.server_nickname,
                'success': ok,
                'added': outcome.get('added'),
                'updated': outcome.get('updated'),
                'removed': outcome.get('removed'),
                'message': outcome.get('message', 'Sync completed.' if ok else 'Sync failed.'),
            })
        except Exception as exc:
            current_app.logger.error(f"User sync failed for server {server_id}: {exc}", exc_info=True)
            results.append({
                'server_id': server.id,
                'server_name': server.server_nickname,
                'success': False,
                'message': str(exc),
            })

    log_event(
        EventType.SETTING_CHANGE,
        f"Manual sync triggered for user '{user.localUsername or user.external_username}'.",
        admin_id=getattr(current_user, 'id', None)
    )

    return jsonify({
        'data': {
            'results': results,
            'user': _serialize_basic_user(user),
        },
        'meta': meta,
    }), 200
|
||||
@@ -1,4 +1,4 @@
|
||||
from flask import Blueprint, render_template, current_app, request
|
||||
from flask import Blueprint, current_app, request, send_from_directory
|
||||
from flask_login import login_required
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from collections import defaultdict
|
||||
@@ -8,6 +8,7 @@ from app.extensions import db
|
||||
from app.utils.helpers import setup_required, permission_required, format_duration
|
||||
from app.services.media_service_factory import MediaServiceFactory
|
||||
from app.services.media_service_manager import MediaServiceManager
|
||||
import os
|
||||
|
||||
bp = Blueprint('dashboard', __name__)
|
||||
|
||||
@@ -449,143 +450,35 @@ def _generate_admin_streaming_chart_data(days=7):
|
||||
'date_range_days': days
|
||||
}
|
||||
|
||||
def _render_react_spa(sub_path: str = 'dashboard'):
    """Serve the compiled React SPA instead of legacy templates."""
    dist_dir = os.path.join(current_app.root_path, 'static', 'dist')
    index_file = os.path.join(dist_dir, 'index.html')

    if os.path.exists(index_file):
        current_app.logger.debug("Routing admin %s view to React SPA", sub_path)
        return send_from_directory(dist_dir, 'index.html')

    # No compiled bundle: nothing to serve, so explain how to build one.
    current_app.logger.error("React SPA build not found at %s", index_file)
    current_app.logger.error("Please run: cd frontend && npm run build")
    return (
        "<h1>React App Not Built</h1>"
        "<p>The React admin interface has not been built yet.</p>"
        "<p>Please run: <code>cd frontend && npm run build</code></p>"
    ), 500
|
||||
|
||||
|
||||
@bp.route('/')
@bp.route('/dashboard')
@login_required
@setup_required
@permission_required('view_dashboard')
def index():
    """Admin dashboard entry point.

    The legacy server-rendered dashboard (user counts, invite stats, server
    status, streaming charts) was removed in the SPA migration; that data is
    now fetched client-side through the API v1 endpoints. This view only hands
    the compiled React shell back to the browser.
    """
    # NOTE(review): the diff leaves the old template-rendering body and its
    # `return result` visibly interleaved with the new SPA return, making the
    # SPA return unreachable as flattened — only the SPA path is kept here;
    # confirm against the committed file.
    return _render_react_spa('dashboard')
|
||||
|
||||
@bp.route('/account', methods=['GET', 'POST'])
|
||||
@login_required
|
||||
@setup_required
|
||||
@permission_required('manage_general_settings')
|
||||
def account():
|
||||
"""Admin account page - redirects to settings implementation"""
|
||||
# Import here to avoid circular imports
|
||||
from app.routes.settings import account as settings_account_handler
|
||||
return settings_account_handler()
|
||||
return _render_react_spa('account')
|
||||
|
||||
@@ -35,7 +35,7 @@ def plex_oauth_callback():
|
||||
temp_invite_for_redirect = Invite.query.get(invite_id)
|
||||
if temp_invite_for_redirect:
|
||||
invite_path_or_token_for_redirect = temp_invite_for_redirect.custom_path or temp_invite_for_redirect.token
|
||||
|
||||
|
||||
fallback_redirect = url_for('invites.process_invite_form', invite_path_or_token=invite_path_or_token_for_redirect)
|
||||
|
||||
if not invite_id or not pin_code_from_session or not pin_id_from_session or not client_id_from_session:
|
||||
@@ -52,6 +52,10 @@ def plex_oauth_callback():
|
||||
if not invite:
|
||||
flash('Invite not found. Try again.', 'danger')
|
||||
return redirect(url_for('invites.invite_landing_page'))
|
||||
|
||||
return_path = session.get(f'invite_{invite.id}_return_path')
|
||||
if return_path:
|
||||
fallback_redirect = return_path
|
||||
|
||||
try:
|
||||
# Use direct API approach exactly like the sample code
|
||||
@@ -232,6 +236,10 @@ def discord_oauth_callback():
|
||||
current_app.logger.warning(f"Discord OAuth Callback: Invite ID {invite_id_from_session} not found in DB after state check.")
|
||||
return redirect(generic_invite_landing_url)
|
||||
|
||||
return_path = session.get(f'invite_{invite_object_for_redirect.id}_return_path')
|
||||
if return_path:
|
||||
public_invite_page_url_with_path = return_path
|
||||
|
||||
code = request.args.get('code')
|
||||
if not code:
|
||||
error_description = request.args.get("error_description", "Authentication with Discord failed. No authorization code received.")
|
||||
@@ -331,4 +339,4 @@ def discord_oauth_callback():
|
||||
finally:
|
||||
session.pop('discord_oauth_invite_id', None)
|
||||
|
||||
return redirect(public_invite_page_url_with_path)
|
||||
return redirect(public_invite_page_url_with_path)
|
||||
|
||||
@@ -19,413 +19,33 @@ from . import invites_public_bp as invites_bp
|
||||
# Add DISCORD_API_BASE_URL constant
|
||||
DISCORD_API_BASE_URL = 'https://discord.com/api/v10'
|
||||
|
||||
@invites_bp.route('/invite/<invite_path_or_token>', methods=['GET', 'POST'])
|
||||
@setup_required
|
||||
@invites_bp.route('/invite/<invite_path_or_token>', methods=['GET'])
|
||||
@setup_required
|
||||
def process_invite_form(invite_path_or_token):
|
||||
from flask_wtf import FlaskForm
|
||||
"""Serve React SPA for invite acceptance page"""
|
||||
from flask import send_from_directory
|
||||
import os
|
||||
|
||||
# Log invite view for valid invites
|
||||
from app.services import invite_service
|
||||
invite, error_message_from_validation = invite_service.validate_invite_usability(invite_path_or_token)
|
||||
|
||||
if request.method == 'GET' and not error_message_from_validation and invite:
|
||||
invite, error_message = invite_service.validate_invite_usability(invite_path_or_token)
|
||||
if invite and not error_message:
|
||||
log_event(EventType.INVITE_VIEWED, f"Invite '{invite.custom_path or invite.token}' (ID: {invite.id}) viewed/accessed.", invite_id=invite.id)
|
||||
|
||||
if error_message_from_validation:
|
||||
return render_template('invite/steps/index.html', error=error_message_from_validation, invite=None, form=FlaskForm(), discord_sso_is_mandatory=False, show_discord_button=False)
|
||||
# Serve React SPA regardless of invite validity - React will handle validation via API
|
||||
dist_path = os.path.join(current_app.root_path, 'static', 'dist')
|
||||
index_path = os.path.join(dist_path, 'index.html')
|
||||
|
||||
if not invite:
|
||||
flash("The invite link is invalid or no longer available.", "danger")
|
||||
return redirect(url_for('invites.invite_landing_page'))
|
||||
if not os.path.exists(index_path):
|
||||
current_app.logger.error("React SPA build not found at %s", index_path)
|
||||
return (
|
||||
"<h1>React App Not Built</h1>"
|
||||
"<p>The React admin interface has not been built yet.</p>"
|
||||
"<p>Please run: <code>cd frontend && npm run build</code></p>"
|
||||
), 500
|
||||
|
||||
form_instance = FlaskForm()
|
||||
already_authenticated_plex_user_info = session.get(f'invite_{invite.id}_plex_user')
|
||||
already_authenticated_discord_user_info = session.get(f'invite_{invite.id}_discord_user')
|
||||
plex_conflict_info = session.get(f'invite_{invite.id}_plex_conflict')
|
||||
|
||||
# --- MODIFIED: Determine effective Discord settings using invite fields ---
|
||||
oauth_is_generally_enabled = Setting.get_bool('DISCORD_OAUTH_ENABLED', False)
|
||||
|
||||
effective_require_sso = invite.require_discord_auth
|
||||
effective_require_guild = invite.require_discord_guild_membership
|
||||
|
||||
# These settings are fetched for display purposes if guild membership is required
|
||||
setting_discord_guild_id = Setting.get('DISCORD_GUILD_ID')
|
||||
setting_discord_server_invite_url = Setting.get('DISCORD_SERVER_INVITE_URL')
|
||||
show_discord_button = oauth_is_generally_enabled
|
||||
|
||||
# Get server name for display
|
||||
server_name = g.app_name or 'the server'
|
||||
|
||||
# Get all servers for template logic
|
||||
media_service_manager = MediaServiceManager()
|
||||
all_servers = media_service_manager.get_all_servers(active_only=True)
|
||||
|
||||
# Check if there are Plex servers in the invite (needed early for validation)
|
||||
has_plex_servers = any(server.service_type.name.upper() == 'PLEX' for server in invite.servers)
|
||||
|
||||
# Get library information for each server in the invite
|
||||
servers_with_libraries = {}
|
||||
if invite and invite.servers:
|
||||
for server in invite.servers:
|
||||
try:
|
||||
service = MediaServiceFactory.create_service_from_db(server)
|
||||
if service:
|
||||
libraries = service.get_libraries()
|
||||
# Build library lookup using the appropriate ID for each service type
|
||||
library_dict = {}
|
||||
for lib in libraries:
|
||||
if server.service_type.value == 'kavita':
|
||||
# For Kavita, use internal_id (UUID) to match stored invite data
|
||||
# Get internal_id from database since API doesn't provide it
|
||||
from app.models_media_services import MediaLibrary
|
||||
db_lib = MediaLibrary.query.filter_by(
|
||||
server_id=server.id,
|
||||
external_id=lib.get('external_id')
|
||||
).first()
|
||||
if db_lib and db_lib.internal_id:
|
||||
library_dict[db_lib.internal_id] = lib['name']
|
||||
else:
|
||||
# For other services, use external_id (their unique identifier)
|
||||
if lib.get('external_id'):
|
||||
library_dict[lib.get('external_id')] = lib['name']
|
||||
|
||||
servers_with_libraries[server.id] = {
|
||||
'server': server,
|
||||
'libraries': library_dict
|
||||
}
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"Failed to fetch libraries for server {server.server_nickname}: {e}")
|
||||
servers_with_libraries[server.id] = {'server': server, 'libraries': {}}
|
||||
|
||||
# Check if user accounts are enabled
|
||||
allow_user_accounts = Setting.get_bool('ALLOW_USER_ACCOUNTS', False)
|
||||
user_account_created = session.get(f'invite_{invite.id}_user_account_created', False)
|
||||
|
||||
# Create user account form if needed
|
||||
account_form = None
|
||||
if allow_user_accounts:
|
||||
from app.forms import UserAccountCreationForm
|
||||
account_form = UserAccountCreationForm()
|
||||
|
||||
if request.method == 'POST':
|
||||
auth_method = request.form.get('auth_method')
|
||||
action_taken = request.form.get('action')
|
||||
|
||||
# Handle Plex conflict resolution
|
||||
if action_taken == 'link_plex_account' and plex_conflict_info and plex_conflict_info.get('type') == 'can_link':
|
||||
# User chose to link existing Plex account to local account
|
||||
# Clear the conflict and set the Plex user info to proceed
|
||||
plex_user_data = {
|
||||
'username': plex_conflict_info['plex_username'],
|
||||
'email': plex_conflict_info['plex_email'],
|
||||
# We'll need to get the full Plex account info again
|
||||
}
|
||||
session[f'invite_{invite.id}_plex_user'] = plex_user_data
|
||||
session.pop(f'invite_{invite.id}_plex_conflict', None)
|
||||
flash(f"Plex account '{plex_conflict_info['plex_username']}' will be linked to your local account.", "success")
|
||||
current_app.logger.info(f"User chose to link existing Plex account {plex_conflict_info['plex_username']}")
|
||||
|
||||
elif action_taken == 'use_different_plex' and plex_conflict_info:
|
||||
# User chose to use a different Plex account
|
||||
session.pop(f'invite_{invite.id}_plex_conflict', None)
|
||||
session.pop(f'invite_{invite.id}_plex_user', None)
|
||||
flash("Please authenticate with a different Plex account.", "info")
|
||||
current_app.logger.info(f"User chose to use different Plex account instead of {plex_conflict_info['plex_username']}")
|
||||
|
||||
# Handle user account creation if enabled (MODIFIED: Store form data in session instead of creating account)
|
||||
elif action_taken == 'create_user_account' and allow_user_accounts:
|
||||
from app.forms import UserAccountCreationForm
|
||||
|
||||
account_form = UserAccountCreationForm()
|
||||
if account_form.validate_on_submit():
|
||||
# Store account creation data in session for later use
|
||||
session[f'invite_{invite.id}_user_account_data'] = {
|
||||
'username': account_form.username.data,
|
||||
'email': account_form.email.data,
|
||||
'password': account_form.password.data
|
||||
}
|
||||
|
||||
# Store cross-server credential preferences
|
||||
use_same_username = request.form.get('use_same_username') == 'true'
|
||||
use_same_email = request.form.get('use_same_email') == 'true'
|
||||
use_same_password = request.form.get('use_same_password') == 'true'
|
||||
|
||||
session[f'invite_{invite.id}_cross_server_prefs'] = {
|
||||
'use_same_username': use_same_username,
|
||||
'use_same_email': use_same_email,
|
||||
'use_same_password': use_same_password
|
||||
}
|
||||
|
||||
# Mark account step as completed (but not actually created yet)
|
||||
session[f'invite_{invite.id}_user_account_created'] = True
|
||||
|
||||
flash("Account information saved! Please continue with the authentication steps.", "success")
|
||||
current_app.logger.info(f"User account data stored in session for invite {invite.id}, username: {account_form.username.data}")
|
||||
current_app.logger.info(f"Cross-server preferences: same_username={use_same_username}, same_password={use_same_password}")
|
||||
|
||||
else:
|
||||
# Form validation failed, show errors
|
||||
for field, errors in account_form.errors.items():
|
||||
for error in errors:
|
||||
flash(f"{getattr(account_form, field).label.text}: {error}", "error")
|
||||
|
||||
elif auth_method == 'plex':
|
||||
return redirect(url_for('invites.initiate_plex_auth', invite_id=invite.id))
|
||||
|
||||
elif auth_method == 'discord':
|
||||
return redirect(url_for('invites.initiate_discord_auth', invite_id=invite.id))
|
||||
|
||||
elif action_taken == 'setup_server_access':
|
||||
# Store server-specific credentials in session for later use
|
||||
current_server_id = request.form.get('current_server_id')
|
||||
if current_server_id:
|
||||
# Store the username and password for this specific server
|
||||
server_credentials = {
|
||||
'username': request.form.get('jellyfin_username', ''),
|
||||
'password': request.form.get('jellyfin_password', ''),
|
||||
'email': request.form.get('jellyfin_email', '') # In case email field is added
|
||||
}
|
||||
|
||||
# Store server-specific credentials
|
||||
session[f'invite_{invite.id}_server_{current_server_id}_credentials'] = server_credentials
|
||||
session[f'invite_{invite.id}_server_{current_server_id}_completed'] = True
|
||||
|
||||
current_app.logger.info(f"Stored credentials for server {current_server_id}: username={server_credentials['username']}")
|
||||
flash("Server configuration saved. Complete all steps to create accounts.", "success")
|
||||
else:
|
||||
flash("No server specified for setup.", "error")
|
||||
|
||||
elif action_taken == 'accept_invite':
|
||||
# This is now the "All Servers Configured" step - create local account and all service accounts together
|
||||
if not already_authenticated_plex_user_info and has_plex_servers:
|
||||
flash("Please sign in with Plex first to accept the invite.", "warning")
|
||||
elif effective_require_sso and not already_authenticated_discord_user_info:
|
||||
flash("Discord account linking is required for this invite. Please link your Discord account.", "warning")
|
||||
elif allow_user_accounts and not session.get(f'invite_{invite.id}_user_account_data'):
|
||||
flash("Please complete the account setup step first.", "warning")
|
||||
else:
|
||||
# Create local user account from stored session data if needed
|
||||
user_app_access = None
|
||||
|
||||
# Check if we have stored user account data to create
|
||||
user_account_data = session.get(f'invite_{invite.id}_user_account_data')
|
||||
if user_account_data and allow_user_accounts:
|
||||
try:
|
||||
# Create the local user account now
|
||||
user_app_access = User(
|
||||
userType=UserType.LOCAL,
|
||||
localUsername=user_account_data['username'],
|
||||
email=user_account_data['email'],
|
||||
created_at=utcnow(),
|
||||
used_invite_id=invite.id
|
||||
)
|
||||
user_app_access.set_password(user_account_data['password'])
|
||||
db.session.add(user_app_access)
|
||||
db.session.flush() # Get the ID without committing yet
|
||||
|
||||
current_app.logger.info(f"Created local user account '{user_account_data['username']}' for invite {invite.id}")
|
||||
log_event(EventType.MUM_USER_ADDED_FROM_PLEX, f"Local user account '{user_account_data['username']}' created via invite {invite.id}", invite_id=invite.id)
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
current_app.logger.error(f"Error creating local user account for invite {invite.id}: {e}")
|
||||
flash("Error creating your account. Please try again.", "error")
|
||||
return redirect(url_for('invites.process_invite_form', invite_path_or_token=invite_path_or_token))
|
||||
|
||||
current_app.logger.debug(f"Invite acceptance - User app access: {user_app_access.localUsername if user_app_access else 'None'}")
|
||||
current_app.logger.debug(f"Invite acceptance - Session keys: {list(session.keys())}")
|
||||
|
||||
success, result_object_or_message = invite_service.accept_invite_and_grant_access(
|
||||
invite=invite,
|
||||
plex_user_uuid=already_authenticated_plex_user_info.get('uuid') if already_authenticated_plex_user_info else None,
|
||||
plex_username=already_authenticated_plex_user_info.get('username') if already_authenticated_plex_user_info else None,
|
||||
plex_email=already_authenticated_plex_user_info.get('email') if already_authenticated_plex_user_info else None,
|
||||
plex_thumb=already_authenticated_plex_user_info.get('thumb') if already_authenticated_plex_user_info else None,
|
||||
# Pass the entire dictionary as a single argument
|
||||
discord_user_info=already_authenticated_discord_user_info,
|
||||
ip_address=request.remote_addr,
|
||||
app_user=user_app_access
|
||||
)
|
||||
if success:
|
||||
# Clear session data
|
||||
session.pop(f'invite_{invite.id}_plex_user', None)
|
||||
session.pop(f'invite_{invite.id}_discord_user', None)
|
||||
session.pop(f'invite_{invite.id}_app_user_id', None)
|
||||
session.pop(f'invite_{invite.id}_user_account_created', None)
|
||||
session.pop(f'invite_{invite.id}_user_account_data', None) # Clear stored account data
|
||||
|
||||
# Clear server completion flags and credentials
|
||||
for server in invite.servers:
|
||||
session.pop(f'invite_{invite.id}_server_{server.id}_completed', None)
|
||||
session.pop(f'invite_{invite.id}_server_{server.id}_credentials', None)
|
||||
|
||||
username = user_app_access.localUsername if user_app_access else (already_authenticated_plex_user_info.get('username') if already_authenticated_plex_user_info else 'User')
|
||||
flash(f"Welcome, {username}! All accounts have been created and linked successfully.", "success")
|
||||
return redirect(url_for('invites.invite_success', username=username))
|
||||
else:
|
||||
flash(f"Failed to accept invite: {result_object_or_message}", "danger")
|
||||
|
||||
return redirect(url_for('invites.process_invite_form', invite_path_or_token=invite_path_or_token))
|
||||
|
||||
# Determine if we should use the steps-based template
|
||||
# Use steps if:
|
||||
# - User accounts are enabled (account creation needs to be step 1)
|
||||
# - Discord OAuth is enabled
|
||||
# - Multiple servers are available in this invite
|
||||
has_multiple_servers_available = len(invite.servers) > 1
|
||||
|
||||
# has_plex_servers already defined earlier for validation
|
||||
|
||||
# Get cross-server preferences from session
|
||||
cross_server_prefs = session.get(f'invite_{invite.id}_cross_server_prefs', {})
|
||||
use_same_username = cross_server_prefs.get('use_same_username', False)
|
||||
use_same_email = cross_server_prefs.get('use_same_email', False)
|
||||
use_same_password = cross_server_prefs.get('use_same_password', False)
|
||||
|
||||
# Get user account data for default username and email
|
||||
user_account_data = session.get(f'invite_{invite.id}_user_account_data', {})
|
||||
local_username = user_account_data.get('username', '')
|
||||
local_email = user_account_data.get('email', '')
|
||||
|
||||
# Generate invite steps for progress indicator
|
||||
invite_steps = []
|
||||
current_step = None
|
||||
|
||||
# Step 1: User Account Creation (if enabled)
|
||||
if allow_user_accounts:
|
||||
invite_steps.append({
|
||||
'id': 'user_account',
|
||||
'name': 'Account Details',
|
||||
'icon': 'fa-solid fa-user-plus',
|
||||
'required': True,
|
||||
'completed': user_account_created
|
||||
})
|
||||
|
||||
# Step 2: Discord Authentication (if required)
|
||||
if show_discord_button:
|
||||
invite_steps.append({
|
||||
'id': 'discord',
|
||||
'name': 'Discord Login',
|
||||
'icon': 'fa-brands fa-discord',
|
||||
'required': effective_require_sso,
|
||||
'completed': already_authenticated_discord_user_info is not None
|
||||
})
|
||||
|
||||
# Step 3: Plex Authentication (if there are Plex servers)
|
||||
if has_plex_servers:
|
||||
# Get the first Plex server name for the step title
|
||||
plex_server = next((server for server in invite.servers if server.service_type.name.upper() == 'PLEX'), None)
|
||||
plex_server_name = plex_server.server_nickname if plex_server else 'Plex'
|
||||
|
||||
invite_steps.append({
|
||||
'id': 'plex',
|
||||
'name': f'{plex_server_name} Access',
|
||||
'icon': 'fa-solid fa-right-to-bracket',
|
||||
'required': True,
|
||||
'completed': already_authenticated_plex_user_info is not None
|
||||
})
|
||||
|
||||
# Step 4+: Server Access Steps (for non-Plex servers)
|
||||
# Sort servers to prioritize those without username conflicts
|
||||
non_plex_servers = [s for s in invite.servers if s.service_type.name.upper() != 'PLEX']
|
||||
|
||||
# Check for username conflicts if using same username
|
||||
username_conflicts = {}
|
||||
if use_same_username and local_username:
|
||||
for server in non_plex_servers:
|
||||
try:
|
||||
service = MediaServiceFactory.create_service_from_db(server)
|
||||
if hasattr(service, 'check_username_exists'):
|
||||
username_exists = service.check_username_exists(local_username)
|
||||
username_conflicts[server.id] = username_exists
|
||||
current_app.logger.info(f"Username '{local_username}' exists on {server.server_nickname}: {username_exists}")
|
||||
except Exception as e:
|
||||
current_app.logger.warning(f"Could not check username on {server.server_nickname}: {e}")
|
||||
username_conflicts[server.id] = False
|
||||
|
||||
# Sort servers: non-conflicting first, then conflicting
|
||||
def server_sort_key(server):
|
||||
has_conflict = username_conflicts.get(server.id, False)
|
||||
return (has_conflict, server.server_nickname) # False sorts before True
|
||||
|
||||
sorted_non_plex_servers = sorted(non_plex_servers, key=server_sort_key)
|
||||
|
||||
for server in sorted_non_plex_servers:
|
||||
step_id = f'server_access_{server.id}'
|
||||
server_completed = session.get(f'invite_{invite.id}_server_{server.id}_completed', False)
|
||||
invite_steps.append({
|
||||
'id': step_id,
|
||||
'name': f'{server.server_nickname} Access',
|
||||
'icon': 'fa-solid fa-server',
|
||||
'required': True,
|
||||
'completed': server_completed,
|
||||
'server_id': server.id,
|
||||
'server_name': server.server_nickname,
|
||||
'server_type': server.service_type.name.upper()
|
||||
})
|
||||
|
||||
# Set current step if this server setup is not completed
|
||||
if not server_completed and current_step is None:
|
||||
# Check if prerequisites are met
|
||||
discord_ready = not show_discord_button or already_authenticated_discord_user_info
|
||||
plex_ready = not has_plex_servers or already_authenticated_plex_user_info
|
||||
account_ready = not allow_user_accounts or user_account_created
|
||||
|
||||
if discord_ready and plex_ready and account_ready:
|
||||
current_step = invite_steps[-1] # Set this as current step
|
||||
|
||||
# Always use the steps template for a consistent, modern design
|
||||
# The steps template handles all scenarios properly (user accounts disabled, single server, etc.)
|
||||
|
||||
# Prepare template variables for current step
|
||||
server_username_taken = False
|
||||
preferred_username = ""
|
||||
default_username = ""
|
||||
|
||||
if current_step and current_step.get('server_id'):
|
||||
server_id = current_step['server_id']
|
||||
server_username_taken = username_conflicts.get(server_id, False)
|
||||
|
||||
# Determine default username
|
||||
if use_same_username and local_username:
|
||||
preferred_username = local_username
|
||||
default_username = local_username if not server_username_taken else ""
|
||||
elif already_authenticated_plex_user_info:
|
||||
default_username = already_authenticated_plex_user_info.get('username', '')
|
||||
|
||||
return render_template('invite/steps/index.html',
|
||||
form=form_instance,
|
||||
invite=invite,
|
||||
error=None,
|
||||
invite_path_or_token=invite_path_or_token,
|
||||
# Pass the effective values to the template
|
||||
discord_sso_is_mandatory=effective_require_sso,
|
||||
setting_require_guild_membership=effective_require_guild,
|
||||
show_discord_button=show_discord_button,
|
||||
already_authenticated_plex_user=already_authenticated_plex_user_info,
|
||||
already_authenticated_discord_user=already_authenticated_discord_user_info,
|
||||
setting_discord_guild_id=setting_discord_guild_id,
|
||||
setting_discord_server_invite_url=setting_discord_server_invite_url,
|
||||
server_name=server_name,
|
||||
allow_user_accounts=allow_user_accounts,
|
||||
user_account_created=user_account_created,
|
||||
account_form=account_form,
|
||||
servers_with_libraries=servers_with_libraries,
|
||||
# Add missing variables
|
||||
has_plex_servers=has_plex_servers,
|
||||
invite_steps=invite_steps,
|
||||
current_step=current_step,
|
||||
# Cross-server credential variables
|
||||
use_same_username=use_same_username,
|
||||
use_same_email=use_same_email,
|
||||
use_same_password=use_same_password,
|
||||
server_username_taken=server_username_taken,
|
||||
preferred_username=preferred_username,
|
||||
default_username=default_username,
|
||||
# User account data
|
||||
user_account_data=user_account_data,
|
||||
# Plex conflict variables
|
||||
plex_conflict_info=plex_conflict_info
|
||||
)
|
||||
current_app.logger.debug("Serving React SPA for invite page: %s", invite_path_or_token)
|
||||
return send_from_directory(dist_path, 'index.html')
|
||||
|
||||
@invites_bp.route('/success') # Path is /invites/success
|
||||
@setup_required
|
||||
|
||||
5
app/routes/public_api_v1/__init__.py
Normal file
5
app/routes/public_api_v1/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from flask import Blueprint
|
||||
|
||||
bp = Blueprint('public_api_v1', __name__)
|
||||
|
||||
from . import auth, me, invites, invite_wizard # noqa: E402,F401
|
||||
152
app/routes/public_api_v1/auth.py
Normal file
152
app/routes/public_api_v1/auth.py
Normal file
@@ -0,0 +1,152 @@
|
||||
from datetime import datetime
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify, request
|
||||
from flask_login import login_user, logout_user, current_user, login_required
|
||||
from sqlalchemy import func
|
||||
|
||||
from app.extensions import db
|
||||
from app.models import User, UserType, Setting, EventType
|
||||
from app.routes.public_api_v1 import bp
|
||||
from app.utils.helpers import get_csrf_token, log_event
|
||||
|
||||
|
||||
def _serialize_portal_user(user: User | None):
|
||||
if not user:
|
||||
return None
|
||||
return {
|
||||
'uuid': user.uuid,
|
||||
'username': user.localUsername or user.external_username,
|
||||
'email': user.email or user.discord_email,
|
||||
'user_type': user.userType.value if hasattr(user.userType, 'value') else str(user.userType),
|
||||
'display_name': getattr(user, 'get_display_name', lambda: None)(),
|
||||
'is_active': getattr(user, 'is_active', True),
|
||||
'force_password_change': getattr(user, 'force_password_change', False)
|
||||
}
|
||||
|
||||
|
||||
def _find_local_user(identifier: str) -> User | None:
|
||||
if not identifier:
|
||||
return None
|
||||
|
||||
lowered = identifier.strip().lower()
|
||||
if not lowered:
|
||||
return None
|
||||
|
||||
candidates = User.query.filter(
|
||||
User.userType.in_([UserType.LOCAL, UserType.OWNER])
|
||||
)
|
||||
|
||||
user = candidates.filter(func.lower(User.localUsername) == lowered).first()
|
||||
if user:
|
||||
return user
|
||||
|
||||
user = candidates.filter(func.lower(User.email) == lowered).first()
|
||||
if user:
|
||||
return user
|
||||
|
||||
return candidates.filter(func.lower(User.discord_email) == lowered).first()
|
||||
|
||||
|
||||
def _session_payload(user: User | None):
|
||||
return {
|
||||
'user': _serialize_portal_user(user),
|
||||
'csrf_token': get_csrf_token(),
|
||||
'force_password_change': getattr(user, 'force_password_change', False) if user else False
|
||||
}
|
||||
|
||||
|
||||
@bp.route('/auth/csrf-token', methods=['GET'])
|
||||
def issue_public_csrf_token():
|
||||
request_id = str(uuid4())
|
||||
token = get_csrf_token()
|
||||
response = jsonify({
|
||||
'data': {'csrf_token': token},
|
||||
'meta': {
|
||||
'request_id': request_id,
|
||||
'deprecated': False
|
||||
}
|
||||
})
|
||||
response.headers['Cache-Control'] = 'no-store'
|
||||
return response, 200
|
||||
|
||||
|
||||
@bp.route('/auth/login', methods=['POST'])
|
||||
def local_login():
|
||||
request_id = str(uuid4())
|
||||
if not Setting.get_bool('ALLOW_USER_ACCOUNTS', False):
|
||||
return jsonify({
|
||||
'error': {
|
||||
'code': 'USER_ACCOUNTS_DISABLED',
|
||||
'message': 'End-user accounts are disabled.'
|
||||
},
|
||||
'meta': {'request_id': request_id}
|
||||
}), 403
|
||||
|
||||
payload = request.get_json(silent=True) or {}
|
||||
username = (payload.get('username') or '').strip()
|
||||
password = payload.get('password') or ''
|
||||
remember = bool(payload.get('remember', False))
|
||||
|
||||
if not username or not password:
|
||||
return jsonify({
|
||||
'error': {
|
||||
'code': 'INVALID_PAYLOAD',
|
||||
'message': 'Username and password are required.'
|
||||
},
|
||||
'meta': {'request_id': request_id}
|
||||
}), 400
|
||||
|
||||
candidate = _find_local_user(username)
|
||||
if not candidate or not candidate.check_password(password):
|
||||
log_event(EventType.ADMIN_LOGIN_FAIL, f"Failed local login attempt for '{username}'.")
|
||||
return jsonify({
|
||||
'error': {
|
||||
'code': 'INVALID_CREDENTIALS',
|
||||
'message': 'Invalid username or password.'
|
||||
},
|
||||
'meta': {'request_id': request_id}
|
||||
}), 401
|
||||
|
||||
if not candidate.is_active:
|
||||
return jsonify({
|
||||
'error': {
|
||||
'code': 'ACCOUNT_DISABLED',
|
||||
'message': 'Account is disabled.'
|
||||
},
|
||||
'meta': {'request_id': request_id}
|
||||
}), 403
|
||||
|
||||
login_user(candidate, remember=remember)
|
||||
candidate.last_login_at = datetime.utcnow()
|
||||
try:
|
||||
db.session.commit()
|
||||
except Exception:
|
||||
db.session.rollback()
|
||||
|
||||
log_event(EventType.ADMIN_LOGIN_SUCCESS, f"App user '{candidate.localUsername}' logged in.")
|
||||
|
||||
return jsonify({
|
||||
'data': _session_payload(candidate),
|
||||
'meta': {
|
||||
'request_id': request_id,
|
||||
'deprecated': False
|
||||
}
|
||||
}), 200
|
||||
|
||||
|
||||
@bp.route('/auth/logout', methods=['POST'])
|
||||
@login_required
|
||||
def local_logout():
|
||||
request_id = str(uuid4())
|
||||
actor = _serialize_portal_user(current_user)
|
||||
logout_user()
|
||||
if actor:
|
||||
log_event(EventType.ADMIN_LOGOUT, f"User '{actor.get('username')}' logged out (public API).")
|
||||
return jsonify({
|
||||
'data': {'success': True},
|
||||
'meta': {
|
||||
'request_id': request_id,
|
||||
'deprecated': False
|
||||
}
|
||||
}), 200
|
||||
673
app/routes/public_api_v1/invite_wizard.py
Normal file
673
app/routes/public_api_v1/invite_wizard.py
Normal file
@@ -0,0 +1,673 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional
|
||||
from uuid import uuid4
|
||||
|
||||
import requests
|
||||
from flask import jsonify, request, session, current_app, url_for, g
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from app.routes.public_api_v1 import bp
|
||||
from app.models import Invite, User, UserType, Setting, EventType
|
||||
from app.extensions import db
|
||||
from app.services import invite_service
|
||||
from app.services.media_service_factory import MediaServiceFactory
|
||||
from app.utils.timezone_utils import utcnow
|
||||
|
||||
|
||||
def _response_meta(request_id: str) -> Dict[str, Any]:
|
||||
return {
|
||||
'request_id': request_id,
|
||||
'generated_at': datetime.utcnow().isoformat() + 'Z',
|
||||
}
|
||||
|
||||
|
||||
def _error_response(request_id: str, status_code: int, code: str, message: str):
    """Return a ``(json_response, status_code)`` tuple in the standard
    error envelope used by all public API v1 endpoints."""
    body = {
        'error': {'code': code, 'message': message},
        'meta': _response_meta(request_id),
    }
    return jsonify(body), status_code
|
||||
|
||||
|
||||
def _get_invite(token_or_path: str) -> Optional[Invite]:
    """Look up an invite by its token or by its custom vanity path."""
    matches_token_or_path = (
        (Invite.token == token_or_path) | (Invite.custom_path == token_or_path)
    )
    return Invite.query.filter(matches_token_or_path).first()
|
||||
|
||||
|
||||
def _serialize_libraries(library_dict: Dict[str, str]) -> List[Dict[str, str]]:
|
||||
return [
|
||||
{'id': library_id, 'name': library_name}
|
||||
for library_id, library_name in library_dict.items()
|
||||
]
|
||||
|
||||
|
||||
def _server_access_url(server) -> Optional[str]:
|
||||
service_type = server.service_type.name.upper()
|
||||
if service_type == 'PLEX':
|
||||
return "https://app.plex.tv"
|
||||
if hasattr(server, 'url') and server.url:
|
||||
return server.url
|
||||
return None
|
||||
|
||||
|
||||
def _build_invite_state(invite: Invite) -> Dict[str, Any]:
    """Assemble the complete wizard state for an invite.

    Aggregates session progress (Plex/Discord auth, per-server credentials,
    local-account data), instance settings, and live library listings into a
    single dict the React SPA renders from. Read-mostly: it only reads the
    Flask session, never writes it.

    Returns a dict with keys: ``invite``, ``steps``, ``next_step_id``,
    ``plex``, ``discord``, ``account``, ``servers``, ``meta``.
    """
    # All wizard session keys are namespaced per-invite with this prefix.
    prefix = f'invite_{invite.id}_'

    already_authenticated_plex_user = session.get(f'{prefix}plex_user')
    already_authenticated_discord_user = session.get(f'{prefix}discord_user')
    plex_conflict_info = session.get(f'{prefix}plex_conflict')

    # Local-account data is only surfaced when the feature is enabled.
    allow_user_accounts = Setting.get_bool('ALLOW_USER_ACCOUNTS', False)
    user_account_created = session.get(f'{prefix}user_account_created', False)
    account_data = session.get(f'{prefix}user_account_data', {}) if allow_user_accounts else {}
    # NOTE(review): despite the name this echoes the plaintext password back
    # into the state payload — confirm the SPA actually needs it.
    account_data_sanitized = {
        'username': account_data.get('username'),
        'email': account_data.get('email'),
        'password': account_data.get('password'),
    } if account_data else None

    cross_server_prefs = session.get(f'{prefix}cross_server_prefs', {}) if allow_user_accounts else {}
    use_same_username = bool(cross_server_prefs.get('use_same_username'))
    use_same_email = bool(cross_server_prefs.get('use_same_email'))
    use_same_password = bool(cross_server_prefs.get('use_same_password'))

    oauth_enabled = Setting.get_bool('DISCORD_OAUTH_ENABLED', False)
    require_discord_auth = bool(invite.require_discord_auth)
    require_discord_guild = bool(invite.require_discord_guild_membership)
    discord_guild_id = Setting.get('DISCORD_GUILD_ID')
    discord_invite_url = Setting.get('DISCORD_SERVER_INVITE_URL')

    has_plex_servers = any(server.service_type.name.upper() == 'PLEX' for server in invite.servers)
    # Per-server info keyed by server id; library fetch failures degrade to {}.
    servers_with_libraries: Dict[int, Dict[str, Any]] = {}
    for server in invite.servers or []:
        libraries = {}
        try:
            service = MediaServiceFactory.create_service_from_db(server)
            if service:
                fetched_libraries = service.get_libraries()
                for lib in fetched_libraries:
                    if server.service_type.value == 'kavita':
                        # Kavita libraries are keyed by our internal DB id,
                        # not the service's external id.
                        from app.models_media_services import MediaLibrary
                        db_library = MediaLibrary.query.filter_by(
                            server_id=server.id,
                            external_id=lib.get('external_id')
                        ).first()
                        if db_library and db_library.internal_id:
                            libraries[db_library.internal_id] = lib['name']
                    else:
                        external_id = lib.get('external_id')
                        if external_id:
                            libraries[external_id] = lib['name']
        except Exception as exc:
            # Library listing is best-effort; the wizard still renders.
            current_app.logger.error(
                "Failed to fetch libraries for server %s (%s): %s",
                server.server_nickname,
                server.service_type.name,
                exc,
            )

        servers_with_libraries[server.id] = {
            'id': server.id,
            'name': server.server_nickname,
            'service_type': server.service_type.name.upper(),
            'libraries': libraries,
            'completed': session.get(f'{prefix}server_{server.id}_completed', False),
            'credentials': session.get(f'{prefix}server_{server.id}_credentials', {}),
            'access_url': _server_access_url(server),
        }

    local_username = account_data.get('username') if account_data else ''

    # When the user wants one username everywhere, probe each non-Plex
    # service for an existing account with that name (best-effort).
    username_conflicts: Dict[int, bool] = {}
    if use_same_username and local_username:
        for server in invite.servers:
            if server.service_type.name.upper() == 'PLEX':
                continue
            try:
                service = MediaServiceFactory.create_service_from_db(server)
                if hasattr(service, 'check_username_exists'):
                    exists = service.check_username_exists(local_username)
                    username_conflicts[server.id] = bool(exists)
                else:
                    username_conflicts[server.id] = False
            except Exception as exc:
                current_app.logger.warning(
                    "Could not check username on %s: %s",
                    server.server_nickname,
                    exc,
                )
                username_conflicts[server.id] = False

    # Build the ordered step list: account -> discord -> plex -> per-server.
    invite_steps: List[Dict[str, Any]] = []
    if allow_user_accounts:
        invite_steps.append({
            'id': 'user_account',
            'name': 'Account Details',
            'icon': 'fa-solid fa-user-plus',
            'required': True,
            'completed': user_account_created,
        })

    if oauth_enabled:
        invite_steps.append({
            'id': 'discord',
            'name': 'Discord Login',
            'icon': 'fa-brands fa-discord',
            'required': require_discord_auth,
            'completed': already_authenticated_discord_user is not None,
        })

    if has_plex_servers:
        plex_server = next((srv for srv in invite.servers if srv.service_type.name.upper() == 'PLEX'), None)
        plex_name = plex_server.server_nickname if plex_server else 'Plex'
        invite_steps.append({
            'id': 'plex',
            'name': f'{plex_name} Access',
            'icon': 'fa-solid fa-right-to-bracket',
            'required': True,
            'completed': already_authenticated_plex_user is not None,
        })

    non_plex_servers = [
        server for server in invite.servers
        if server.service_type.name.upper() != 'PLEX'
    ]

    # Conflict-free servers first (False < True), then alphabetical.
    def server_sort_key(server):
        return (
            username_conflicts.get(server.id, False),
            server.server_nickname,
        )

    for server in sorted(non_plex_servers, key=server_sort_key):
        invite_steps.append({
            'id': f'server_access_{server.id}',
            'name': f'{server.server_nickname} Access',
            'icon': 'fa-solid fa-server',
            'required': True,
            'completed': servers_with_libraries[server.id]['completed'],
            'server_id': server.id,
            'server_name': server.server_nickname,
            'server_type': server.service_type.name.upper(),
            'username_conflict': username_conflicts.get(server.id, False),
        })

    # Pick the first incomplete step, but re-route to an earlier
    # prerequisite (account/discord/plex) if that one is still pending.
    next_step_id: Optional[str] = None
    for step in invite_steps:
        if step['completed']:
            continue
        if step['id'] == 'user_account':
            next_step_id = step['id']
            break
        if step['id'] == 'discord':
            if allow_user_accounts and not user_account_created:
                next_step_id = 'user_account'
            else:
                next_step_id = step['id']
            break
        if step['id'] == 'plex':
            if allow_user_accounts and not user_account_created:
                next_step_id = 'user_account'
            elif oauth_enabled and require_discord_auth and not already_authenticated_discord_user:
                next_step_id = 'discord'
            else:
                next_step_id = step['id']
            break
        if step['id'].startswith('server_access_'):
            if allow_user_accounts and not user_account_created:
                next_step_id = 'user_account'
                break
            if oauth_enabled and require_discord_auth and not already_authenticated_discord_user:
                next_step_id = 'discord'
                break
            if has_plex_servers and not already_authenticated_plex_user:
                next_step_id = 'plex'
                break
            next_step_id = step['id']
            break

    # Flatten per-server info into the list shape the SPA consumes.
    server_details = []
    for server_id, info in servers_with_libraries.items():
        server_details.append({
            'id': server_id,
            'name': info['name'],
            'service_type': info['service_type'],
            'completed': info['completed'],
            'credentials': info['credentials'],
            'libraries': _serialize_libraries(info['libraries']),
            'username_conflict': username_conflicts.get(server_id, False),
            'access_url': info.get('access_url'),
        })

    server_name = getattr(g, 'app_name', None) or Setting.get('APP_NAME', 'the server')

    return {
        'invite': {
            'id': invite.id,
            'token': invite.token,
            'custom_path': invite.custom_path,
            'expires_at': invite.expires_at.isoformat() if invite.expires_at else None,
            'max_uses': invite.max_uses,
            'current_uses': invite.current_uses,
            'is_active': invite.is_active,
            'allow_downloads': invite.allow_downloads,
            'grant_library_ids': invite.grant_library_ids,
            'require_discord_auth': require_discord_auth,
            'require_discord_guild_membership': require_discord_guild,
            'server_count': len(invite.servers or []),
        },
        'steps': invite_steps,
        'next_step_id': next_step_id,
        'plex': {
            'has_plex_servers': has_plex_servers,
            'authenticated': already_authenticated_plex_user is not None,
            'user': already_authenticated_plex_user,
            'conflict': plex_conflict_info,
        },
        'discord': {
            'oauth_enabled': oauth_enabled,
            'requires_auth': require_discord_auth,
            'requires_guild': require_discord_guild,
            'authenticated': already_authenticated_discord_user is not None,
            'user': already_authenticated_discord_user,
            'guild_id': discord_guild_id,
            'invite_url': discord_invite_url,
        },
        'account': {
            'allowed': allow_user_accounts,
            'completed': bool(user_account_created),
            'data': account_data_sanitized,
            'preferences': {
                'use_same_username': use_same_username,
                'use_same_email': use_same_email,
                'use_same_password': use_same_password,
            }
        },
        'servers': server_details,
        'meta': {
            'server_label': server_name,
            'has_multiple_servers': len(invite.servers or []) > 1,
        }
    }
|
||||
|
||||
|
||||
@bp.route('/public/invite/<token>/wizard', methods=['GET'])
def get_invite_wizard_state(token):
    """Return the full wizard state for an invite token or custom path."""
    request_id = str(uuid4())
    invite = _get_invite(token)
    if invite is None:
        return _error_response(request_id, 404, 'INVITE_NOT_FOUND', 'Invite not found.')
    payload = {
        'data': _build_invite_state(invite),
        'meta': _response_meta(request_id),
    }
    return jsonify(payload), 200
|
||||
|
||||
|
||||
@bp.route('/public/invite/<token>/account', methods=['POST'])
def save_invite_account(token):
    """Validate local-account details for the invite wizard and stash them.

    Nothing is written to the database here: the validated data is held in
    the session until ``/complete`` actually creates the user.
    """
    request_id = str(uuid4())
    invite = _get_invite(token)
    if not invite:
        return _error_response(request_id, 404, 'INVITE_NOT_FOUND', 'Invite not found.')

    if not Setting.get_bool('ALLOW_USER_ACCOUNTS', False):
        return _error_response(request_id, 400, 'USER_ACCOUNTS_DISABLED', 'Local account creation is disabled for this server.')

    payload = request.get_json(silent=True) or {}
    required_fields = {'username', 'email', 'password', 'confirm_password'}
    if not required_fields.issubset(payload.keys()):
        return _error_response(request_id, 400, 'INVALID_PAYLOAD', 'Missing required fields for account creation.')

    # Imported here rather than at module top — presumably to avoid a
    # circular import with the forms module; verify before moving it.
    from app.forms import UserAccountCreationForm

    # CSRF is disabled for this JSON endpoint; the invite token itself
    # gates access.
    form = UserAccountCreationForm(meta={'csrf': False})
    form.username.data = (payload.get('username') or '').strip()
    form.email.data = (payload.get('email') or '').strip()
    form.password.data = payload.get('password') or ''
    form.confirm_password.data = payload.get('confirm_password') or ''

    if not form.validate():
        # Per-field WTForms errors are passed through for the SPA to render.
        errors = {field: errs for field, errs in form.errors.items()}
        return jsonify({
            'error': {
                'code': 'ACCOUNT_VALIDATION_FAILED',
                'message': 'Account information is invalid.',
                'details': errors,
            },
            'meta': _response_meta(request_id)
        }), 400

    prefix = f'invite_{invite.id}_'
    # NOTE(review): the plaintext password is kept in the session until the
    # invite completes — confirm the session backend is server-side.
    session[f'{prefix}user_account_data'] = {
        'username': form.username.data,
        'email': form.email.data,
        'password': form.password.data,
    }
    session[f'{prefix}cross_server_prefs'] = {
        'use_same_username': bool(payload.get('use_same_username')),
        'use_same_email': bool(payload.get('use_same_email')),
        'use_same_password': bool(payload.get('use_same_password')),
    }
    session[f'{prefix}user_account_created'] = True

    state = _build_invite_state(invite)
    return jsonify({'data': state, 'meta': _response_meta(request_id)}), 200
|
||||
|
||||
|
||||
@bp.route('/public/invite/<token>/server/<int:server_id>/credentials', methods=['POST'])
def save_server_credentials(token, server_id):
    """Store per-server wizard credentials in the session and return state."""
    request_id = str(uuid4())
    invite = _get_invite(token)
    if invite is None:
        return _error_response(request_id, 404, 'INVITE_NOT_FOUND', 'Invite not found.')

    server_ids = {srv.id for srv in invite.servers}
    if server_id not in server_ids:
        return _error_response(request_id, 404, 'INVITE_SERVER_NOT_FOUND', 'Server is not attached to this invite.')

    payload = request.get_json(silent=True) or {}
    credentials = {
        'username': (payload.get('username') or '').strip(),
        'password': payload.get('password') or '',
        'email': (payload.get('email') or '').strip(),
    }

    # Defaults to completed unless the client explicitly says otherwise.
    completed = bool(payload.get('completed', True))

    key_base = f'invite_{invite.id}_server_{server_id}'
    session[f'{key_base}_credentials'] = credentials
    session[f'{key_base}_completed'] = completed

    return jsonify({
        'data': _build_invite_state(invite),
        'meta': _response_meta(request_id),
    }), 200
|
||||
|
||||
|
||||
@bp.route('/public/invite/<token>/plex/start', methods=['POST'])
def start_plex_auth(token):
    """Begin the Plex PIN-based auth flow for an invite.

    Creates a PIN at plex.tv, stores the flow identifiers in the session for
    the callback route to claim, and returns the app.plex.tv auth URL the
    SPA should redirect the browser to.
    """
    request_id = str(uuid4())
    invite = _get_invite(token)
    if not invite:
        return _error_response(request_id, 404, 'INVITE_NOT_FOUND', 'Invite not found.')

    payload = request.get_json(silent=True) or {}

    prefix = f'invite_{invite.id}_'

    # Remember where the SPA wants to resume after the OAuth round-trip.
    return_path = payload.get('return_path')
    if isinstance(return_path, str) and return_path:
        session[f'{prefix}return_path'] = return_path

    session['plex_oauth_invite_id'] = invite.id

    try:
        app_name = Setting.get('APP_NAME', 'MUM')
        client_id = f"MUM-InvitePlexLink-{str(invite.id)[:8]}"

        # "strong": "true" requests an OAuth-capable PIN (Plex v2 pins API).
        pin_response = requests.post(
            "https://plex.tv/api/v2/pins",
            headers={"Accept": "application/json"},
            data={
                "strong": "true",
                "X-Plex-Product": app_name,
                "X-Plex-Client-Identifier": client_id,
            },
            timeout=10,
        )

        if pin_response.status_code != 201:
            current_app.logger.error(
                "Plex PIN creation failed for invite %s: %s - %s",
                invite.id,
                pin_response.status_code,
                pin_response.text[:200],
            )
            return _error_response(request_id, 502, 'PLEX_PIN_FAILED', 'Could not initiate Plex login.')

        pin_data = pin_response.json()
        pin_id = pin_data["id"]
        pin_code = pin_data["code"]

        # The invite-flow callback reads these keys to poll/claim the PIN.
        session['plex_pin_code_invite_flow'] = pin_code
        session['plex_pin_id_invite_flow'] = pin_id
        session['plex_client_id_invite_flow'] = client_id
        session['plex_app_name_invite_flow'] = app_name

        # Build the absolute URL Plex should forward the browser back to.
        app_base_url = Setting.get('APP_BASE_URL', request.url_root.rstrip('/'))
        callback_path = url_for('invites.plex_oauth_callback', _external=False)
        forward_url = f"{app_base_url.rstrip('/')}{callback_path}"

        params = {
            "clientID": client_id,
            "code": pin_code,
            "context[device][product]": app_name,
            "forwardUrl": forward_url,
        }
        encoded = urlencode(params)
        # Plex's hosted auth page reads its parameters from the URL fragment.
        redirect_url = f"https://app.plex.tv/auth#?{encoded}"

        state = _build_invite_state(invite)
        return jsonify({
            'data': {
                'redirect_url': redirect_url,
                'state': state,
            },
            'meta': _response_meta(request_id),
        }), 200
    except Exception as exc:
        current_app.logger.exception("Failed to start Plex auth for invite %s: %s", invite.id, exc)
        return _error_response(request_id, 500, 'PLEX_AUTH_ERROR', 'Unexpected error starting Plex authentication.')
|
||||
|
||||
|
||||
@bp.route('/public/invite/<token>/plex/resolve', methods=['POST'])
def resolve_plex_conflict(token):
    """Resolve a pending Plex account conflict.

    Supported actions: 'link_existing' (adopt the known Plex identity when
    the conflict is linkable) or 'use_different' (discard it and retry).
    """
    request_id = str(uuid4())
    invite = _get_invite(token)
    if invite is None:
        return _error_response(request_id, 404, 'INVITE_NOT_FOUND', 'Invite not found.')

    action = (request.get_json(silent=True) or {}).get('action')

    prefix = f'invite_{invite.id}_'
    conflict = session.get(f'{prefix}plex_conflict')
    if not conflict:
        return _error_response(request_id, 400, 'NO_PLEX_CONFLICT', 'There is no Plex account conflict to resolve.')

    if action == 'link_existing' and conflict.get('type') == 'can_link':
        # Adopt the already-known Plex identity and clear the conflict flag.
        session[f'{prefix}plex_user'] = {
            'username': conflict['plex_username'],
            'email': conflict.get('plex_email'),
        }
        session.pop(f'{prefix}plex_conflict', None)
    elif action == 'use_different':
        # Drop both the conflict and any previously stored Plex identity.
        session.pop(f'{prefix}plex_conflict', None)
        session.pop(f'{prefix}plex_user', None)
    else:
        return _error_response(request_id, 400, 'INVALID_ACTION', 'Unsupported Plex conflict action.')

    return jsonify({
        'data': _build_invite_state(invite),
        'meta': _response_meta(request_id),
    }), 200
|
||||
|
||||
|
||||
@bp.route('/public/invite/<token>/discord/start', methods=['POST'])
def start_discord_auth(token):
    """Begin the Discord OAuth flow for an invite.

    Stores a one-time state token in the session (CSRF protection for the
    callback) and returns the authorize URL — either the admin-provided one
    with our state/redirect_uri injected, or a constructed default.
    """
    request_id = str(uuid4())
    invite = _get_invite(token)
    if not invite:
        return _error_response(request_id, 404, 'INVITE_NOT_FOUND', 'Invite not found.')

    payload = request.get_json(silent=True) or {}

    if not Setting.get_bool('DISCORD_OAUTH_ENABLED', False):
        return _error_response(request_id, 400, 'DISCORD_OAUTH_DISABLED', 'Discord login is not currently available.')

    client_id = Setting.get('DISCORD_CLIENT_ID')
    if not client_id:
        return _error_response(request_id, 400, 'DISCORD_NOT_CONFIGURED', 'Discord integration is not configured.')

    session['discord_oauth_invite_id'] = invite.id
    # The callback must echo this state back, or the exchange is rejected.
    state_token = str(uuid4())
    session['discord_oauth_state_invite'] = state_token

    prefix = f'invite_{invite.id}_'
    # Remember where the SPA wants to resume after the OAuth round-trip.
    return_path = payload.get('return_path')
    if isinstance(return_path, str) and return_path:
        session[f'{prefix}return_path'] = return_path

    provided_oauth_url = Setting.get('DISCORD_OAUTH_AUTH_URL')
    redirect_uri = Setting.get('DISCORD_REDIRECT_URI_INVITE') or url_for('invites.discord_oauth_callback', _external=True)

    if provided_oauth_url:
        # Admin supplied a full authorize URL: inject our state and force
        # the redirect_uri to match what our callback expects.
        from urllib.parse import urlparse, parse_qs, urlunparse, urlencode

        parsed_url = urlparse(provided_oauth_url)
        query = parse_qs(parsed_url.query)
        query['state'] = [state_token]
        if query.get('redirect_uri', [None])[0] != redirect_uri:
            query['redirect_uri'] = [redirect_uri]
        final_query = urlencode(query, doseq=True)
        redirect_url = urlunparse(parsed_url._replace(query=final_query))
    else:
        from urllib.parse import urlencode

        # Default authorize URL with the standard identify/email/guilds scopes.
        params = {
            'client_id': client_id,
            'redirect_uri': redirect_uri,
            'response_type': 'code',
            'scope': 'identify email guilds',
            'state': state_token,
        }
        redirect_url = f"https://discord.com/api/v10/oauth2/authorize?{urlencode(params)}"

    state = _build_invite_state(invite)
    return jsonify({
        'data': {
            'redirect_url': redirect_url,
            'state': state,
        },
        'meta': _response_meta(request_id),
    }), 200
|
||||
|
||||
|
||||
@bp.route('/public/invite/<token>/complete', methods=['POST'])
def complete_invite(token):
    """Finalize an invite: validate prerequisites, create the optional local
    user, grant media-server access, then clear all wizard session state.

    The local user creation and the access grant share one DB transaction:
    a failed grant rolls back the flushed (uncommitted) user row.
    """
    request_id = str(uuid4())
    invite = _get_invite(token)
    if not invite:
        return _error_response(request_id, 404, 'INVITE_NOT_FOUND', 'Invite not found.')

    prefix = f'invite_{invite.id}_'
    plex_user = session.get(f'{prefix}plex_user')
    discord_user = session.get(f'{prefix}discord_user')
    plex_conflict = session.get(f'{prefix}plex_conflict')  # read but unused here; cleared below

    allow_user_accounts = Setting.get_bool('ALLOW_USER_ACCOUNTS', False)
    account_created = session.get(f'{prefix}user_account_created', False)
    account_data = session.get(f'{prefix}user_account_data') if account_created else None

    has_plex_servers = any(server.service_type.name.upper() == 'PLEX' for server in invite.servers)
    oauth_enabled = Setting.get_bool('DISCORD_OAUTH_ENABLED', False)
    requires_discord = bool(invite.require_discord_auth)

    # Prerequisite checks — each returns a specific error code for the SPA.
    if has_plex_servers and not plex_user:
        return _error_response(request_id, 400, 'PLEX_REQUIRED', 'Please sign in with Plex before completing the invite.')

    if requires_discord and oauth_enabled and not discord_user:
        return _error_response(request_id, 400, 'DISCORD_REQUIRED', 'Discord account linking is required for this invite.')

    if allow_user_accounts and not account_data:
        return _error_response(request_id, 400, 'ACCOUNT_REQUIRED', 'Account details must be provided before completing the invite.')

    # Every non-Plex server step must have been marked completed.
    session_servers_completed = [
        session.get(f'{prefix}server_{server.id}_completed', False)
        for server in invite.servers
        if server.service_type.name.upper() != 'PLEX'
    ]
    if session_servers_completed and not all(session_servers_completed):
        return _error_response(request_id, 400, 'SERVERS_INCOMPLETE', 'Please finish configuring all servers before completing the invite.')

    new_local_user: Optional[User] = None

    try:
        if allow_user_accounts and account_data:
            new_local_user = User(
                userType=UserType.LOCAL,
                localUsername=account_data['username'],
                email=account_data['email'],
                created_at=utcnow(),
                used_invite_id=invite.id,
            )
            new_local_user.set_password(account_data['password'])
            db.session.add(new_local_user)
            # Flush (not commit) so the user gets an id but can still be
            # rolled back if the access grant below fails.
            db.session.flush()

            current_app.logger.info(
                "Created local user account %s for invite %s",
                account_data['username'],
                invite.id,
            )
            from app.utils.helpers import log_event
            # NOTE(review): event type says ADDED_FROM_PLEX for a local
            # account — confirm this is the intended audit event.
            log_event(
                EventType.MUM_USER_ADDED_FROM_PLEX,
                f"Local user account '{account_data['username']}' created via invite {invite.id}",
                invite_id=invite.id,
            )

        # Delegate the actual access grant; returns (success, result-or-error).
        success, result = invite_service.accept_invite_and_grant_access(
            invite=invite,
            plex_user_uuid=plex_user.get('uuid') if plex_user else None,
            plex_username=plex_user.get('username') if plex_user else None,
            plex_email=plex_user.get('email') if plex_user else None,
            plex_thumb=plex_user.get('thumb') if plex_user else None,
            discord_user_info=discord_user,
            ip_address=request.remote_addr,
            app_user=new_local_user,
        )

        if not success:
            if new_local_user:
                # Undo the flushed-but-uncommitted user row.
                db.session.rollback()
            return _error_response(request_id, 400, 'INVITE_COMPLETION_FAILED', str(result))

        username = (
            new_local_user.localUsername
            if new_local_user else
            (plex_user.get('username') if plex_user else 'User')
        )
        configured_servers = [
            {
                'name': server.server_nickname,
                'service_type': server.service_type.name.upper(),
                'access_url': _server_access_url(server),
            }
            for server in invite.servers
        ]

        # Wipe every wizard key for this invite now that it is consumed.
        session.pop(f'{prefix}plex_user', None)
        session.pop(f'{prefix}discord_user', None)
        session.pop(f'{prefix}plex_conflict', None)
        session.pop(f'{prefix}return_path', None)
        session.pop(f'{prefix}user_account_created', None)
        session.pop(f'{prefix}user_account_data', None)
        session.pop(f'{prefix}cross_server_prefs', None)
        session.pop(f'{prefix}app_user_id', None)
        for server in invite.servers:
            session.pop(f'{prefix}server_{server.id}_completed', None)
            session.pop(f'{prefix}server_{server.id}_credentials', None)

        state = _build_invite_state(invite)
        return jsonify({
            'data': {
                'username': username,
                'servers': configured_servers,
                'state': state,
            },
            'meta': _response_meta(request_id),
        }), 200
    except Exception as exc:
        current_app.logger.exception("Failed to complete invite %s: %s", invite.id, exc)
        db.session.rollback()
        return _error_response(request_id, 500, 'INVITE_COMPLETION_ERROR', 'Unexpected error completing the invite.')
|
||||
41
app/routes/public_api_v1/invites.py
Normal file
41
app/routes/public_api_v1/invites.py
Normal file
@@ -0,0 +1,41 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify
|
||||
|
||||
from app.routes.public_api_v1 import bp
|
||||
from app.models import Invite
|
||||
|
||||
|
||||
def _serialize_invite(invite: Invite):
    """Public-facing JSON view of an invite (no admin-only fields)."""
    expires = invite.expires_at.isoformat() if invite.expires_at else None
    return {
        'token': invite.token,
        'custom_path': invite.custom_path,
        'expires_at': expires,
        'max_uses': invite.max_uses,
        'current_uses': invite.current_uses,
        'is_active': invite.is_active,
        'grant_library_ids': invite.grant_library_ids,
        'allow_downloads': invite.allow_downloads,
    }
|
||||
|
||||
|
||||
@bp.route('/public/invite/<token>', methods=['GET'])
def validate_public_invite(token):
    """Validate an invite token/custom path and return its public details."""
    request_id = str(uuid4())
    matches = (Invite.token == token) | (Invite.custom_path == token)
    invite = Invite.query.filter(matches).first()

    if invite is None:
        error_body = {
            'error': {
                'code': 'INVITE_NOT_FOUND',
                'message': 'Invite not found.'
            },
            'meta': {'request_id': request_id}
        }
        return jsonify(error_body), 404

    success_body = {
        'data': _serialize_invite(invite),
        'meta': {
            'request_id': request_id,
            'deprecated': False
        }
    }
    return jsonify(success_body), 200
|
||||
24
app/routes/public_api_v1/me.py
Normal file
24
app/routes/public_api_v1/me.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from flask import jsonify
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app.routes.public_api_v1 import bp
|
||||
from .auth import _serialize_portal_user
|
||||
from app.utils.helpers import get_csrf_token
|
||||
|
||||
|
||||
@bp.route('/me', methods=['GET'])
@login_required
def current_portal_user():
    """Return the authenticated portal user plus a CSRF token for the SPA."""
    request_id = str(uuid4())
    body = {
        'data': {
            'user': _serialize_portal_user(current_user),
            'csrf_token': get_csrf_token()
        },
        'meta': {
            'request_id': request_id,
            'deprecated': False
        }
    }
    return jsonify(body), 200
|
||||
@@ -1,9 +1,10 @@
|
||||
from flask import Blueprint, render_template, request, current_app, flash, redirect, url_for
|
||||
from flask import Blueprint, request, current_app, flash, redirect, url_for, send_from_directory
|
||||
from flask_login import login_required, current_user
|
||||
from app.utils.helpers import setup_required, permission_required
|
||||
from app.services.media_service_manager import MediaServiceManager
|
||||
from app.services.media_service_factory import MediaServiceFactory
|
||||
from app.models import User, UserType, Setting
|
||||
import os
|
||||
|
||||
bp = Blueprint('streaming', __name__)
|
||||
|
||||
@@ -34,10 +35,20 @@ def index():
|
||||
streaming_refresh_interval_seconds = default_interval
|
||||
|
||||
current_app.logger.debug(f"Streaming page will use refresh interval: {streaming_refresh_interval_seconds} seconds.")
|
||||
|
||||
return render_template('streaming/index.html',
|
||||
title="Active Streams",
|
||||
streaming_refresh_interval=streaming_refresh_interval_seconds)
|
||||
|
||||
dist_path = os.path.join(current_app.root_path, 'static', 'dist')
|
||||
index_path = os.path.join(dist_path, 'index.html')
|
||||
|
||||
if not os.path.exists(index_path):
|
||||
current_app.logger.error("React SPA build not found at %s", index_path)
|
||||
return (
|
||||
"<h1>React App Not Built</h1>"
|
||||
"<p>The React admin interface has not been built yet.</p>"
|
||||
"<p>Please run: <code>cd frontend && npm run build</code></p>"
|
||||
), 500
|
||||
|
||||
current_app.logger.debug("Serving React SPA for streaming page")
|
||||
return send_from_directory(dist_path, 'index.html')
|
||||
|
||||
@bp.route('/streaming/partial')
|
||||
@login_required
|
||||
@@ -192,4 +203,4 @@ def sessions_partial():
|
||||
else:
|
||||
return render_template('streaming/_partials/sessions.html',
|
||||
sessions=active_sessions_data,
|
||||
summary_stats=summary_stats)
|
||||
summary_stats=summary_stats)
|
||||
|
||||
89
app/routes/websockets.py
Normal file
89
app/routes/websockets.py
Normal file
@@ -0,0 +1,89 @@
|
||||
"""WebSocket endpoints for real-time updates"""
|
||||
from flask import Blueprint, current_app
|
||||
from flask_socketio import emit, join_room, leave_room
|
||||
from flask_login import current_user
|
||||
from app.extensions import socketio
|
||||
from app.models import User, UserType
|
||||
from functools import wraps
|
||||
|
||||
bp = Blueprint('websockets', __name__)
|
||||
|
||||
|
||||
def authenticated_only(f):
    """Decorator: reject WebSocket events from unauthenticated clients.

    Returning False from a Flask-SocketIO handler refuses the event.
    """
    @wraps(f)
    def wrapped(*args, **kwargs):
        if current_user.is_authenticated:
            return f(*args, **kwargs)
        return False
    return wrapped
|
||||
|
||||
|
||||
@socketio.on('connect')
def handle_connect():
    """Accept or reject a new Socket.IO connection.

    Returning False tells Flask-SocketIO to refuse the connection.
    """
    if not current_user.is_authenticated:
        current_app.logger.warning("Unauthenticated WebSocket connection attempt")
        return False

    display_name = current_user.get_display_name()
    current_app.logger.info(f"WebSocket client connected: {display_name}")
    emit('connected', {'message': 'Connected to MUM WebSocket server'})
|
||||
|
||||
|
||||
@socketio.on('disconnect')
def handle_disconnect():
    """Log the disconnect for authenticated clients; anonymous ones are ignored."""
    if not current_user.is_authenticated:
        return
    current_app.logger.info(f"WebSocket client disconnected: {current_user.get_display_name()}")
|
||||
|
||||
|
||||
@socketio.on('subscribe_streaming')
@authenticated_only
def handle_subscribe_streaming():
    """Join the 'streaming_updates' room if the user may view streams."""
    display_name = current_user.get_display_name()
    if not current_user.has_permission('view_streaming'):
        current_app.logger.warning(f"User {display_name} attempted to subscribe to streaming without permission")
        return False

    join_room('streaming_updates')
    current_app.logger.info(f"User {display_name} subscribed to streaming updates")
    emit('subscribed', {'channel': 'streaming_updates'})
|
||||
|
||||
|
||||
@socketio.on('unsubscribe_streaming')
@authenticated_only
def handle_unsubscribe_streaming():
    """Leave the 'streaming_updates' room and acknowledge the client."""
    leave_room('streaming_updates')
    current_app.logger.info(
        f"User {current_user.get_display_name()} unsubscribed from streaming updates"
    )
    emit('unsubscribed', {'channel': 'streaming_updates'})
|
||||
|
||||
|
||||
def broadcast_streaming_update(active_count, summary_data=None, live_services=None):
|
||||
"""
|
||||
Broadcast streaming updates to all subscribed clients.
|
||||
This is called from the task_service when session monitoring completes.
|
||||
|
||||
Args:
|
||||
active_count: Number of active streaming sessions
|
||||
summary_data: Optional summary data (for dashboard card)
|
||||
live_services: Optional iterable of service types delivering live updates (e.g., websocket-backed)
|
||||
"""
|
||||
from datetime import datetime, timezone
|
||||
|
||||
payload = {
|
||||
'active_count': active_count,
|
||||
'timestamp': datetime.now(timezone.utc).isoformat(),
|
||||
}
|
||||
|
||||
if summary_data:
|
||||
payload['summary'] = summary_data
|
||||
|
||||
if live_services is not None:
|
||||
try:
|
||||
payload['live_services'] = sorted({str(service).lower() for service in live_services})
|
||||
except TypeError:
|
||||
payload['live_services'] = []
|
||||
|
||||
socketio.emit('streaming_update', payload, room='streaming_updates', namespace='/')
|
||||
current_app.logger.debug(f"Broadcasted streaming update: {active_count} active sessions")
|
||||
@@ -9,6 +9,7 @@ from typing import List, Dict, Any, Optional, Tuple
|
||||
from flask import current_app
|
||||
from app.services.base_media_service import BaseMediaService
|
||||
from app.models_media_services import ServiceType
|
||||
from app.models import User, UserType
|
||||
from app.utils.timeout_helper import get_api_timeout_with_fallback
|
||||
|
||||
|
||||
@@ -489,12 +490,21 @@ class JellyfinMediaService(BaseMediaService):
|
||||
# Handle Jellyfin thumbnails
|
||||
thumb_url = None
|
||||
item_id = now_playing.get('Id')
|
||||
series_image_tag = now_playing.get('SeriesPrimaryImageTag') or (now_playing.get('ImageTags') or {}).get('SeriesPrimary')
|
||||
primary_image_tag = now_playing.get('PrimaryImageTag') or (now_playing.get('ImageTags') or {}).get('Primary')
|
||||
|
||||
if item_id:
|
||||
# For episodes, prefer series poster; for movies, use primary image
|
||||
if media_type == 'Episode' and now_playing.get('SeriesId'):
|
||||
thumb_url = url_for('api.jellyfin_image_proxy', item_id=now_playing.get('SeriesId'), image_type='Primary')
|
||||
params = {'item_id': now_playing.get('SeriesId'), 'image_type': 'Primary'}
|
||||
if series_image_tag:
|
||||
params['image_tag'] = series_image_tag
|
||||
thumb_url = url_for('api.jellyfin_image_proxy', **params)
|
||||
else:
|
||||
thumb_url = url_for('api.jellyfin_image_proxy', item_id=item_id, image_type='Primary')
|
||||
params = {'item_id': item_id, 'image_type': 'Primary'}
|
||||
if primary_image_tag:
|
||||
params['image_tag'] = primary_image_tag
|
||||
thumb_url = url_for('api.jellyfin_image_proxy', **params)
|
||||
|
||||
is_transcoding = play_state.get('PlayMethod') == 'Transcode'
|
||||
|
||||
@@ -984,4 +994,4 @@ class JellyfinMediaService(BaseMediaService):
|
||||
def get_geoip_info(self, ip_address: str) -> Dict[str, Any]:
|
||||
"""Get GeoIP information for a given IP address"""
|
||||
# Use the base class implementation
|
||||
return super().get_geoip_info(ip_address)
|
||||
return super().get_geoip_info(ip_address)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# File: app/services/media_service_manager.py
|
||||
from typing import List, Dict, Any, Optional
|
||||
from typing import Any, Dict, Iterable, List, Optional, Set, Union
|
||||
from flask import current_app
|
||||
from app.models_media_services import MediaServer, MediaLibrary, ServiceType
|
||||
from app.models import User, UserType, Setting
|
||||
@@ -622,13 +622,52 @@ class MediaServiceManager:
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
def get_all_active_sessions() -> List[Dict[str, Any]]:
|
||||
"""Get active sessions from all servers"""
|
||||
current_app.logger.warning("MediaServiceManager.get_all_active_sessions() called - THIS MAKES API CALLS TO ALL SERVERS")
|
||||
def get_all_active_sessions(
|
||||
service_types: Optional[Iterable[Union[ServiceType, str]]] = None,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Get active sessions from all (or selected) servers"""
|
||||
type_filter: Optional[Set[ServiceType]] = None
|
||||
if service_types is not None:
|
||||
type_filter = set()
|
||||
for svc_type in service_types:
|
||||
if isinstance(svc_type, ServiceType):
|
||||
type_filter.add(svc_type)
|
||||
elif isinstance(svc_type, str):
|
||||
try:
|
||||
type_filter.add(ServiceType(svc_type.lower()))
|
||||
except ValueError:
|
||||
current_app.logger.warning(
|
||||
"MediaServiceManager: Ignoring unknown service type '%s' in get_all_active_sessions filter",
|
||||
svc_type,
|
||||
)
|
||||
else:
|
||||
current_app.logger.warning(
|
||||
"MediaServiceManager: Unsupported service type filter value %s (%s)",
|
||||
svc_type,
|
||||
type(svc_type),
|
||||
)
|
||||
|
||||
if type_filter:
|
||||
current_app.logger.warning(
|
||||
"MediaServiceManager.get_all_active_sessions() called for service types: %s - THIS MAKES API CALLS TO MATCHING SERVERS",
|
||||
", ".join(sorted(t.value for t in type_filter)),
|
||||
)
|
||||
else:
|
||||
current_app.logger.warning(
|
||||
"MediaServiceManager.get_all_active_sessions() called with no filter - THIS MAKES API CALLS TO ALL SERVERS"
|
||||
)
|
||||
|
||||
all_sessions = []
|
||||
|
||||
servers = MediaServiceManager.get_all_servers()
|
||||
current_app.logger.debug(f"MediaServiceManager: Found {len(servers)} servers to check for active sessions")
|
||||
if type_filter:
|
||||
servers = [server for server in servers if server.service_type in type_filter]
|
||||
|
||||
current_app.logger.debug(
|
||||
"MediaServiceManager: Found %d servers to check for active sessions (filter=%s)",
|
||||
len(servers),
|
||||
", ".join(sorted(t.value for t in type_filter)) if type_filter else "none",
|
||||
)
|
||||
|
||||
for server in servers:
|
||||
current_app.logger.warning(f"MediaServiceManager: Making API call to server '{server.server_nickname}' ({server.service_type.value}) at {server.url}")
|
||||
@@ -745,4 +784,4 @@ class MediaServiceManager:
|
||||
if str(lib_id) not in converted_ids: # Avoid duplicates
|
||||
converted_ids.append(str(lib_id)) # Fallback to original
|
||||
|
||||
return converted_ids
|
||||
return converted_ids
|
||||
|
||||
214
app/services/plex_websocket_monitor.py
Normal file
214
app/services/plex_websocket_monitor.py
Normal file
@@ -0,0 +1,214 @@
|
||||
"""Background Plex WebSocket monitor that triggers session processing on real-time events."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import ssl
|
||||
import threading
|
||||
import time
|
||||
from typing import Dict, List, Optional
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from flask import current_app
|
||||
from websocket import WebSocketApp
|
||||
|
||||
from app.models_media_services import MediaServer, ServiceType
|
||||
from app.services.media_service_manager import MediaServiceManager
|
||||
from app.services.task_service import _run_media_session_monitor
|
||||
|
||||
_MONITOR_INSTANCE: Optional["PlexWebsocketMonitor"] = None
|
||||
_MONITOR_LOCK = threading.Lock()
|
||||
|
||||
|
||||
class PlexWebsocketMonitor:
|
||||
"""Maintain WebSocket listeners for each configured Plex server."""
|
||||
|
||||
def __init__(self, app):
|
||||
self.app = app
|
||||
self.stop_event = threading.Event()
|
||||
self.lock = threading.Lock()
|
||||
self.threads: Dict[int, threading.Thread] = {}
|
||||
self.last_refresh_at: Dict[int, float] = {}
|
||||
self.logger = app.logger
|
||||
|
||||
def start(self) -> None:
|
||||
"""Spin up listeners for all active Plex servers."""
|
||||
with self.app.app_context():
|
||||
servers = MediaServiceManager.get_servers_by_type(ServiceType.PLEX, active_only=True)
|
||||
if not servers:
|
||||
self.logger.info("PlexWebsocketMonitor: No active Plex servers found; WebSocket listener not started.")
|
||||
return
|
||||
|
||||
self.logger.info(
|
||||
"PlexWebsocketMonitor: Starting WebSocket listeners for %d Plex server(s).",
|
||||
len(servers),
|
||||
)
|
||||
|
||||
for server in servers:
|
||||
self._start_for_server(server.id)
|
||||
|
||||
def _start_for_server(self, server_id: int) -> None:
|
||||
with self.lock:
|
||||
thread = self.threads.get(server_id)
|
||||
if thread and thread.is_alive():
|
||||
return
|
||||
|
||||
thread = threading.Thread(
|
||||
target=self._run_for_server,
|
||||
args=(server_id,),
|
||||
name=f"plex-ws-{server_id}",
|
||||
daemon=True,
|
||||
)
|
||||
self.threads[server_id] = thread
|
||||
thread.start()
|
||||
|
||||
def stop(self) -> None:
|
||||
self.stop_event.set()
|
||||
|
||||
def _run_for_server(self, server_id: int) -> None:
|
||||
backoff = 5
|
||||
max_backoff = 60
|
||||
|
||||
while not self.stop_event.is_set():
|
||||
with self.app.app_context():
|
||||
server = MediaServer.query.get(server_id)
|
||||
if not server or not server.is_active:
|
||||
self.logger.info(
|
||||
"PlexWebsocketMonitor: Server %s inactive or missing; retrying in 60s.",
|
||||
server_id,
|
||||
)
|
||||
if self.stop_event.wait(60):
|
||||
return
|
||||
continue
|
||||
|
||||
if not server.api_key or not server.url:
|
||||
self.logger.warning(
|
||||
"PlexWebsocketMonitor: Server %s missing URL or API token; retrying in 60s.",
|
||||
server.server_nickname,
|
||||
)
|
||||
if self.stop_event.wait(60):
|
||||
return
|
||||
continue
|
||||
|
||||
ws_url = self._build_websocket_url(server)
|
||||
headers = self._build_headers(server)
|
||||
|
||||
try:
|
||||
self.logger.debug(
|
||||
"PlexWebsocketMonitor: Connecting to %s for server %s",
|
||||
ws_url,
|
||||
server_id,
|
||||
)
|
||||
|
||||
ws_app = WebSocketApp(
|
||||
ws_url,
|
||||
header=headers,
|
||||
on_message=lambda ws_sock, msg: self._handle_message(server_id, msg),
|
||||
on_error=lambda ws_sock, err: self._handle_error(server_id, err),
|
||||
on_close=lambda ws_sock, status_code, msg: self._handle_close(server_id, status_code, msg),
|
||||
)
|
||||
|
||||
sslopt = {}
|
||||
if ws_url.startswith("wss://"):
|
||||
sslopt = {"cert_reqs": ssl.CERT_NONE}
|
||||
|
||||
ws_app.run_forever(sslopt=sslopt, ping_interval=30, ping_timeout=10)
|
||||
backoff = 5 # reset backoff after a successful run
|
||||
except Exception as exc: # pragma: no cover - Ws library exceptions
|
||||
with self.app.app_context():
|
||||
self.logger.error(
|
||||
"PlexWebsocketMonitor: Unexpected error for server %s: %s",
|
||||
server_id,
|
||||
exc,
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
if self.stop_event.wait(backoff):
|
||||
return
|
||||
backoff = min(backoff * 2, max_backoff)
|
||||
|
||||
with self.app.app_context():
|
||||
current_app.logger.info("PlexWebsocketMonitor: Stop signal received for server %s", server_id)
|
||||
|
||||
def _handle_message(self, server_id: int, message) -> None:
|
||||
text = message.decode("utf-8", errors="ignore") if isinstance(message, bytes) else str(message)
|
||||
now = time.time()
|
||||
last_refresh = self.last_refresh_at.get(server_id, 0)
|
||||
if now - last_refresh < 2:
|
||||
return
|
||||
self.last_refresh_at[server_id] = now
|
||||
|
||||
with self.app.app_context():
|
||||
current_app.logger.debug(
|
||||
"PlexWebsocketMonitor: Message from server %s (truncated): %s",
|
||||
server_id,
|
||||
text[:200],
|
||||
)
|
||||
try:
|
||||
_run_media_session_monitor(
|
||||
include_service_types={ServiceType.PLEX},
|
||||
source=f"plex-websocket:{server_id}",
|
||||
live_service_types={ServiceType.PLEX},
|
||||
)
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
current_app.logger.error(
|
||||
"PlexWebsocketMonitor: Failed to process Plex WebSocket event for server %s: %s",
|
||||
server_id,
|
||||
exc,
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
def _handle_error(self, server_id: int, error) -> None:
|
||||
with self.app.app_context():
|
||||
current_app.logger.warning(
|
||||
"PlexWebsocketMonitor: WebSocket error on server %s: %s",
|
||||
server_id,
|
||||
error,
|
||||
)
|
||||
|
||||
def _handle_close(self, server_id: int, status_code, message) -> None:
|
||||
with self.app.app_context():
|
||||
current_app.logger.info(
|
||||
"PlexWebsocketMonitor: WebSocket closed for server %s (code=%s, message=%s)",
|
||||
server_id,
|
||||
status_code,
|
||||
message,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _build_websocket_url(server: MediaServer) -> str:
|
||||
base_url = server.url.rstrip('/')
|
||||
parsed = urlparse(base_url)
|
||||
scheme = 'wss' if parsed.scheme == 'https' else 'ws'
|
||||
netloc = parsed.netloc or parsed.path
|
||||
path = parsed.path.rstrip('/')
|
||||
if not netloc:
|
||||
raise ValueError(f"Invalid Plex server URL: {server.url}")
|
||||
if path:
|
||||
path = f"{path}/:/websockets/notifications"
|
||||
else:
|
||||
path = "/:/websockets/notifications"
|
||||
token_query = f"?X-Plex-Token={server.api_key}"
|
||||
return f"{scheme}://{netloc}{path}{token_query}"
|
||||
|
||||
@staticmethod
|
||||
def _build_headers(server: MediaServer) -> List[str]:
|
||||
return [
|
||||
f"X-Plex-Token: {server.api_key}",
|
||||
"X-Plex-Client-Identifier: mum-admin",
|
||||
"X-Plex-Device: MUM",
|
||||
"X-Plex-Device-Name: MUM",
|
||||
"X-Plex-Product: Multimedia User Manager",
|
||||
"X-Plex-Version: 1.0",
|
||||
]
|
||||
|
||||
|
||||
def start_plex_websocket_monitor(app) -> None:
|
||||
"""Start the Plex WebSocket monitor if it isn't running yet."""
|
||||
global _MONITOR_INSTANCE
|
||||
with _MONITOR_LOCK:
|
||||
if _MONITOR_INSTANCE is None:
|
||||
monitor = PlexWebsocketMonitor(app)
|
||||
_MONITOR_INSTANCE = monitor
|
||||
monitor.start()
|
||||
else:
|
||||
app.logger.debug("PlexWebsocketMonitor: Already active; skipping start.")
|
||||
@@ -1,148 +1,328 @@
|
||||
# File: app/services/task_service.py
|
||||
from flask import current_app
|
||||
from app.extensions import scheduler
|
||||
from app.extensions import scheduler
|
||||
from app.models import Setting, EventType, User, UserType
|
||||
from app.models_media_services import ServiceType, MediaStreamHistory
|
||||
from app.models_media_services import MediaServer, MediaStreamHistory, ServiceType
|
||||
from app.utils.helpers import log_event
|
||||
from . import user_service # user_service is needed for deleting users
|
||||
from . import user_service # user_service is needed for deleting users
|
||||
from app.services.media_service_manager import MediaServiceManager
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from app.extensions import db
|
||||
from typing import Any, Dict, Iterable, Optional, Set, Union
|
||||
|
||||
_active_stream_sessions = {}
|
||||
# session_key -> {'history_id': int, 'service_type': str, 'server_id': Optional[int]}
|
||||
_active_stream_sessions: Dict[str, Dict[str, Any]] = {}
|
||||
|
||||
def _normalize_service_type_set(
|
||||
types: Optional[Iterable[Union[ServiceType, str]]]
|
||||
) -> Optional[Set[ServiceType]]:
|
||||
if types is None:
|
||||
return None
|
||||
normalized: Set[ServiceType] = set()
|
||||
for svc_type in types:
|
||||
if isinstance(svc_type, ServiceType):
|
||||
normalized.add(svc_type)
|
||||
elif isinstance(svc_type, str):
|
||||
try:
|
||||
normalized.add(ServiceType(svc_type.lower()))
|
||||
except ValueError:
|
||||
current_app.logger.warning(
|
||||
"task_service: Ignoring unknown service type '%s' in filter",
|
||||
svc_type,
|
||||
)
|
||||
else:
|
||||
current_app.logger.warning(
|
||||
"task_service: Unsupported service type filter value %s (%s)",
|
||||
svc_type,
|
||||
type(svc_type),
|
||||
)
|
||||
return normalized
|
||||
|
||||
def _get_total_tracked_session_count() -> int:
|
||||
return len(_active_stream_sessions)
|
||||
|
||||
# --- Scheduled Tasks ---
|
||||
|
||||
def monitor_media_sessions_task():
|
||||
"""
|
||||
Statefully monitors media sessions from all services (Plex, Jellyfin, etc.), with corrected session tracking and duration calculation.
|
||||
- Creates a new MediaStreamHistory record when a new session starts.
|
||||
- Continuously updates the view offset (progress) on the SAME record for an ongoing session.
|
||||
- Correctly calculates final playback duration from the last known viewOffset when the session stops.
|
||||
- Enforces "No 4K Transcoding" user setting with improved detection.
|
||||
Background scheduler entry point. Delegates to _run_media_session_monitor with the standard
|
||||
configuration (exclude Plex, which is handled by the WebSocket monitor).
|
||||
"""
|
||||
with scheduler.app.app_context():
|
||||
_run_media_session_monitor(
|
||||
source="scheduler",
|
||||
exclude_service_types={ServiceType.PLEX},
|
||||
)
|
||||
|
||||
def _run_media_session_monitor(
|
||||
include_service_types: Optional[Iterable[Union[ServiceType, str]]] = None,
|
||||
exclude_service_types: Optional[Iterable[Union[ServiceType, str]]] = None,
|
||||
source: str = "manual",
|
||||
live_service_types: Optional[Iterable[Union[ServiceType, str]]] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Core logic for processing active media sessions, parameterised so it can be invoked from the
|
||||
APScheduler job as well as real-time WebSocket monitors (e.g. Plex).
|
||||
"""
|
||||
global _active_stream_sessions
|
||||
with scheduler.app.app_context():
|
||||
current_app.logger.info("=== MEDIA SESSION MONITOR TASK STARTING ===")
|
||||
|
||||
# Check for any active media servers from the database
|
||||
all_servers = MediaServiceManager.get_all_servers(active_only=True)
|
||||
current_app.logger.debug(f"Found {len(all_servers)} active media servers in database")
|
||||
|
||||
for server in all_servers:
|
||||
current_app.logger.debug(f"Server - Name: {server.server_nickname}, Type: {server.service_type.value}, Active: {server.is_active}")
|
||||
|
||||
if not all_servers:
|
||||
current_app.logger.warning("No active media servers configured in the database. Skipping task.")
|
||||
return
|
||||
|
||||
try:
|
||||
# This gets sessions from all active servers (Plex, Jellyfin, etc.)
|
||||
current_app.logger.debug("Calling MediaServiceManager.get_all_active_sessions()...")
|
||||
active_sessions = MediaServiceManager.get_all_active_sessions()
|
||||
now_utc = datetime.now(timezone.utc)
|
||||
current_app.logger.debug(f"Retrieved {len(active_sessions)} active sessions from MediaServiceManager")
|
||||
|
||||
if len(active_sessions) == 0:
|
||||
current_app.logger.debug("No active sessions found - this could be normal if no one is streaming")
|
||||
else:
|
||||
current_app.logger.debug(f"Active sessions details:")
|
||||
for i, session in enumerate(active_sessions):
|
||||
if isinstance(session, dict):
|
||||
current_app.logger.debug(f" Session {i+1}: Jellyfin session ID {session.get('Id', 'unknown')}")
|
||||
else:
|
||||
current_app.logger.debug(f" Session {i+1}: Plex session key {getattr(session, 'sessionKey', 'unknown')}")
|
||||
|
||||
current_app.logger.info(f"Found {len(active_sessions)} active sessions across all servers.")
|
||||
source_label = source.upper()
|
||||
current_app.logger.info("=== MEDIA SESSION MONITOR (%s) STARTING ===", source_label)
|
||||
|
||||
# Handle both Plex and Jellyfin session formats
|
||||
current_sessions_dict = {}
|
||||
for session in active_sessions:
|
||||
# Extract session key based on session type
|
||||
target_include = _normalize_service_type_set(include_service_types)
|
||||
target_exclude = _normalize_service_type_set(exclude_service_types)
|
||||
live_service_filter = _normalize_service_type_set(live_service_types)
|
||||
|
||||
# Check for any active media servers from the database
|
||||
all_servers = MediaServiceManager.get_all_servers(active_only=True)
|
||||
target_servers = []
|
||||
for server in all_servers:
|
||||
include_server = True
|
||||
if target_include is not None:
|
||||
include_server = server.service_type in target_include
|
||||
if include_server and target_exclude is not None:
|
||||
include_server = server.service_type not in target_exclude
|
||||
if include_server:
|
||||
target_servers.append(server)
|
||||
|
||||
current_app.logger.debug(
|
||||
"[%s] Considering %d/%d active media servers for monitoring",
|
||||
source_label,
|
||||
len(target_servers),
|
||||
len(all_servers),
|
||||
)
|
||||
|
||||
for server in target_servers:
|
||||
current_app.logger.debug(
|
||||
"[%s] Server - Name: %s, Type: %s, Active: %s",
|
||||
source_label,
|
||||
server.server_nickname,
|
||||
server.service_type.value,
|
||||
server.is_active,
|
||||
)
|
||||
|
||||
if not target_servers:
|
||||
current_app.logger.warning(
|
||||
"[%s] No matching active media servers configured in the database. Skipping monitor.",
|
||||
source_label,
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
target_service_types = {server.service_type for server in target_servers}
|
||||
live_services_payload = (
|
||||
sorted(service.value for service in live_service_filter)
|
||||
if live_service_filter
|
||||
else []
|
||||
)
|
||||
|
||||
# This gets sessions from the targeted servers
|
||||
current_app.logger.debug(
|
||||
"[%s] Calling MediaServiceManager.get_all_active_sessions() for %s",
|
||||
source_label,
|
||||
", ".join(sorted(s.value for s in target_service_types)),
|
||||
)
|
||||
active_sessions = MediaServiceManager.get_all_active_sessions(target_service_types)
|
||||
now_utc = datetime.now(timezone.utc)
|
||||
current_app.logger.debug(
|
||||
"[%s] Retrieved %d active sessions from MediaServiceManager",
|
||||
source_label,
|
||||
len(active_sessions),
|
||||
)
|
||||
|
||||
if not active_sessions:
|
||||
current_app.logger.debug(
|
||||
"[%s] No active sessions found - this could be normal if no one is streaming",
|
||||
source_label,
|
||||
)
|
||||
else:
|
||||
current_app.logger.debug("[%s] Active sessions details:", source_label)
|
||||
for index, session in enumerate(active_sessions, start=1):
|
||||
if isinstance(session, dict):
|
||||
# Jellyfin session (dict format)
|
||||
session_key = session.get('Id')
|
||||
current_app.logger.debug(
|
||||
" Session %d: Jellyfin session ID %s",
|
||||
index,
|
||||
session.get('Id', 'unknown'),
|
||||
)
|
||||
else:
|
||||
# Plex session (object format)
|
||||
session_key = getattr(session, 'sessionKey', None)
|
||||
|
||||
if session_key:
|
||||
current_sessions_dict[session_key] = session
|
||||
else:
|
||||
session_type = "Jellyfin" if isinstance(session, dict) else "Plex"
|
||||
current_app.logger.warning(f"Session missing key: {session_type} - {type(session)}")
|
||||
|
||||
current_session_keys = set(current_sessions_dict.keys())
|
||||
current_app.logger.debug(
|
||||
" Session %d: Plex session key %s",
|
||||
index,
|
||||
getattr(session, 'sessionKey', 'unknown'),
|
||||
)
|
||||
|
||||
# Step 1: Check for stopped streams
|
||||
stopped_session_keys = set(_active_stream_sessions.keys()) - current_session_keys
|
||||
if stopped_session_keys:
|
||||
current_app.logger.info(f"Found {len(stopped_session_keys)} stopped sessions: {list(stopped_session_keys)}")
|
||||
for session_key in stopped_session_keys:
|
||||
stream_history_id = _active_stream_sessions.pop(session_key, None)
|
||||
current_app.logger.info(
|
||||
"[%s] Found %d active sessions across monitored servers.",
|
||||
source_label,
|
||||
len(active_sessions),
|
||||
)
|
||||
|
||||
# Handle both Plex and Jellyfin session formats
|
||||
current_sessions_dict: Dict[str, Any] = {}
|
||||
session_service_map: Dict[str, str] = {}
|
||||
session_server_map: Dict[str, Optional[int]] = {}
|
||||
target_service_type_values = {svc.value for svc in target_service_types}
|
||||
server_lookup = {server.id: server for server in target_servers}
|
||||
|
||||
for session in active_sessions:
|
||||
if isinstance(session, dict):
|
||||
session_key = session.get('Id')
|
||||
service_type_value = session.get('service_type') or ServiceType.JELLYFIN.value
|
||||
server_id = session.get('server_id')
|
||||
else:
|
||||
session_key = getattr(session, 'sessionKey', None)
|
||||
service_type_attr = getattr(session, 'service_type', None)
|
||||
if isinstance(service_type_attr, ServiceType):
|
||||
service_type_value = service_type_attr.value
|
||||
elif isinstance(service_type_attr, str):
|
||||
service_type_value = service_type_attr
|
||||
else:
|
||||
service_type_value = ServiceType.PLEX.value
|
||||
server_id = getattr(session, 'server_id', None)
|
||||
|
||||
if session_key:
|
||||
session_key_str = str(session_key)
|
||||
current_sessions_dict[session_key_str] = session
|
||||
session_service_map[session_key_str] = str(service_type_value).lower()
|
||||
if isinstance(server_id, int):
|
||||
session_server_map[session_key_str] = server_id
|
||||
elif isinstance(server_id, str) and server_id.isdigit():
|
||||
session_server_map[session_key_str] = int(server_id)
|
||||
else:
|
||||
session_server_map[session_key_str] = None
|
||||
else:
|
||||
session_type = "Jellyfin" if isinstance(session, dict) else "Plex"
|
||||
current_app.logger.warning(
|
||||
"[%s] Session missing key: %s - %s",
|
||||
source_label,
|
||||
session_type,
|
||||
type(session),
|
||||
)
|
||||
|
||||
current_session_keys = set(current_sessions_dict.keys())
|
||||
|
||||
# Step 1: Check for stopped streams (only for monitored service types)
|
||||
tracked_keys_in_scope = {
|
||||
key
|
||||
for key, meta in _active_stream_sessions.items()
|
||||
if meta.get('service_type') in target_service_type_values
|
||||
}
|
||||
stopped_session_keys = tracked_keys_in_scope - current_session_keys
|
||||
if stopped_session_keys:
|
||||
current_app.logger.info(
|
||||
"[%s] Found %d stopped sessions: %s",
|
||||
source_label,
|
||||
len(stopped_session_keys),
|
||||
list(stopped_session_keys),
|
||||
)
|
||||
for session_key in stopped_session_keys:
|
||||
session_meta = _active_stream_sessions.pop(session_key, None)
|
||||
if session_meta:
|
||||
stream_history_id = session_meta.get('history_id')
|
||||
if stream_history_id:
|
||||
history_record = db.session.get(MediaStreamHistory, stream_history_id)
|
||||
if history_record and not history_record.stopped_at:
|
||||
final_duration = history_record.view_offset_at_end_seconds
|
||||
history_record.duration_seconds = final_duration if final_duration and final_duration > 0 else 0
|
||||
history_record.duration_seconds = (
|
||||
final_duration if final_duration and final_duration > 0 else 0
|
||||
)
|
||||
history_record.stopped_at = now_utc
|
||||
current_app.logger.info(f"DURATION DEBUG: Session {session_key} stopped - view_offset_at_end_seconds: {history_record.view_offset_at_end_seconds}s, final duration_seconds: {history_record.duration_seconds}s")
|
||||
current_app.logger.info(f"Marked session {session_key} (DB ID: {stream_history_id}) as stopped. Final duration: {history_record.duration_seconds}s.")
|
||||
current_app.logger.info(
|
||||
"[%s] DURATION DEBUG: Session %s stopped - view_offset_at_end_seconds: %ss, final duration_seconds: %ss",
|
||||
source_label,
|
||||
session_key,
|
||||
history_record.view_offset_at_end_seconds,
|
||||
history_record.duration_seconds,
|
||||
)
|
||||
current_app.logger.info(
|
||||
"[%s] Marked session %s (DB ID: %s) as stopped. Final duration: %ss.",
|
||||
source_label,
|
||||
session_key,
|
||||
stream_history_id,
|
||||
history_record.duration_seconds,
|
||||
)
|
||||
else:
|
||||
current_app.logger.warning(f"Could not find or already stopped history record for DB ID {stream_history_id}")
|
||||
|
||||
# Step 2: Check for new and ongoing streams
|
||||
if not current_sessions_dict:
|
||||
current_app.logger.info("No new or ongoing sessions to process.")
|
||||
else:
|
||||
current_app.logger.info(f"Processing {len(current_sessions_dict)} new or ongoing sessions...")
|
||||
current_app.logger.warning(
|
||||
"[%s] Could not find or already stopped history record for DB ID %s",
|
||||
source_label,
|
||||
stream_history_id,
|
||||
)
|
||||
else:
|
||||
current_app.logger.debug(
|
||||
"[%s] Session metadata missing when attempting to stop session %s",
|
||||
source_label,
|
||||
session_key,
|
||||
)
|
||||
|
||||
# Step 2: Check for new and ongoing streams
|
||||
if not current_sessions_dict:
|
||||
current_app.logger.info("[%s] No new or ongoing sessions to process.", source_label)
|
||||
else:
|
||||
current_app.logger.info(
|
||||
"[%s] Processing %d new or ongoing sessions...",
|
||||
source_label,
|
||||
len(current_sessions_dict),
|
||||
)
|
||||
|
||||
# Import MediaServer for both Plex and Jellyfin session handling
|
||||
from app.models_media_services import MediaServer
|
||||
|
||||
for session_key, session in current_sessions_dict.items():
|
||||
service_type_value = session_service_map.get(session_key, ServiceType.PLEX.value)
|
||||
try:
|
||||
service_type_enum = ServiceType(service_type_value)
|
||||
except ValueError:
|
||||
service_type_enum = ServiceType.JELLYFIN if isinstance(session, dict) else ServiceType.PLEX
|
||||
|
||||
server_id = session_server_map.get(session_key)
|
||||
current_server = None
|
||||
if server_id is not None:
|
||||
current_server = server_lookup.get(server_id)
|
||||
if not current_server:
|
||||
current_server = next(
|
||||
(srv for srv in target_servers if srv.service_type == service_type_enum),
|
||||
None,
|
||||
)
|
||||
|
||||
if not current_server:
|
||||
current_app.logger.warning(
|
||||
"[%s] Could not determine media server for session %s (service=%s). Skipping.",
|
||||
source_label,
|
||||
session_key,
|
||||
service_type_enum.value,
|
||||
)
|
||||
continue
|
||||
|
||||
# Handle different session formats for user lookup
|
||||
mum_user = None
|
||||
user_media_access = None
|
||||
|
||||
if isinstance(session, dict):
|
||||
# Jellyfin session - look up by username
|
||||
if service_type_enum == ServiceType.JELLYFIN and isinstance(session, dict):
|
||||
jellyfin_username = session.get('UserName')
|
||||
if jellyfin_username:
|
||||
# Find service user for Jellyfin username on the correct server
|
||||
jellyfin_server = MediaServer.query.filter_by(service_type=ServiceType.JELLYFIN).first()
|
||||
if jellyfin_server:
|
||||
user_media_access = User.query.filter_by(userType=UserType.SERVICE).filter_by(
|
||||
server_id=jellyfin_server.id,
|
||||
external_username=jellyfin_username
|
||||
).first()
|
||||
if user_media_access:
|
||||
# Check if it's linked to a local user account
|
||||
current_app.logger.debug(f"LINKED: Found service user for Jellyfin username '{jellyfin_username}' (ID: {user_media_access.id})")
|
||||
current_app.logger.debug(f"LINKED: linkedUserId = {user_media_access.linkedUserId}")
|
||||
current_app.logger.debug(f"LINKED: external_username = {user_media_access.external_username}")
|
||||
current_app.logger.debug(f"LINKED: server = {user_media_access.server.server_nickname}")
|
||||
|
||||
# In unified model, get linked user via linkedUserId
|
||||
mum_user = None
|
||||
if user_media_access.linkedUserId:
|
||||
mum_user = User.query.filter_by(userType=UserType.LOCAL, uuid=user_media_access.linkedUserId).first()
|
||||
current_app.logger.debug(f"LINKED: linked user = {mum_user}")
|
||||
|
||||
if not mum_user:
|
||||
current_app.logger.info(f"Found standalone service user for Jellyfin username '{jellyfin_username}' (ID: {user_media_access.id}). Processing as standalone user.")
|
||||
else:
|
||||
current_app.logger.info(f"Found linked service user for Jellyfin username '{jellyfin_username}' (ID: {user_media_access.id}) linked to local user (ID: {mum_user.id}, username: {mum_user.localUsername}). Processing as linked user.")
|
||||
user_media_access = User.query.filter_by(userType=UserType.SERVICE).filter_by(
|
||||
server_id=current_server.id,
|
||||
external_username=jellyfin_username
|
||||
).first()
|
||||
if user_media_access:
|
||||
current_app.logger.debug(f"LINKED: Found service user for Jellyfin username '{jellyfin_username}' (ID: {user_media_access.id})")
|
||||
current_app.logger.debug(f"LINKED: linkedUserId = {user_media_access.linkedUserId}")
|
||||
current_app.logger.debug(f"LINKED: external_username = {user_media_access.external_username}")
|
||||
current_app.logger.debug(f"LINKED: server = {user_media_access.server.server_nickname}")
|
||||
|
||||
mum_user = None
|
||||
if user_media_access.linkedUserId:
|
||||
mum_user = User.query.filter_by(userType=UserType.LOCAL, uuid=user_media_access.linkedUserId).first()
|
||||
current_app.logger.debug(f"LINKED: linked user = {mum_user}")
|
||||
|
||||
if not mum_user:
|
||||
current_app.logger.info(f"Found standalone service user for Jellyfin username '{jellyfin_username}' (ID: {user_media_access.id}). Processing as standalone user.")
|
||||
else:
|
||||
current_app.logger.warning(f"No service user found for Jellyfin username '{jellyfin_username}' on server '{jellyfin_server.server_nickname}'. Skipping session.")
|
||||
continue
|
||||
current_app.logger.info(f"Found linked service user for Jellyfin username '{jellyfin_username}' (ID: {user_media_access.id}) linked to local user (ID: {mum_user.id}, username: {mum_user.localUsername}). Processing as linked user.")
|
||||
else:
|
||||
current_app.logger.warning(f"No Jellyfin server configured. Skipping session {session_key}.")
|
||||
current_app.logger.warning(f"No service user found for Jellyfin username '{jellyfin_username}' on server '{current_server.server_nickname}'. Skipping session.")
|
||||
continue
|
||||
else:
|
||||
current_app.logger.warning(f"Jellyfin session {session_key} is missing UserName. Skipping.")
|
||||
continue
|
||||
else:
|
||||
elif service_type_enum == ServiceType.PLEX:
|
||||
# Plex session - look up by user ID via service user
|
||||
user_id_from_session = None
|
||||
|
||||
@@ -160,7 +340,7 @@ def monitor_media_sessions_task():
|
||||
|
||||
if user_id_from_session:
|
||||
# Look up user by external_user_id in service user for Plex server
|
||||
plex_server = MediaServer.query.filter_by(service_type=ServiceType.PLEX).first()
|
||||
plex_server = current_server
|
||||
if plex_server:
|
||||
user_media_access = User.query.filter_by(userType=UserType.SERVICE).filter_by(
|
||||
server_id=plex_server.id,
|
||||
@@ -192,6 +372,14 @@ def monitor_media_sessions_task():
|
||||
else:
|
||||
current_app.logger.warning(f"Could not extract user ID from Plex session {session_key}. Skipping.")
|
||||
continue
|
||||
else:
|
||||
current_app.logger.debug(
|
||||
"[%s] Unsupported service type %s for session %s. Skipping.",
|
||||
source_label,
|
||||
service_type_enum.value,
|
||||
session_key,
|
||||
)
|
||||
continue
|
||||
|
||||
# Process session for user
|
||||
|
||||
@@ -264,20 +452,6 @@ def monitor_media_sessions_task():
|
||||
# Try alternative fields that might contain library information
|
||||
library_name = now_playing.get('ChannelName', None) or now_playing.get('CollectionType', None)
|
||||
|
||||
# Determine which server this session belongs to
|
||||
if isinstance(session, dict):
|
||||
# Jellyfin session - find Jellyfin server
|
||||
jellyfin_server = MediaServer.query.filter_by(service_type=ServiceType.JELLYFIN).first()
|
||||
current_server = jellyfin_server
|
||||
else:
|
||||
# Plex session - find Plex server
|
||||
plex_server = MediaServer.query.filter_by(service_type=ServiceType.PLEX).first()
|
||||
current_server = plex_server
|
||||
|
||||
if not current_server:
|
||||
current_app.logger.warning(f"Could not find server for session {session_key}. Skipping.")
|
||||
continue
|
||||
|
||||
# Safety check to ensure we have either a linked user or standalone user
|
||||
if not mum_user and not user_media_access:
|
||||
current_app.logger.warning(f"No user found for session {session_key}. Skipping.")
|
||||
@@ -325,14 +499,22 @@ def monitor_media_sessions_task():
|
||||
current_app.logger.debug(f"About to flush database session...")
|
||||
db.session.flush() # Flush to get the ID
|
||||
|
||||
_active_stream_sessions[session_key] = new_history_record.id
|
||||
_active_stream_sessions[session_key] = {
|
||||
"history_id": new_history_record.id,
|
||||
"service_type": service_type_enum.value,
|
||||
"server_id": current_server.id if current_server else None,
|
||||
}
|
||||
current_app.logger.debug(f"Successfully created MediaStreamHistory record (ID: {new_history_record.id}) for session {session_key}.")
|
||||
current_app.logger.debug(f"Added session {session_key} to _active_stream_sessions tracking")
|
||||
|
||||
# If the session is ongoing, update its progress
|
||||
else:
|
||||
current_app.logger.debug(f"Updating existing session {session_key}")
|
||||
history_record_id = _active_stream_sessions.get(session_key)
|
||||
session_meta = _active_stream_sessions.get(session_key)
|
||||
if session_meta is not None:
|
||||
session_meta["server_id"] = current_server.id if current_server else session_meta.get("server_id")
|
||||
session_meta["service_type"] = service_type_enum.value
|
||||
history_record_id = session_meta.get("history_id") if session_meta else None
|
||||
if history_record_id:
|
||||
current_app.logger.debug(f"Found history record ID {history_record_id} for session {session_key}")
|
||||
history_record = db.session.get(MediaStreamHistory, history_record_id)
|
||||
@@ -371,11 +553,26 @@ def monitor_media_sessions_task():
|
||||
current_app.logger.debug("About to commit all database changes...")
|
||||
db.session.commit()
|
||||
current_app.logger.debug("Database commit successful!")
|
||||
current_app.logger.info("=== MEDIA SESSION MONITOR TASK FINISHED ===")
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
current_app.logger.error(f"Fatal error in monitor_plex_sessions_task: {e}", exc_info=True)
|
||||
|
||||
# Broadcast WebSocket update with current active session count
|
||||
try:
|
||||
from app.routes.websockets import broadcast_streaming_update
|
||||
active_count = _get_total_tracked_session_count()
|
||||
broadcast_streaming_update(active_count, live_services=live_services_payload)
|
||||
current_app.logger.debug(f"Broadcasted WebSocket update: {active_count} active sessions")
|
||||
except Exception as ws_error:
|
||||
current_app.logger.warning(f"Failed to broadcast WebSocket update: {ws_error}")
|
||||
|
||||
current_app.logger.info("=== MEDIA SESSION MONITOR (%s) FINISHED ===", source_label)
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
current_app.logger.error(
|
||||
"Fatal error in media session monitor (%s): %s",
|
||||
source_label,
|
||||
e,
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
def check_user_access_expirations_task():
|
||||
"""
|
||||
|
||||
@@ -119,42 +119,6 @@ body {
|
||||
@apply font-sans;
|
||||
}
|
||||
|
||||
/* Default state (no .htmx-request on button) */
|
||||
button:not(.htmx-request) > .sync-loader.htmx-indicator {
|
||||
display: none !important; /* Loader hidden */
|
||||
}
|
||||
button:not(.htmx-request) > .sync-icon.default-icon-state {
|
||||
display: inline-flex !important; /* Icon shown */
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
/* Loading state (button has .htmx-request) */
|
||||
button.htmx-request > .sync-loader.htmx-indicator {
|
||||
display: inline-block !important; /* Loader shown */
|
||||
}
|
||||
button.htmx-request > .sync-icon.default-icon-state {
|
||||
display: none !important; /* Icon hidden */
|
||||
}
|
||||
|
||||
/* The .sync-text span has no specific rules here, so it should always inherit
|
||||
its default display (inline for a span) or be influenced by the parent button's flex properties.
|
||||
DaisyUI's .btn class usually makes children align nicely.
|
||||
*/
|
||||
|
||||
/* --- General HTMX Indicator Rules (can be kept as fallbacks for other indicators) --- */
|
||||
.htmx-indicator {
|
||||
display: none; /* Start transparent for any transition effects */
|
||||
/* display:none; /* This general rule can sometimes be too broad; let specific rules above control button children */
|
||||
}
|
||||
.htmx-request .htmx-indicator{
|
||||
display: inline-block;
|
||||
/* display:inline-block; /* Again, let specific rules handle display for button children */
|
||||
}
|
||||
.htmx-request.htmx-indicator{ /* If the element IS the indicator and has htmx-request */
|
||||
display: inline-block;
|
||||
}
|
||||
/* --- End General --- */
|
||||
|
||||
/* --- Custom Animations & Utilities --- */
|
||||
|
||||
/* 1. Define Keyframes (standard CSS, Tailwind will include this) */
|
||||
|
||||
File diff suppressed because one or more lines are too long
BIN
app/static/favicon.ico
Normal file
BIN
app/static/favicon.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 70 B |
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user