add structlog

This commit is contained in:
Markbeep
2025-07-01 19:52:43 +02:00
parent ec4d905bd9
commit d734b4f12d
19 changed files with 819 additions and 817 deletions

View File

@@ -24,7 +24,7 @@ RUN chmod +x /bin/tailwindcss
COPY --from=0 /app/node_modules/ node_modules/
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
COPY uv.lock pyproject.toml /app
COPY uv.lock pyproject.toml /app/
RUN uv sync --frozen --no-cache
COPY alembic/ alembic/

View File

@@ -27,31 +27,7 @@ target_metadata = models.BaseModel.metadata
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
def run_migrations() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
@@ -72,7 +48,4 @@ def run_migrations_online() -> None:
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
run_migrations()

View File

@@ -11,6 +11,7 @@ from sqlmodel import Session, select
from app.internal.auth.config import LoginTypeEnum, auth_config
from app.internal.models import GroupEnum, User
from app.util.db import get_session
from app.util.log import logger
class DetailedUser(User):
@@ -34,6 +35,11 @@ def raise_for_invalid_password(
min_password_length = auth_config.get_min_password_length(session)
if not len(password) >= min_password_length:
logger.warning(
"Password does not meet minimum length requirement",
min_length=min_password_length,
actual_length=len(password),
)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Password must be at least {min_password_length} characters long",

View File

@@ -1,5 +1,4 @@
import asyncio
import logging
import time
from datetime import datetime
from typing import Any, Literal, Optional
@@ -10,8 +9,7 @@ from aiohttp import ClientSession
from sqlmodel import Session, col, select
from app.internal.models import BookRequest
logger = logging.getLogger(__name__)
from app.util.log import logger
REFETCH_TTL = 60 * 60 * 24 * 7 # 1 week
@@ -56,7 +54,10 @@ async def _get_audnexus_book(
) as response:
if not response.ok:
logger.warning(
f"Failed to fetch book with ASIN {asin} from Audnexus: {response.status}: {response.reason}"
"Failed to fetch book from Audnexus",
asin=asin,
status=response.status,
reason=response.reason,
)
return None
book = await response.json()
@@ -85,7 +86,10 @@ async def _get_audimeta_book(
) as response:
if not response.ok:
logger.warning(
f"Failed to fetch book with ASIN {asin} from Audimeta: {response.status}: {response.reason}"
"Failed to fetch book from Audimeta",
asin=asin,
status=response.status,
reason=response.reason,
)
return None
book = await response.json()
@@ -112,9 +116,7 @@ async def get_book_by_asin(
book = await _get_audnexus_book(session, asin, audible_region)
if book:
return book
logger.warning(
f"Failed to fetch book with ASIN {asin} from both Audnexus and Audimeta. "
)
logger.warning("Failed to fetch book", asin=asin, region=audible_region)
class CacheQuery(pydantic.BaseModel, frozen=True):

View File

@@ -1,12 +1,10 @@
import logging
from typing import Any, Optional
from pydantic import BaseModel
from sqlmodel import Session
from app.util.cache import StringConfigCache
logger = logging.getLogger(__name__)
from app.util.log import logger
class IndexerConfiguration[T: (str, int, bool, float, None)](BaseModel):
@@ -82,7 +80,7 @@ def create_valued_configuration(
configurations = vars(config)
for key, _value in configurations.items():
if not isinstance(_value, IndexerConfiguration):
logger.debug("Skipping %s", key)
logger.debug("Skipping key", key=key)
continue
value: IndexerConfiguration[Any] = _value # pyright: ignore[reportUnknownVariableType]

View File

@@ -1,8 +1,7 @@
import logging
from typing import Any, cast
from pydantic import BaseModel
from app.internal.indexers.indexers import indexers
from app.internal.indexers.abstract import AbstractIndexer, SessionContainer
from app.internal.indexers.configuration import (
ConfigurationException,
@@ -10,8 +9,8 @@ from app.internal.indexers.configuration import (
ValuedConfigurations,
create_valued_configuration,
)
logger = logging.getLogger(__name__)
from app.internal.indexers.indexers import indexers
from app.util.log import logger
class IndexerContext(BaseModel, arbitrary_types_allowed=True):
@@ -46,7 +45,7 @@ async def get_indexer_contexts(
if not return_disabled and not await indexer.is_active(
container, valued_configuration
):
logger.debug("Indexer %s is disabled", Indexer.name)
logger.debug("Indexer is disabled", name=Indexer.name)
continue
contexts.append(
@@ -57,6 +56,10 @@ async def get_indexer_contexts(
)
)
except ConfigurationException as e:
logger.error("Failed to get configurations for %s: %s", Indexer.name, e)
logger.error(
"Failed to get configurations for Indexer",
name=Indexer.name,
error=str(e),
)
return contexts

View File

@@ -1,5 +1,4 @@
import json
import logging
from typing import Any
from urllib.parse import urlencode, urljoin
@@ -16,8 +15,7 @@ from app.internal.models import (
BookRequest,
ProwlarrSource,
)
logger = logging.getLogger(__name__)
from app.util.log import logger
class MamConfigurations(Configurations):
@@ -86,20 +84,22 @@ class MamIndexer(AbstractIndexer[MamConfigurations]):
url, cookies={"mam_id": session_id}
) as response:
if response.status == 403:
logger.error("Mam: Failed to authenticate: %s", await response.text())
logger.error(
"Mam: Failed to authenticate", response=await response.text()
)
return
if not response.ok:
logger.error("Mam: Failed to query: %s", await response.text())
logger.error("Mam: Failed to query", response=await response.text())
return
search_results = await response.json()
if "error" in search_results:
logger.error("Mam: Error in response: %s", search_results["error"])
logger.error("Mam: Error in response", error=search_results["error"])
return
for result in search_results["data"]:
self.results[str(result["id"])] = result
logger.info("Mam: Retrieved %d results", len(self.results))
logger.info("Mam: Retrieved results", results_amount=len(self.results))
async def is_matching_source(
self,

View File

@@ -1,5 +1,4 @@
import json
import logging
from typing import Optional
from aiohttp import ClientSession
@@ -14,8 +13,7 @@ from app.internal.models import (
)
from app.util import json_type
from app.util.db import open_session
logger = logging.getLogger(__name__)
from app.util.log import logger
def replace_variables(
@@ -70,7 +68,9 @@ async def _send(
for key, value in additional_fields.items():
if key in json_body.keys():
logger.warning(
f"Key '{key}' already exists in the JSON body but is passed as additional field. Overwriting with value: {value}"
"Key already exists in JSON body. Overwriting with value.",
key=key,
value=value,
)
json_body[key] = value
@@ -135,7 +135,10 @@ async def send_notification(
)
logger.info(
f"Sending notification to {notification.url} with title: '{title}', event type: {notification.event.value}"
"Sending notification",
url=notification.url,
title=title,
event_type=notification.event.value,
)
async with ClientSession() as client_session:
@@ -206,14 +209,17 @@ async def send_manual_notification(
)
logger.info(
f"Sending manual notification to {notification.url} with title: '{title}', event type: {notification.event.value}"
"Sending manual notification",
url=notification.url,
title=title,
event_type=notification.event.value,
)
async with ClientSession() as client_session:
await _send(title, body, additional_fields, notification, client_session)
except Exception as e:
logger.error("Failed to send notification", e)
logger.error("Failed to send notification", error=str(e))
return None

View File

@@ -1,5 +1,4 @@
import json
import logging
import posixpath
from datetime import datetime
from typing import Any, Literal, Optional
@@ -21,8 +20,7 @@ from app.internal.models import (
from app.internal.notifications import send_all_notifications
from app.internal.prowlarr.source_metadata import edit_source_metadata
from app.util.cache import SimpleCache, StringConfigCache
logger = logging.getLogger(__name__)
from app.util.log import logger
class ProwlarrMisconfigured(ValueError):
@@ -115,14 +113,14 @@ async def start_download(
assert base_url is not None and api_key is not None
url = posixpath.join(base_url, "api/v1/search")
logger.debug("Starting download for %s", guid)
logger.debug("Starting download", guid=guid)
async with client_session.post(
url,
json={"guid": guid, "indexerId": indexer_id},
headers={"X-Api-Key": api_key},
) as response:
if not response.ok:
logger.error("Failed to start download for %s: %s", guid, response)
logger.error("Failed to start download", guid=guid, response=response)
await send_all_notifications(
EventEnum.on_failed_download,
requester_username,
@@ -133,7 +131,7 @@ async def start_download(
},
)
else:
logger.debug("Download successfully started for %s", guid)
logger.debug("Download successfully started", guid=guid)
await send_all_notifications(
EventEnum.on_successful_download, requester_username, book_asin
)
@@ -180,7 +178,7 @@ async def query_prowlarr(
url = posixpath.join(base_url, f"api/v1/search?{urlencode(params, doseq=True)}")
logger.info("Querying prowlarr: %s", url)
logger.info("Querying prowlarr", url=url)
async with client_session.get(
url,
@@ -193,7 +191,7 @@ async def query_prowlarr(
try:
if result["protocol"] not in ["torrent", "usenet"]:
logger.info(
"Skipping source with unknown protocol %s", result["protocol"]
"Skipping source with unknown protocol", protocol=result["protocol"]
)
continue
if result["protocol"] == "torrent":
@@ -236,7 +234,7 @@ async def query_prowlarr(
)
)
except KeyError as e:
logger.error("Failed to parse source: %s. KeyError: %s", result, e)
logger.error("Failed to parse source", source=result, keyerror=str(e))
# add additional metadata using any available indexers
container = SessionContainer(session=session, client_session=client_session)
@@ -287,14 +285,14 @@ async def get_indexers(
)
url = posixpath.join(base_url, "api/v1/indexer")
logger.info("Fetching indexers from Prowlarr: %s", url)
logger.info("Fetching indexers from Prowlarr", url=url)
async with client_session.get(
url,
headers={"X-Api-Key": api_key},
) as response:
if not response.ok:
logger.error("Failed to fetch indexers: %s", response)
logger.error("Failed to fetch indexers", response=response)
return IndexerResponse(
state="failedFetch",
error=f"{response.status}: {response.reason}",
@@ -314,7 +312,7 @@ async def get_indexers(
state="ok",
)
except Exception as e:
logger.error("Failed to access Prowlarr to fetch indexers: %s", e)
logger.error("Failed to access Prowlarr to fetch indexers", error=str(e))
return IndexerResponse(
state="failedFetch",
error=str(e),

View File

@@ -1,13 +1,11 @@
import asyncio
import logging
from types import CoroutineType
from typing import Any
from app.internal.indexers.abstract import SessionContainer
from app.internal.indexers.indexer_util import get_indexer_contexts
from app.internal.models import BookRequest, ProwlarrSource
logger = logging.getLogger(__name__)
from app.util.log import logger
async def edit_source_metadata(
@@ -24,7 +22,7 @@ async def edit_source_metadata(
exceptions = await asyncio.gather(*coros, return_exceptions=True)
for exc in exceptions:
if exc:
logger.error("Failed to setup indexer: %s", exc)
logger.error("Failed to setup indexer", error=str(exc))
coros: list[CoroutineType[Any, Any, None]] = []
for source in sources:
@@ -36,4 +34,4 @@ async def edit_source_metadata(
exceptions = await asyncio.gather(*coros, return_exceptions=True)
for exc in exceptions:
if exc:
logger.error("Failed to edit source metadata: %s", exc)
logger.error("Failed to edit source metadata", error=str(exc))

View File

@@ -1,4 +1,3 @@
import logging
from typing import Any
from urllib.parse import quote_plus, urlencode
@@ -18,19 +17,10 @@ from app.internal.env_settings import Settings
from app.internal.models import User
from app.routers import auth, root, search, settings, wishlist
from app.util.db import open_session
from app.util.fetch_js import fetch_scripts
from app.util.redirect import BaseUrlRedirectResponse
from app.util.templates import templates
from app.util.toast import ToastException
from app.util.fetch_js import fetch_scripts
logger = logging.getLogger(__name__)
logging.getLogger("uvicorn").handlers.clear()
stream_handler = logging.StreamHandler()
logging.basicConfig(
level=Settings().app.log_level,
format="%(asctime)s [%(levelname)s] %(name)s: %(message)s",
handlers=[stream_handler],
)
# initialize js dependencies or throw an error if not in debug mode
fetch_scripts(Settings().app.debug)

View File

@@ -1,5 +1,4 @@
import base64
import logging
import secrets
import time
from typing import Annotated, Optional
@@ -23,14 +22,13 @@ from app.internal.auth.oidc_config import InvalidOIDCConfiguration, oidc_config
from app.internal.models import GroupEnum, User
from app.util.connection import get_connection
from app.util.db import get_session
from app.util.log import logger
from app.util.redirect import BaseUrlRedirectResponse
from app.util.templates import templates
from app.util.toast import ToastException
router = APIRouter(prefix="/auth")
logger = logging.getLogger(__name__)
@router.get("/login")
async def login(
@@ -75,8 +73,11 @@ async def login(
if oidc_config.get_redirect_https(session):
auth_redirect_uri = auth_redirect_uri.replace("http:", "https:")
logger.info(f"Redirecting to OIDC login: {authorize_endpoint}")
logger.info(f"Redirect URI: {auth_redirect_uri}")
logger.info(
"Redirecting to OIDC login",
authorize_endpoint=authorize_endpoint,
redirect_uri=auth_redirect_uri,
)
state = jwt.encode( # pyright: ignore[reportUnknownMemberType]
{"redirect_uri": redirect_uri},

View File

@@ -1,5 +1,4 @@
import json
import logging
import uuid
from typing import Annotated, Any, Optional, cast
@@ -38,12 +37,11 @@ from app.internal.ranking.quality import IndexerFlag, QualityRange, quality_conf
from app.util import json_type
from app.util.connection import get_connection
from app.util.db import get_session
from app.util.log import logger
from app.util.templates import template_response
from app.util.time import Minute
from app.util.toast import ToastException
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/settings")
@@ -916,9 +914,7 @@ async def update_indexers(
if context.type is bool:
value = False
else:
logger.error(
"Missing value for '%s' while trying to update indexer", key
)
logger.error("Value is missing for key", key=key)
continue
if context.type is bool:
indexer_configuration_cache.set_bool(session, key, value == "on")

20
app/util/log.py Normal file
View File

@@ -0,0 +1,20 @@
import logging

import structlog

# app/util/log.py — central logging setup for the application.
# Configured once at import time; every other module does
# `from app.util.log import logger` so the whole app shares this pipeline.
structlog.configure(
    processors=[
        # Merge any contextvars-bound context (e.g. per-request fields)
        # into each event dict.
        structlog.contextvars.merge_contextvars,
        # Add the log level name ("info", "warning", ...) to the event.
        structlog.processors.add_log_level,
        # Render stack info when a call passes stack_info=True.
        structlog.processors.StackInfoRenderer(),
        # Dev helper: attach exc_info automatically inside except blocks.
        structlog.dev.set_exc_info,
        # Timestamp each event; utc=False means local time —
        # NOTE(review): confirm local-time timestamps are intended.
        structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S", utc=False),
        # Human-readable (console-style) renderer as the final step.
        structlog.dev.ConsoleRenderer(),
    ],
    # NOTSET → the bound logger performs no level filtering of its own.
    wrapper_class=structlog.make_filtering_bound_logger(logging.NOTSET),
    context_class=dict,
    # Rendered lines are written via print() (stdout).
    logger_factory=structlog.PrintLoggerFactory(),
    cache_logger_on_first_use=False,
)

# Shared application logger. Annotated as stdlib.BoundLogger so editors and
# type checkers know the .debug()/.info()/... signatures.
logger: structlog.stdlib.BoundLogger = structlog.get_logger()

View File

@@ -1,6 +0,0 @@
---
title: 'Docs'
date: 2025-06-09T13:52:47+02:00
---
{{< alert title="Todo" >}}{{< /alert >}}

View File

@@ -26,6 +26,7 @@ dependencies = [
"rapidfuzz",
"python-multipart",
"requests",
"structlog",
]
# setuptools by default expects a "src" folder structure

View File

@@ -24,18 +24,21 @@
name="username"
type="text"
class="input w-full"
placeholder="admin"
minlength="1"
required />
<label for="password">Password</label>
<input id="password"
name="password"
type="password"
placeholder="●●●●●●●●●●●●●●●●●"
class="input w-full"
required />
<label for="confirm_password">Confirm password</label>
<input id="confirm_password"
name="confirm_password"
type="password"
placeholder="●●●●●●●●●●●●●●●●●"
class="input w-full"
required />
{% block init_messages %}

View File

@@ -27,11 +27,13 @@
name="username"
type="text"
class="input w-full"
placeholder="admin"
required />
<label for="password">Password</label>
<input id="password"
name="password"
type="password"
placeholder="●●●●●●●●●●●●●●●●●"
class="input w-full"
required />
<button id="submit" class="btn btn-primary" type="submit">Login</button>

1431
uv.lock generated

File diff suppressed because it is too large Load Diff