Minor: remove the `log` crate dependency from library crates, migrating to `tracing` macros instead.

This commit is contained in:
Sebastian Jeltsch
2025-03-31 11:26:03 +02:00
parent 911f27abdc
commit d9525cd754
46 changed files with 104 additions and 101 deletions
Generated
+1 -1
View File
@@ -6841,7 +6841,6 @@ dependencies = [
"infer",
"jsonschema",
"lazy_static",
"log",
"parking_lot",
"rusqlite",
"schemars",
@@ -6851,6 +6850,7 @@ dependencies = [
"sqlite-vec",
"thiserror 2.0.12",
"tokio",
"tracing",
"trailbase-extension",
"trailbase-sqlean",
"uuid",
-1
View File
@@ -48,7 +48,6 @@ jsonschema = { version = "0.29.0", default-features = false }
jsonwebtoken = { version = "^9.3.0", default-features = false, features = ["use_pem"] }
lazy_static = "1.4.0"
lettre = { version = "^0.11.7", default-features = false, features = ["tokio1-rustls-tls", "sendmail-transport", "smtp-transport", "builder"] }
log = "^0.4.21"
minijinja = { version = "2.1.2", default-features = false, features= ["serde"] }
oauth2 = { version = "5.0.0-alpha.4", default-features = false, features = ["reqwest", "rustls-tls"] }
object_store = { version = "0.12.0", default-features = false, features = ["aws", "fs"] }
+1 -1
View File
@@ -1,8 +1,8 @@
use axum::body::Body;
use axum::http::{header::CONTENT_TYPE, StatusCode};
use axum::response::{IntoResponse, Response};
use log::*;
use thiserror::Error;
use tracing::*;
// FIXME: Admin APIs also deserve more explicit error handling eventually.
#[derive(Debug, Error)]
+1 -1
View File
@@ -4,10 +4,10 @@ use axum::{
};
use chrono::{DateTime, Duration, Utc};
use lazy_static::lazy_static;
use log::*;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::collections::HashMap;
use tracing::*;
use ts_rs::TS;
use uuid::Uuid;
+1 -1
View File
@@ -1,8 +1,8 @@
use axum::extract::{Json, Path, RawQuery, State};
use log::*;
use serde::Serialize;
use std::borrow::Cow;
use std::sync::Arc;
use tracing::*;
use ts_rs::TS;
use crate::admin::AdminError as Error;
@@ -4,8 +4,8 @@ use axum::{
response::{IntoResponse, Response},
Json,
};
use log::*;
use serde::Deserialize;
use tracing::*;
use ts_rs::TS;
use crate::admin::AdminError as Error;
@@ -6,8 +6,8 @@ use axum::{
response::{IntoResponse, Response},
Json,
};
use log::*;
use serde::Deserialize;
use tracing::*;
use ts_rs::TS;
use crate::app_state::AppState;
+1 -1
View File
@@ -4,8 +4,8 @@ use axum::{
response::{IntoResponse, Response},
Json,
};
use log::*;
use serde::Deserialize;
use tracing::*;
use ts_rs::TS;
use crate::admin::AdminError as Error;
+1 -1
View File
@@ -4,8 +4,8 @@ use axum::{
response::{IntoResponse, Response},
Json,
};
use log::*;
use serde::Deserialize;
use tracing::*;
use ts_rs::TS;
use crate::admin::AdminError as Error;
@@ -1,6 +1,6 @@
use axum::{extract::State, Json};
use log::*;
use serde::{Deserialize, Serialize};
use tracing::*;
use ts_rs::TS;
use crate::admin::AdminError as Error;
+1 -1
View File
@@ -3,9 +3,9 @@ use axum::{
Json,
};
use lazy_static::lazy_static;
use log::*;
use serde::Serialize;
use std::borrow::Cow;
use tracing::*;
use ts_rs::TS;
use uuid::Uuid;
+1 -1
View File
@@ -28,7 +28,7 @@ pub async fn is_demo_admin(state: &AppState, id: &Uuid) -> bool {
{
Ok(value) => value.unwrap_or(true),
Err(err) => {
log::error!("{err}");
tracing::error!("{err}");
true
}
};
+3 -3
View File
@@ -1,7 +1,7 @@
use log::*;
use object_store::ObjectStore;
use std::path::PathBuf;
use std::sync::Arc;
use tracing::*;
use crate::auth::jwt::JwtHelper;
use crate::auth::oauth::providers::{ConfiguredOAuthProviders, OAuthProviderType};
@@ -108,7 +108,7 @@ impl AppState {
dev: args.dev,
demo: args.demo,
oauth: Computed::new(&config, |c| {
log::debug!("building oauth from config");
debug!("building oauth from config");
match ConfiguredOAuthProviders::from_config(c.auth.clone()) {
Ok(providers) => providers,
Err(err) => {
@@ -118,7 +118,7 @@ impl AppState {
}
}),
jobs: Computed::new(&config, move |c| {
log::debug!("building jobs from config");
debug!("building jobs from config");
let (ref data_dir, ref conn, ref logs_conn) = jobs_input;
return build_job_registry_from_config(c, data_dir, conn, logs_conn).unwrap_or_else(
+1 -1
View File
@@ -89,7 +89,7 @@ impl<E: RustEmbed> Future for ServeFuture<E> {
};
#[cfg(test)]
log::debug!("asset path: {:?}", self.request.uri());
tracing::debug!("asset path: {:?}", self.request.uri());
let Some(file) = E::get(path).or_else(|| {
self
+1 -1
View File
@@ -25,7 +25,7 @@ async fn get_avatar_url(state: &AppState, user: &DbUser) -> Option<String> {
.query_value(&QUERY, params!(user.id))
.await
.map_err(|err| {
log::debug!("avatar query broken?");
tracing::debug!("avatar query broken?");
return err;
})
.unwrap_or_default()
+1 -1
View File
@@ -98,7 +98,7 @@ pub async fn register_user_handler(
.await
.map_err(|_err| {
#[cfg(debug_assertions)]
log::debug!("Failed to create user {normalized_email}: {_err}");
tracing::debug!("Failed to create user {normalized_email}: {_err}");
// The insert will fail if the user is already registered
AuthError::Conflict
})?
+1 -1
View File
@@ -1,8 +1,8 @@
use axum::body::Body;
use axum::http::{header::CONTENT_TYPE, StatusCode};
use axum::response::{IntoResponse, Response};
use log::*;
use thiserror::Error;
use tracing::*;
#[derive(Debug, Error)]
pub enum AuthError {
@@ -9,10 +9,10 @@ mod oidc;
pub(crate) mod test;
use lazy_static::lazy_static;
use log::*;
use std::collections::hash_map::HashMap;
use std::sync::Arc;
use thiserror::Error;
use tracing::*;
use crate::auth::oauth::OAuthProvider;
use crate::config::proto::{AuthConfig, OAuthProviderConfig, OAuthProviderId};
+1 -1
View File
@@ -228,7 +228,7 @@ pub(crate) async fn reauth_with_refresh_token(
// 3. User explicitly logged out, which will delete **all** sessions for that user.
// 4. Database was overwritten, e.g. by tests or periodic reset for the demo.
#[cfg(debug_assertions)]
log::debug!("Refresh token not found");
tracing::debug!("Refresh token not found");
return Err(AuthError::Unauthorized);
};
+1 -1
View File
@@ -1,5 +1,4 @@
use lazy_static::lazy_static;
use log::*;
use prost_reflect::{
DynamicMessage, ExtensionDescriptor, FieldDescriptor, Kind, MapKey, ReflectMessage, Value,
};
@@ -9,6 +8,7 @@ use std::convert::TryFrom;
use std::str::FromStr;
use thiserror::Error;
use tokio::fs;
use tracing::*;
use validator::{ValidateEmail, ValidateUrl};
use crate::auth::oauth::providers::oauth_provider_registry;
+1 -1
View File
@@ -1,6 +1,6 @@
use log::*;
use std::path::PathBuf;
use tokio::{fs, io::AsyncWriteExt};
use tracing::*;
/// The base data directory where the sqlite database, config, etc. will be stored.
#[derive(Debug, Clone)]
+3 -3
View File
@@ -86,7 +86,7 @@ impl Email {
body: Some(body),
}) => (subject, body),
_ => {
log::debug!("Falling back to default email verification email");
tracing::debug!("Falling back to default email verification email");
(
defaults::EMAIL_VALIDATION_SUBJECT.to_string(),
defaults::EMAIL_VALIDATION_BODY.to_string(),
@@ -131,7 +131,7 @@ impl Email {
body: Some(body),
}) => (subject, body),
_ => {
log::debug!("Falling back to default change email template");
tracing::debug!("Falling back to default change email template");
(
defaults::CHANGE_EMAIL_SUBJECT.to_string(),
defaults::CHANGE_EMAIL_BODY.to_string(),
@@ -176,7 +176,7 @@ impl Email {
body: Some(body),
}) => (subject, body),
_ => {
log::debug!("Falling back to default reset password email");
tracing::debug!("Falling back to default reset password email");
(
defaults::PASSWORD_RESET_SUBJECT.to_string(),
defaults::PASSWORD_RESET_BODY.to_string(),
+1 -1
View File
@@ -3,10 +3,10 @@ use axum::http::header::CONTENT_TYPE;
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use axum::Json;
use log::*;
use serde::de::DeserializeOwned;
use serde::Serialize;
use thiserror::Error;
use tracing::*;
use trailbase_sqlite::schema::FileUploadInput;
use crate::extract::multipart::{parse_multipart, Rejection as MultipartRejection};
+2 -2
View File
@@ -17,7 +17,7 @@ impl ImportProvider for ImportProviderImpl {
_referrer: &str,
_kind: ResolutionKind,
) -> Option<Result<ModuleSpecifier, Error>> {
log::trace!("resolve: {specifier:?}");
tracing::trace!("resolve: {specifier:?}");
// Specifier is just a URL.
match specifier.scheme() {
@@ -37,7 +37,7 @@ impl ImportProvider for ImportProviderImpl {
_is_dyn_import: bool,
_requested_module_type: RequestedModuleType,
) -> Option<Result<String, Error>> {
log::trace!("import: {specifier:?}");
tracing::trace!("import: {specifier:?}");
match specifier.scheme() {
"trailbase" => {
+22 -21
View File
@@ -16,6 +16,7 @@ use std::sync::{Arc, OnceLock};
use std::time::Duration;
use thiserror::Error;
use tokio::sync::oneshot;
use tracing::*;
use crate::assets::cow_to_string;
use crate::auth::user::User;
@@ -95,7 +96,7 @@ impl Drop for RuntimeSingleton {
if let Some(handle) = self.handle.take() {
self.state.clear();
if handle.join().is_err() {
log::error!("Failed to join main rt thread");
error!("Failed to join main rt thread");
}
}
}
@@ -120,7 +121,7 @@ impl Completer {
.map_err(|err| JsResponseError::Internal(err.into()));
if self.reply.send(value).is_err() {
log::error!("Completer send failed for : {}", self.name);
error!("Completer send failed for : {}", self.name);
}
}
}
@@ -157,7 +158,7 @@ impl RuntimeSingleton {
.send(Err(JsResponseError::Internal(err.into())))
.is_err()
{
log::error!("dispatch sending error failed");
error!("dispatch sending error failed");
}
return Ok(());
}
@@ -182,13 +183,13 @@ impl RuntimeSingleton {
.map_err(|err| err.into());
if sender.send(result).is_err() {
log::error!("Sending of js function call reply failed");
error!("Sending of js function call reply failed");
}
}
Message::LoadModule(module, sender) => {
runtime.load_module_async(&module).await?;
if sender.send(Ok(())).is_err() {
log::error!("Load module send failed");
error!("Load module send failed");
}
}
}
@@ -232,7 +233,7 @@ impl RuntimeSingleton {
tokio::select! {
result = runtime.await_event_loop(OPTS, DURATION), if pending => {
if let Err(err) = result{
log::error!("JS event loop: {err}");
error!("JS event loop: {err}");
}
},
msg = private_recv.recv() => {
@@ -240,7 +241,7 @@ impl RuntimeSingleton {
panic!("private channel closed");
};
if let Err(err) = Self::handle_message(runtime, msg, &mut completers).await {
log::error!("Handle private message: {err}");
error!("Handle private message: {err}");
}
},
msg = shared_recv.recv() => {
@@ -248,7 +249,7 @@ impl RuntimeSingleton {
panic!("private channel closed");
};
if let Err(err) = Self::handle_message(runtime, msg, &mut completers).await {
log::error!("Handle shared message: {err}");
error!("Handle shared message: {err}");
}
},
}
@@ -261,14 +262,14 @@ impl RuntimeSingleton {
Some(n) => n,
None => std::thread::available_parallelism().map_or_else(
|err| {
log::error!("Failed to get number of threads: {err}");
error!("Failed to get number of threads: {err}");
return 1;
},
|x| x.get(),
),
};
log::info!("Starting v8 JavaScript runtime with {n_threads} workers.");
info!("Starting v8 JavaScript runtime with {n_threads} workers.");
let (shared_sender, shared_receiver) = async_channel::unbounded::<Message>();
@@ -320,7 +321,7 @@ impl RuntimeSingleton {
for (idx, thread) in threads.into_iter().enumerate() {
if let Err(err) = thread.join() {
log::error!("Failed to join worker: {idx}: {err:?}");
error!("Failed to join worker: {idx}: {err:?}");
}
}
}))
@@ -445,7 +446,7 @@ impl RuntimeHandle {
for s in &self.runtime.state {
let mut lock = s.connection.lock();
if lock.is_some() {
log::debug!("connection already set");
debug!("connection already set");
} else {
lock.replace(conn.clone());
}
@@ -457,7 +458,7 @@ impl RuntimeHandle {
for s in &self.runtime.state {
let mut lock = s.connection.lock();
if lock.is_some() {
log::debug!("connection already set");
debug!("connection already set");
}
lock.replace(conn.clone());
}
@@ -606,7 +607,7 @@ fn add_route_to_router(
let (sender, receiver) = oneshot::channel::<Result<JsResponse, JsResponseError>>();
log::debug!("dispatch {method} {uri}");
debug!("dispatch {method} {uri}");
runtime_handle
.runtime
.sender
@@ -789,7 +790,7 @@ async fn install_routes_and_jobs(
}
}
Err(err) => {
log::error!("Failed to load module: {err}");
error!("Failed to load module: {err}");
None
}
}
@@ -813,7 +814,7 @@ async fn await_loading_module(state: &State, module: Module) -> Result<(), AnyEr
.await?;
let _ = receiver.await.map_err(|err| {
log::error!("Failed to await module loading: {err}");
error!("Failed to await module loading: {err}");
return err;
})?;
@@ -825,7 +826,7 @@ pub(crate) async fn load_routes_and_jobs_from_js_modules(
) -> Result<Option<Router<AppState>>, AnyError> {
let runtime_handle = state.script_runtime();
if runtime_handle.runtime.n_threads == 0 {
log::info!("JS threads set to zero. Skipping initialization for JS modules");
info!("JS threads set to zero. Skipping initialization for JS modules");
return Ok(None);
}
@@ -834,7 +835,7 @@ pub(crate) async fn load_routes_and_jobs_from_js_modules(
let modules = match rustyscript::Module::load_dir(scripts_dir.clone()) {
Ok(modules) => modules,
Err(err) => {
log::debug!("Skip loading js modules from '{scripts_dir:?}': {err}");
debug!("Skip loading js modules from '{scripts_dir:?}': {err}");
return Ok(None);
}
};
@@ -846,7 +847,7 @@ pub(crate) async fn load_routes_and_jobs_from_js_modules(
if let Some(router) = install_routes_and_jobs(state, module).await? {
js_router = js_router.merge(router);
} else {
log::debug!("Skipping js module '{fname:?}': no routes");
debug!("Skipping js module '{fname:?}': no routes");
}
}
@@ -869,7 +870,7 @@ pub(crate) async fn write_js_runtime_files(data_dir: &DataDir) {
)
.await
{
log::warn!("Failed to write 'trailbase.js': {err}");
warn!("Failed to write 'trailbase.js': {err}");
}
if let Err(err) = tokio::fs::write(
@@ -883,7 +884,7 @@ pub(crate) async fn write_js_runtime_files(data_dir: &DataDir) {
)
.await
{
log::warn!("Failed to write 'trailbase.d.ts': {err}");
warn!("Failed to write 'trailbase.d.ts': {err}");
}
}
+1 -1
View File
@@ -1,8 +1,8 @@
use lazy_static::lazy_static;
use log::*;
use std::borrow::Cow;
use std::collections::HashMap;
use thiserror::Error;
use tracing::*;
use crate::records::params::json_string_to_value;
use crate::table_metadata::TableOrViewMetadata;
+2 -2
View File
@@ -2,13 +2,13 @@ use axum::body::Body;
use axum::http::Request;
use axum::response::Response;
use axum_client_ip::InsecureClientIp;
use log::*;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::time::Duration;
use tracing::field::Field;
use tracing::span::{Attributes, Id, Record, Span};
use tracing::Level;
use tracing::*;
use tracing_subscriber::layer::{Context, Layer};
use uuid::Uuid;
@@ -145,7 +145,7 @@ impl SqliteLogLayer {
) -> Result<(), rusqlite::Error> {
#[cfg(test)]
if !log.fields.is_empty() {
log::warn!("Dangling fields: {:?}", log.fields);
tracing::warn!("Dangling fields: {:?}", log.fields);
}
lazy_static::lazy_static! {
+1 -1
View File
@@ -1,7 +1,7 @@
use lazy_static::lazy_static;
use log::*;
use parking_lot::Mutex;
use std::path::PathBuf;
use tracing::*;
use trailbase_refinery_core::Migration;
mod main {
+1 -1
View File
@@ -1,8 +1,8 @@
use axum::body::Body;
use axum::http::{header::CONTENT_TYPE, StatusCode};
use axum::response::{IntoResponse, Response};
use log::*;
use thiserror::Error;
use tracing::*;
/// Publicly visible errors of record APIs.
///
+1 -1
View File
@@ -1,9 +1,9 @@
use axum::body::Body;
use axum::http::header;
use axum::response::{IntoResponse, Response};
use log::*;
use object_store::ObjectStore;
use thiserror::Error;
use tracing::*;
use trailbase_sqlite::schema::{FileUpload, FileUploads};
use crate::app_state::AppState;
+1 -1
View File
@@ -1,7 +1,7 @@
use base64::prelude::*;
use log::*;
use std::collections::HashSet;
use std::sync::Arc;
use tracing::*;
use trailbase_sqlite::schema::{FileUpload, FileUploadInput, FileUploads};
use trailbase_sqlite::{NamedParams, Value};
+1 -1
View File
@@ -1,8 +1,8 @@
use askama::Template;
use itertools::Itertools;
use log::*;
use object_store::ObjectStore;
use std::sync::Arc;
use tracing::*;
use trailbase_sqlite::schema::{FileUpload, FileUploads};
use trailbase_sqlite::{NamedParams, Params as _, Value};
+1 -2
View File
@@ -603,8 +603,7 @@ mod test {
let mut read_dir = tokio::fs::read_dir(state.data_dir().uploads_path())
.await
.unwrap();
while let Some(entry) = read_dir.next_entry().await.unwrap() {
log::error!("{entry:?}");
while let Some(_entry) = read_dir.next_entry().await.unwrap() {
dir_cnt += 1;
}
assert_eq!(dir_cnt, 0);
+1 -1
View File
@@ -1,9 +1,9 @@
use askama::Template;
use log::*;
use rusqlite::types::ToSqlOutput;
use std::borrow::Cow;
use std::collections::HashMap;
use std::sync::Arc;
use tracing::*;
use trailbase_sqlite::{NamedParamRef, NamedParams, Params as _, Value};
use crate::auth::user::User;
+1 -1
View File
@@ -2,8 +2,8 @@ use crate::schema::ColumnOption;
use std::collections::HashMap;
use base64::prelude::*;
use log::*;
use thiserror::Error;
use tracing::*;
use crate::schema::{Column, ColumnDataType};
use crate::table_metadata::ColumnMetadata;
+7 -6
View File
@@ -15,6 +15,7 @@ use std::sync::{
Arc,
};
use std::task::{Context, Poll};
use tracing::*;
use trailbase_sqlite::connection::{extract_record_values, extract_row_id};
use crate::auth::user::User;
@@ -50,7 +51,7 @@ struct CleanupSubscription {
impl Drop for CleanupSubscription {
fn drop(&mut self) {
if self.receiver.upgrade().is_none() {
log::debug!("Subscription cleaned up already by the sender side.");
debug!("Subscription cleaned up already by the sender side.");
return;
}
@@ -285,7 +286,7 @@ impl SubscriptionManager {
match sub.sender.try_send(event.clone()) {
Ok(_) => {}
Err(async_channel::TrySendError::Full(ev)) => {
log::warn!("Channel full, dropping event: {ev:?}");
warn!("Channel full, dropping event: {ev:?}");
}
Err(async_channel::TrySendError::Closed(_ev)) => {
dead_subscriptions.push(idx);
@@ -313,7 +314,7 @@ impl SubscriptionManager {
// If table_metadata is missing, the config/schema must have changed, thus removing the
// subscriptions.
let Some(table_metadata) = table_metadata else {
log::warn!("Table not found: {table_name}. Removing subscriptions");
warn!("Table not found: {table_name}. Removing subscriptions");
let mut record_subs = s.record_subscriptions.write();
record_subs.remove(table_name);
@@ -458,13 +459,13 @@ impl SubscriptionManager {
let action: RecordAction = match action {
Action::SQLITE_UPDATE | Action::SQLITE_INSERT | Action::SQLITE_DELETE => action.into(),
a => {
log::error!("Unknown action: {a:?}");
error!("Unknown action: {a:?}");
return;
}
};
let Some(rowid) = extract_row_id(case) else {
log::error!("Failed to extract row id");
error!("Failed to extract row id");
return;
};
@@ -481,7 +482,7 @@ impl SubscriptionManager {
}
let Some(record_values) = extract_record_values(case) else {
log::error!("Failed to extract values");
error!("Failed to extract values");
return;
};
+7 -7
View File
@@ -1,7 +1,6 @@
use chrono::{DateTime, Duration, Utc};
use cron::Schedule;
use futures_util::future::BoxFuture;
use log::*;
use parking_lot::Mutex;
use std::collections::{hash_map::Entry, HashMap};
use std::future::Future;
@@ -10,6 +9,7 @@ use std::sync::{
atomic::{AtomicI32, Ordering},
Arc,
};
use tracing::*;
use trailbase_sqlite::{params, Connection};
use crate::config::proto::{Config, SystemJob, SystemJobId};
@@ -78,7 +78,7 @@ impl Job {
let (name, schedule) = {
let lock = job.state.lock();
if let Some(ref handle) = lock.handle {
log::warn!("starting an already running job");
warn!("starting an already running job");
handle.abort();
}
@@ -92,7 +92,7 @@ impl Job {
break;
};
let Ok(duration) = (next - Utc::now()).to_std() else {
log::warn!("Invalid duration for '{name}': {next:?}");
warn!("Invalid duration for '{name}': {next:?}");
continue;
};
@@ -101,7 +101,7 @@ impl Job {
let _ = job.run_now().await;
}
log::info!("Exited job: '{name}'");
info!("Exited job: '{name}'");
})
.abort_handle(),
);
@@ -198,7 +198,7 @@ impl JobRegistry {
jobs.get(&id)?.clone()
};
log::debug!("Running job {id}: {}", job.name());
debug!("Running job {id}: {}", job.name());
return Some(job.run_now().await);
}
}
@@ -436,11 +436,11 @@ pub fn build_job_registry_from_config(
}
}
None => {
log::error!("Duplicate job definition for '{name}'");
error!("Duplicate job definition for '{name}'");
}
},
Err(err) => {
log::error!("Invalid time spec for '{name}': {err}");
error!("Invalid time spec for '{name}': {err}");
}
};
}
+5 -5
View File
@@ -1,5 +1,4 @@
use itertools::Itertools;
use log::*;
use serde::{Deserialize, Serialize};
use sqlite3_parser::ast::{
fmt::ToTokens, ColumnDefinition, CreateTableBody, DeferSubclause, Expr, ForeignKeyClause,
@@ -8,6 +7,7 @@ use sqlite3_parser::ast::{
};
use std::collections::HashMap;
use thiserror::Error;
use tracing::*;
use ts_rs::TS;
#[derive(Debug, Error)]
@@ -603,7 +603,7 @@ impl TryFrom<sqlite3_parser::ast::Stmt> for Table {
});
}
TableConstraint::PrimaryKey { .. } => {
log::warn!("PK table constraint not implemented. Use column constraints.");
warn!("PK table constraint not implemented. Use column constraints.");
}
}
}
@@ -1086,7 +1086,7 @@ fn build_foreign_key(
) -> ForeignKey {
if let Some(ref clause) = deref_clause {
// TODO: Parse DEFERRABLE.
log::warn!("Unsupported DEFERRABLE in FK clause: {clause:?}");
warn!("Unsupported DEFERRABLE in FK clause: {clause:?}");
}
let (on_update, on_delete) = unparse_fk_trigger(&clause.args);
@@ -1127,12 +1127,12 @@ fn unparse_fk_trigger(
on_update = Some((*action).into());
}
RefArg::OnInsert(action) => {
log::error!("Unexpected ON INSERT in FK clause: {action:?}");
error!("Unexpected ON INSERT in FK clause: {action:?}");
}
RefArg::Match(name) => {
// SQL supports FK MATCH clause, which is *not* supported by sqlite:
// https://www.sqlite.org/foreignkeys.html#fk_unsupported
log::warn!("Unsupported MATCH in FK clause: {name:?}");
warn!("Unsupported MATCH in FK clause: {name:?}");
}
}
}
+1 -1
View File
@@ -1,6 +1,6 @@
use log::*;
use std::path::PathBuf;
use thiserror::Error;
use tracing::*;
use crate::app_state::{build_objectstore, AppState, AppStateArgs};
use crate::auth::jwt::{JwtHelper, JwtHelperError};
+9 -8
View File
@@ -20,6 +20,7 @@ use tokio_rustls::{
};
use tower_cookies::CookieManagerLayer;
use tower_http::{cors, limit::RequestBodyLimitLayer, services::ServeDir, trace::TraceLayer};
use tracing::*;
use crate::admin;
use crate::app_state::AppState;
@@ -109,7 +110,7 @@ impl Server {
O: std::future::Future<Output = Result<(), Box<dyn std::error::Error + Sync + Send>>>,
{
let version_info = rustc_tools_util::get_version_info!();
log::info!(
info!(
"Initializing server version: {hash} {date}",
hash = version_info.commit_hash.unwrap_or_default(),
date = version_info.commit_date.unwrap_or_default(),
@@ -198,7 +199,7 @@ impl Server {
set.spawn(async move { Self::start_listen(&addr, router, tls_key, tls_cert).await });
}
log::info!(
info!(
"listening on http://{addr} 🚀 (Admin UI http://{admin_addr}/_/admin/)",
addr = self.main_router.0,
admin_addr = self
@@ -223,7 +224,7 @@ impl Server {
let tcp_listener = match tokio::net::TcpListener::bind(addr).await {
Ok(listener) => listener,
Err(err) => {
log::error!("Failed to listen on: {addr}: {err}");
error!("Failed to listen on: {addr}: {err}");
std::process::exit(1);
}
};
@@ -242,7 +243,7 @@ impl Server {
.with_graceful_shutdown(shutdown_signal())
.await
{
log::error!("Failed to start server: {err}");
error!("Failed to start server: {err}");
std::process::exit(1);
}
}
@@ -250,7 +251,7 @@ impl Server {
let listener = match tokio::net::TcpListener::bind(addr).await {
Ok(listener) => listener,
Err(err) => {
log::error!("Failed to listen on: {addr}: {err}");
error!("Failed to listen on: {addr}: {err}");
std::process::exit(1);
}
};
@@ -259,7 +260,7 @@ impl Server {
.with_graceful_shutdown(shutdown_signal())
.await
{
log::error!("Failed to start server: {err}");
error!("Failed to start server: {err}");
std::process::exit(1);
}
}
@@ -421,7 +422,7 @@ fn build_cors(opts: &ServerOptions) -> cors::CorsLayer {
let wildcard = origin_strs.iter().any(|s| s == "*");
let origins = if wildcard {
log::info!("CORS: allow any origin");
info!("CORS: allow any origin");
// cors::AllowOrigin::any()
cors::AllowOrigin::mirror_request()
} else {
@@ -429,7 +430,7 @@ fn build_cors(opts: &ServerOptions) -> cors::CorsLayer {
match HeaderValue::from_str(o.as_str()) {
Ok(value) => Some(value),
Err(err) => {
log::error!("Invalid CORS origin {o}: {err}");
error!("Invalid CORS origin {o}: {err}");
None
}
}
+8 -7
View File
@@ -23,6 +23,7 @@ use tokio::sync::watch;
use tokio_rustls::TlsAcceptor;
use tower::ServiceExt as _;
use tower_service::Service;
use tracing::*;
/// Types that can listen for connections.
pub trait Listener: Send + 'static {
@@ -79,7 +80,7 @@ async fn handle_accept_error(e: io::Error) {
// > and then the listener will sleep for 1 second.
//
// hyper allowed customizing this but axum does not.
log::warn!("accept error: {e}");
warn!("accept error: {e}");
tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
}
@@ -381,7 +382,7 @@ where
let signal_tx = Arc::new(signal_tx);
tokio::spawn(async move {
signal.await;
log::trace!("received graceful shutdown signal. Telling tasks to shutdown");
trace!("received graceful shutdown signal. Telling tasks to shutdown");
drop(signal_rx);
});
@@ -396,14 +397,14 @@ where
tuple
},
_ = signal_tx.closed() => {
log::trace!("signal received, not accepting new connections");
trace!("signal received, not accepting new connections");
break;
}
};
let io = TokioIo::new(io);
log::trace!("connection {remote_addr:?} accepted");
trace!("connection {remote_addr:?} accepted");
poll_fn(|cx| make_service.poll_ready(cx))
.await
@@ -439,12 +440,12 @@ where
tokio::select! {
result = conn.as_mut() => {
if let Err(_err) = result {
log::trace!("failed to serve connection: {_err:#}");
trace!("failed to serve connection: {_err:#}");
}
break;
}
_ = &mut signal_closed => {
log::trace!("signal received in task, starting graceful shutdown");
trace!("signal received in task, starting graceful shutdown");
conn.as_mut().graceful_shutdown();
}
}
@@ -457,7 +458,7 @@ where
drop(close_rx);
drop(listener);
log::trace!(
trace!(
"waiting for {} task(s) to finish",
close_tx.receiver_count()
);
+1 -1
View File
@@ -1,7 +1,6 @@
use fallible_iterator::FallibleIterator;
use jsonschema::Validator;
use lazy_static::lazy_static;
use log::*;
use regex::Regex;
use serde::{Deserialize, Serialize};
use serde_json::Value;
@@ -10,6 +9,7 @@ use std::borrow::Cow;
use std::collections::HashMap;
use std::sync::Arc;
use thiserror::Error;
use tracing::*;
use trailbase_sqlite::{params, NamedParams};
use crate::constants::{SQLITE_SCHEMA_TABLE, USER_TABLE};
+1 -1
View File
@@ -1,6 +1,6 @@
use log::*;
use std::path::{Path, PathBuf};
use thiserror::Error;
use tracing::*;
use crate::migrations;
+2 -1
View File
@@ -1,5 +1,6 @@
use rcgen::{generate_simple_self_signed, CertifiedKey};
use tokio_rustls::rustls::pki_types::{pem::PemObject, PrivateKeyDer};
use tracing::*;
use trailbase::{DataDir, Server, ServerOptions};
#[test]
@@ -57,7 +58,7 @@ fn test_https_serving() {
.send()
.await;
log::debug!("{response:?}");
debug!("{response:?}");
if let Ok(response) = response {
assert_eq!(response.text().await.unwrap(), "Ok");
+1 -1
View File
@@ -13,7 +13,6 @@ crossbeam-channel = "0.5.13"
infer = "0.19.0"
jsonschema = { version = "0.29.0", default-features = false }
lazy_static = "1.5.0"
log = "0.4.22"
rusqlite = { workspace = true }
schemars = "0.8.21"
serde = { version = "^1.0.203", features = ["derive"] }
@@ -23,6 +22,7 @@ trailbase-sqlean = { workspace = true }
sqlite-vec = "0.1.6"
thiserror = "2.0.1"
tokio = { version = "^1.38.0", features = ["macros", "rt-multi-thread", "fs", "sync"] }
tracing = { version = "0.1.40", default-features = false }
trailbase-extension = { workspace = true }
uuid = { workspace = true }
parking_lot = { version = "0.12.3", default-features = false }
+1 -1
View File
@@ -17,7 +17,7 @@ extern "C" fn init_trailbase_extensions(
// https://github.com/nalgeon/sqlean/blob/main/docs/define.md
let status = unsafe { trailbase_sqlean::define_init(db as *mut trailbase_sqlean::sqlite3) };
if status != 0 {
log::error!("Failed to load sqlean::define",);
tracing::error!("Failed to load sqlean::define",);
return status;
}