mirror of
https://github.com/trailbaseio/trailbase.git
synced 2025-12-30 14:19:43 -06:00
Move JS runtime into a separate crate.
This commit is contained in:
39
Cargo.lock
generated
39
Cargo.lock
generated
@@ -6732,9 +6732,7 @@ dependencies = [
|
||||
"parking_lot",
|
||||
"pin-project-lite",
|
||||
"prost",
|
||||
"prost-build",
|
||||
"prost-reflect",
|
||||
"prost-reflect-build",
|
||||
"quoted_printable",
|
||||
"rand 0.9.1",
|
||||
"rcgen",
|
||||
@@ -6742,7 +6740,6 @@ dependencies = [
|
||||
"reqwest",
|
||||
"rusqlite",
|
||||
"rustc_tools_util",
|
||||
"rustyscript",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -6763,7 +6760,9 @@ dependencies = [
|
||||
"tracing-subscriber",
|
||||
"trailbase-apalis",
|
||||
"trailbase-assets",
|
||||
"trailbase-build",
|
||||
"trailbase-extension",
|
||||
"trailbase-js",
|
||||
"trailbase-refinery-core",
|
||||
"trailbase-refinery-macros",
|
||||
"trailbase-schema",
|
||||
@@ -6806,10 +6805,20 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"askama",
|
||||
"axum 0.8.3",
|
||||
"env_logger",
|
||||
"log",
|
||||
"rust-embed",
|
||||
"tower-service",
|
||||
"trailbase-build",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "trailbase-build"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"env_logger",
|
||||
"log",
|
||||
"prost-build",
|
||||
"prost-reflect-build",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6872,6 +6881,27 @@ dependencies = [
|
||||
"validator",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "trailbase-js"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"axum 0.8.3",
|
||||
"bytes",
|
||||
"futures-util",
|
||||
"kanal",
|
||||
"log",
|
||||
"parking_lot",
|
||||
"rust-embed",
|
||||
"rustyscript",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror 2.0.12",
|
||||
"tokio",
|
||||
"tracing-subscriber",
|
||||
"trailbase-build",
|
||||
"trailbase-sqlite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "trailbase-refinery-core"
|
||||
version = "0.8.16"
|
||||
@@ -6941,6 +6971,7 @@ dependencies = [
|
||||
name = "trailbase-sqlite"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"criterion",
|
||||
"crossbeam-channel",
|
||||
"env_logger",
|
||||
|
||||
15
Cargo.toml
15
Cargo.toml
@@ -6,9 +6,11 @@ members = [
|
||||
"examples/custom-binary",
|
||||
"trailbase-apalis",
|
||||
"trailbase-assets",
|
||||
"trailbase-build",
|
||||
"trailbase-cli",
|
||||
"trailbase-core",
|
||||
"trailbase-extension",
|
||||
"trailbase-js",
|
||||
"trailbase-schema",
|
||||
"trailbase-sqlite",
|
||||
"vendor/sqlean",
|
||||
@@ -16,9 +18,11 @@ members = [
|
||||
default-members = [
|
||||
"client/trailbase-rs",
|
||||
"trailbase-assets",
|
||||
"trailbase-build",
|
||||
"trailbase-cli",
|
||||
"trailbase-core",
|
||||
"trailbase-extension",
|
||||
"trailbase-js",
|
||||
"trailbase-schema",
|
||||
"trailbase-sqlite",
|
||||
]
|
||||
@@ -43,7 +47,7 @@ opt-level = 3
|
||||
# still using it in github releases.
|
||||
lto = "thin" # ("off", "thin", "fat")
|
||||
codegen-units = 16
|
||||
strip = "debuginfo"
|
||||
strip = "debuginfo" # ("symbols", "debuginfo", "none")
|
||||
|
||||
# Workaround for https://github.com/gwenn/lemon-rs/issues/78. sqlite3-parser
|
||||
# requires 1+MB stack frames to parse trivial SQL statements, which is larger
|
||||
@@ -59,13 +63,18 @@ axum = { version = "^0.8.1", features = ["multipart"] }
|
||||
env_logger = { version = "^0.11.8", default-features = false, features = ["auto-color", "humantime"] }
|
||||
libsqlite3-sys = { version = "0.32.0", features = ["bundled"] }
|
||||
rusqlite = { version = "0.34.0", default-features = false, features = ["bundled", "column_decltype", "load_extension", "modern_sqlite", "functions", "limits", "backup", "hooks", "preupdate_hook"] }
|
||||
rust-embed = { version = "8.4.0", default-features = false, features = ["mime-guess"] }
|
||||
tokio = { version = "^1.38.0", features = ["macros", "rt-multi-thread", "fs", "signal", "time", "sync"] }
|
||||
trailbase-refinery-core = { path = "vendor/refinery/refinery_core", version = "0.8.16", default-features = false, features = ["rusqlite-bundled"] }
|
||||
trailbase-refinery-macros = { path = "vendor/refinery/refinery_macros", version = "0.8.15" }
|
||||
tracing = { version = "0.1.40", default-features = false }
|
||||
tracing-subscriber = { version = "0.3.18", default-features = false, features = ["smallvec", "std", "fmt", "json"] }
|
||||
trailbase-apalis = { path = "trailbase-apalis", version = "0.1.0" }
|
||||
trailbase-build = { path = "trailbase-build", version = "0.1.0" }
|
||||
trailbase-assets = { path = "trailbase-assets", version = "0.1.0" }
|
||||
trailbase-sqlean = { path = "vendor/sqlean", version = "0.0.2" }
|
||||
trailbase-extension = { path = "trailbase-extension", version = "0.2.0" }
|
||||
trailbase-js = { path = "trailbase-js", version = "0.1.0" }
|
||||
trailbase-refinery-core = { path = "vendor/refinery/refinery_core", version = "0.8.16", default-features = false, features = ["rusqlite-bundled"] }
|
||||
trailbase-refinery-macros = { path = "vendor/refinery/refinery_macros", version = "0.8.15" }
|
||||
trailbase-schema = { path = "trailbase-schema", version = "0.1.0" }
|
||||
trailbase-sqlite = { path = "trailbase-sqlite", version = "0.2.0" }
|
||||
trailbase = { path = "trailbase-core", version = "0.1.0" }
|
||||
|
||||
2
pnpm-lock.yaml
generated
2
pnpm-lock.yaml
generated
@@ -657,7 +657,7 @@ importers:
|
||||
specifier: ^3.1.2
|
||||
version: 3.1.2(@types/debug@4.1.12)(@types/node@22.15.2)(happy-dom@15.11.7)(jiti@2.4.2)(jsdom@26.1.0)(lightningcss@1.29.2)(yaml@2.7.1)
|
||||
|
||||
trailbase-assets/js/runtime:
|
||||
trailbase-js/assets/runtime:
|
||||
devDependencies:
|
||||
'@eslint/js':
|
||||
specifier: ^9.25.1
|
||||
|
||||
@@ -8,7 +8,7 @@ packages:
|
||||
- 'trailbase-assets/js/admin'
|
||||
- 'trailbase-assets/js/auth'
|
||||
- 'trailbase-assets/js/client'
|
||||
- 'trailbase-assets/js/runtime'
|
||||
- 'trailbase-js/assets/runtime'
|
||||
options:
|
||||
link-workspace-packages: true
|
||||
prefer-workspace-packages: true
|
||||
|
||||
@@ -16,9 +16,8 @@ exclude = [
|
||||
askama = { workspace = true }
|
||||
axum = { workspace = true }
|
||||
log = "0.4.27"
|
||||
rust-embed = { version = "8.4.0", default-features = false, features = ["mime-guess"] }
|
||||
rust-embed = { workspace = true }
|
||||
tower-service = { version = "0.3.3", default-features = false }
|
||||
|
||||
[build-dependencies]
|
||||
env_logger = { workspace = true }
|
||||
log = "0.4.27"
|
||||
trailbase-build = { workspace = true }
|
||||
|
||||
@@ -1,117 +1,29 @@
|
||||
#![allow(clippy::needless_return)]
|
||||
|
||||
use log::*;
|
||||
use std::env;
|
||||
use std::fs::{self};
|
||||
use std::io::{Result, Write};
|
||||
use std::path::Path;
|
||||
|
||||
#[allow(unused)]
|
||||
fn copy_dir(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
|
||||
fs::create_dir_all(&dst)?;
|
||||
for entry in fs::read_dir(src)? {
|
||||
let entry = entry?;
|
||||
if entry.file_name().to_string_lossy().starts_with(".") {
|
||||
continue;
|
||||
}
|
||||
|
||||
if entry.file_type()?.is_dir() {
|
||||
copy_dir(entry.path(), dst.as_ref().join(entry.file_name()))?;
|
||||
} else {
|
||||
fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?;
|
||||
}
|
||||
}
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
fn write_output(mut sink: impl Write, source: &[u8], header: &str) -> Result<()> {
|
||||
sink.write_all(header.as_bytes())?;
|
||||
sink.write_all(b"\n")?;
|
||||
sink.write_all(source)?;
|
||||
sink.write_all(b"\n\n")?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
fn pnpm_run(args: &[&str]) -> Result<std::process::Output> {
|
||||
let cmd = "pnpm";
|
||||
let output = std::process::Command::new(cmd)
|
||||
.args(args)
|
||||
.output()
|
||||
.map_err(|err| {
|
||||
eprintln!("Error: Failed to run '{cmd} {}'", args.join(" "));
|
||||
return err;
|
||||
})?;
|
||||
|
||||
let header = format!(
|
||||
"== {cmd} {} (cwd: {:?}) ==",
|
||||
args.join(" "),
|
||||
std::env::current_dir()?
|
||||
);
|
||||
write_output(std::io::stdout(), &output.stdout, &header)?;
|
||||
write_output(std::io::stderr(), &output.stderr, &header)?;
|
||||
|
||||
if !output.status.success() {
|
||||
let msg = format!(
|
||||
"Failed to run '{args:?}'\n\t{}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
|
||||
fn is_true(v: &str) -> bool {
|
||||
return matches!(v.to_lowercase().as_str(), "true" | "1" | "");
|
||||
}
|
||||
|
||||
// NOTE: We don't want to break backend-builds on frontend errors, at least for dev builds.
|
||||
match env::var("SKIP_ERROR") {
|
||||
Ok(v) if is_true(&v) => warn!("{}", msg),
|
||||
_ => {
|
||||
return Err(std::io::Error::new(std::io::ErrorKind::Other, msg));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn build_js(path: &str) -> Result<()> {
|
||||
// We deliberately chose not to use "--frozen-lockfile" here, since this is not a CI use-case.
|
||||
let out_dir = std::env::var("OUT_DIR").unwrap();
|
||||
let _install_output = if out_dir.contains("target/package") {
|
||||
pnpm_run(&["--dir", path, "install", "--ignore-workspace"])?
|
||||
} else {
|
||||
pnpm_run(&["--dir", path, "install"])?
|
||||
};
|
||||
|
||||
let _build_output = pnpm_run(&["--dir", path, "build"])?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
use std::{io::Result, path::PathBuf};
|
||||
|
||||
fn main() -> Result<()> {
|
||||
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
||||
trailbase_build::init_env_logger();
|
||||
|
||||
// WARN: watching non-existent paths will also trigger rebuilds.
|
||||
println!("cargo::rerun-if-changed=js/client/src/");
|
||||
trailbase_build::rerun_if_changed("js/client/src/");
|
||||
|
||||
{
|
||||
let path = "js/admin";
|
||||
println!("cargo::rerun-if-changed={path}/src/components/");
|
||||
println!("cargo::rerun-if-changed={path}/src/lib/");
|
||||
build_js(path)?;
|
||||
let path = PathBuf::from("js/admin");
|
||||
trailbase_build::rerun_if_changed(path.join("src/components/"));
|
||||
trailbase_build::rerun_if_changed(path.join("src/lib/"));
|
||||
|
||||
trailbase_build::build_js(path)?;
|
||||
}
|
||||
|
||||
{
|
||||
let path = "js/auth";
|
||||
println!("cargo::rerun-if-changed={path}/src/components/");
|
||||
println!("cargo::rerun-if-changed={path}/src/lib/");
|
||||
println!("cargo::rerun-if-changed={path}/src/pages/");
|
||||
println!("cargo::rerun-if-changed={path}/src/layouts/");
|
||||
build_js(path)?;
|
||||
}
|
||||
let path = PathBuf::from("js/auth");
|
||||
trailbase_build::rerun_if_changed(path.join("src/components/"));
|
||||
trailbase_build::rerun_if_changed(path.join("src/lib/"));
|
||||
trailbase_build::rerun_if_changed(path.join("src/pages/"));
|
||||
trailbase_build::rerun_if_changed(path.join("src/layouts/"));
|
||||
|
||||
{
|
||||
println!("cargo::rerun-if-changed=js/runtime/src/");
|
||||
build_js("js/runtime")?;
|
||||
trailbase_build::build_js(path)?;
|
||||
}
|
||||
|
||||
return Ok(());
|
||||
|
||||
@@ -16,7 +16,3 @@ pub struct AdminAssets;
|
||||
#[derive(RustEmbed, Clone)]
|
||||
#[folder = "js/auth/dist/"]
|
||||
pub struct AuthAssets;
|
||||
|
||||
#[derive(RustEmbed, Clone)]
|
||||
#[folder = "js/runtime/dist/"]
|
||||
pub struct JsRuntimeAssets;
|
||||
|
||||
15
trailbase-build/Cargo.toml
Normal file
15
trailbase-build/Cargo.toml
Normal file
@@ -0,0 +1,15 @@
|
||||
[package]
|
||||
name = "trailbase-build"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
license = "OSL-3.0"
|
||||
description = "Shared build.rs utilities for the TrailBase framework"
|
||||
homepage = "https://trailbase.io"
|
||||
repository = "https://github.com/trailbaseio/trailbase"
|
||||
readme = "../README.md"
|
||||
|
||||
[dependencies]
|
||||
env_logger = { workspace = true }
|
||||
log = "0.4.27"
|
||||
prost-build = "0.13.1"
|
||||
prost-reflect-build = "0.15.0"
|
||||
124
trailbase-build/src/lib.rs
Normal file
124
trailbase-build/src/lib.rs
Normal file
@@ -0,0 +1,124 @@
|
||||
#![allow(clippy::needless_return)]
|
||||
|
||||
use log::*;
|
||||
use std::env;
|
||||
use std::fs::{self};
|
||||
use std::io::{Result, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
pub fn build_protos(proto_path: impl AsRef<Path>) -> Result<()> {
|
||||
let path = proto_path.as_ref().to_string_lossy();
|
||||
println!("cargo::rerun-if-changed={path}");
|
||||
|
||||
let prost_config = {
|
||||
let mut config = prost_build::Config::new();
|
||||
config.enum_attribute(".", "#[derive(serde::Serialize, serde::Deserialize)]");
|
||||
config
|
||||
};
|
||||
|
||||
let proto_files = vec![
|
||||
PathBuf::from(format!("{path}/config.proto")),
|
||||
PathBuf::from(format!("{path}/config_api.proto")),
|
||||
PathBuf::from(format!("{path}/vault.proto")),
|
||||
];
|
||||
|
||||
prost_reflect_build::Builder::new()
|
||||
.descriptor_pool("crate::DESCRIPTOR_POOL")
|
||||
.compile_protos_with_config(prost_config, &proto_files, &[proto_path])?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
pub fn copy_dir(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
|
||||
fs::create_dir_all(&dst)?;
|
||||
for entry in fs::read_dir(src)? {
|
||||
let entry = entry?;
|
||||
if entry.file_name().to_string_lossy().starts_with(".") {
|
||||
continue;
|
||||
}
|
||||
|
||||
if entry.file_type()?.is_dir() {
|
||||
copy_dir(entry.path(), dst.as_ref().join(entry.file_name()))?;
|
||||
} else {
|
||||
fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?;
|
||||
}
|
||||
}
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
fn write_output(mut sink: impl Write, source: &[u8], header: &str) -> Result<()> {
|
||||
sink.write_all(header.as_bytes())?;
|
||||
sink.write_all(b"\n")?;
|
||||
sink.write_all(source)?;
|
||||
sink.write_all(b"\n\n")?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
pub fn pnpm_run(args: &[&str]) -> Result<std::process::Output> {
|
||||
let cmd = "pnpm";
|
||||
let output = std::process::Command::new(cmd)
|
||||
.args(args)
|
||||
.output()
|
||||
.map_err(|err| {
|
||||
eprintln!("Error: Failed to run '{cmd} {}'", args.join(" "));
|
||||
return err;
|
||||
})?;
|
||||
|
||||
let header = format!(
|
||||
"== {cmd} {} (cwd: {:?}) ==",
|
||||
args.join(" "),
|
||||
std::env::current_dir()?
|
||||
);
|
||||
write_output(std::io::stdout(), &output.stdout, &header)?;
|
||||
write_output(std::io::stderr(), &output.stderr, &header)?;
|
||||
|
||||
if !output.status.success() {
|
||||
let msg = format!(
|
||||
"Failed to run '{args:?}'\n\t{}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
|
||||
fn is_true(v: &str) -> bool {
|
||||
return matches!(v.to_lowercase().as_str(), "true" | "1" | "");
|
||||
}
|
||||
|
||||
// NOTE: We don't want to break backend-builds on frontend errors, at least for dev builds.
|
||||
match env::var("SKIP_ERROR") {
|
||||
Ok(v) if is_true(&v) => warn!("{}", msg),
|
||||
_ => {
|
||||
return Err(std::io::Error::new(std::io::ErrorKind::Other, msg));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
pub fn build_js(path: impl AsRef<Path>) -> Result<()> {
|
||||
let path = path.as_ref().to_string_lossy().to_string();
|
||||
// We deliberately choose not to use "--frozen-lockfile" here, since this is not a CI use-case.
|
||||
let out_dir = std::env::var("OUT_DIR").unwrap();
|
||||
let _install_output = if out_dir.contains("target/package") {
|
||||
pnpm_run(&["--dir", &path, "install", "--ignore-workspace"])?
|
||||
} else {
|
||||
pnpm_run(&["--dir", &path, "install"])?
|
||||
};
|
||||
|
||||
let _build_output = pnpm_run(&["--dir", &path, "build"])?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
pub fn rerun_if_changed(path: impl AsRef<Path>) {
|
||||
let path_str = path.as_ref().to_string_lossy().to_string();
|
||||
// WARN: watching non-existent paths will also trigger rebuilds.
|
||||
if !std::fs::exists(path).unwrap_or(false) {
|
||||
panic!("Path '{path_str}' doesn't exist");
|
||||
}
|
||||
println!("cargo::rerun-if-changed={path_str}");
|
||||
}
|
||||
|
||||
pub fn init_env_logger() {
|
||||
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
||||
}
|
||||
@@ -21,7 +21,7 @@ harness = false
|
||||
|
||||
[features]
|
||||
default = ["v8"]
|
||||
v8 = ["dep:rustyscript"]
|
||||
v8 = ["dep:trailbase-js"]
|
||||
queue = ["dep:apalis", "dep:trailbase-apalis"]
|
||||
|
||||
[dependencies]
|
||||
@@ -64,7 +64,6 @@ regex = "1.11.0"
|
||||
reqwest = { version = "0.12.8", default-features = false, features = ["rustls-tls", "json"] }
|
||||
rusqlite = { workspace = true }
|
||||
rustc_tools_util = "^0.4.2"
|
||||
rustyscript = { version = "^0.11.0", optional = true, features = ["web", "fs"] }
|
||||
serde = { version = "^1.0.203", features = ["derive"] }
|
||||
serde_json = "^1.0.117"
|
||||
serde_path_to_error = "0.1.16"
|
||||
@@ -79,11 +78,12 @@ tower = "0.5.0"
|
||||
tower-cookies = "0.11.0"
|
||||
tower-http = { version = "^0.6.0", default-features = false, features = ["cors", "trace", "fs", "limit"] }
|
||||
tower-service = { version = "0.3.3", default-features = false }
|
||||
tracing = { version = "0.1.40", default-features = false }
|
||||
tracing-subscriber = { version = "0.3.18", default-features = false, features = ["smallvec", "std", "fmt", "json"] }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
trailbase-apalis = { workspace = true, optional = true }
|
||||
trailbase-assets = { workspace = true }
|
||||
trailbase-extension = { workspace = true }
|
||||
trailbase-js = { workspace = true, optional = true }
|
||||
trailbase-refinery-core = { workspace = true }
|
||||
trailbase-refinery-macros = { workspace = true }
|
||||
trailbase-schema = { workspace = true }
|
||||
@@ -95,9 +95,8 @@ uuid = { workspace = true }
|
||||
validator = { version = "0.20.0", default-features = false }
|
||||
|
||||
[build-dependencies]
|
||||
prost-build = "0.13.1"
|
||||
prost-reflect-build = "0.15.0"
|
||||
rustc_tools_util = "^0.4.2"
|
||||
trailbase-build = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
anyhow = "^1.0.86"
|
||||
|
||||
@@ -1,35 +1,11 @@
|
||||
#![allow(clippy::needless_return)]
|
||||
|
||||
use std::io::Result;
|
||||
use std::path::PathBuf;
|
||||
fn main() -> std::io::Result<()> {
|
||||
trailbase_build::init_env_logger();
|
||||
|
||||
fn build_protos() -> Result<()> {
|
||||
const PROTO_PATH: &str = "./proto";
|
||||
println!("cargo::rerun-if-changed={PROTO_PATH}");
|
||||
|
||||
let prost_config = {
|
||||
let mut config = prost_build::Config::new();
|
||||
config.enum_attribute(".", "#[derive(serde::Serialize, serde::Deserialize)]");
|
||||
config
|
||||
};
|
||||
|
||||
let proto_files = vec![
|
||||
PathBuf::from(format!("{PROTO_PATH}/config.proto")),
|
||||
PathBuf::from(format!("{PROTO_PATH}/config_api.proto")),
|
||||
PathBuf::from(format!("{PROTO_PATH}/vault.proto")),
|
||||
];
|
||||
|
||||
prost_reflect_build::Builder::new()
|
||||
.descriptor_pool("crate::DESCRIPTOR_POOL")
|
||||
.compile_protos_with_config(prost_config, &proto_files, &[PathBuf::from(PROTO_PATH)])?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
rustc_tools_util::setup_version_info!();
|
||||
|
||||
build_protos()?;
|
||||
trailbase_build::build_protos("./proto")?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
@@ -34,7 +34,7 @@ pub enum AdminError {
|
||||
#[error("DB Migration error: {0}")]
|
||||
Migration(#[from] trailbase_refinery_core::Error),
|
||||
#[error("SQL -> Json error: {0}")]
|
||||
Json(#[from] crate::records::sql_to_json::JsonError),
|
||||
Json(#[from] trailbase_sqlite::rows::JsonError),
|
||||
#[error("Schema error: {0}")]
|
||||
SchemaError(#[from] trailbase_schema::Error),
|
||||
#[error("Json -> SQL Params error: {0}")]
|
||||
|
||||
@@ -11,6 +11,7 @@ mod query;
|
||||
pub(crate) mod rows;
|
||||
mod table;
|
||||
pub(crate) mod user;
|
||||
mod util;
|
||||
|
||||
pub use error::AdminError;
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use axum::{Json, extract::State};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use trailbase_schema::sqlite::{Column, sqlite3_parse_into_statements};
|
||||
use trailbase_sqlite::rows::rows_to_json_arrays;
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::admin::AdminError as Error;
|
||||
use crate::app_state::AppState;
|
||||
use crate::records::sql_to_json::rows_to_json_arrays;
|
||||
|
||||
#[derive(Debug, Default, Serialize, TS)]
|
||||
#[ts(export)]
|
||||
@@ -81,9 +81,13 @@ pub async fn query_handler(
|
||||
|
||||
let batched_rows = batched_rows_result.map_err(|err| Error::BadRequest(err.into()))?;
|
||||
if let Some(rows) = batched_rows {
|
||||
let (rows, columns) = rows_to_json_arrays(rows, 1024)?;
|
||||
let columns = crate::admin::util::rows_to_columns(&rows);
|
||||
let rows = rows_to_json_arrays(&rows)?;
|
||||
|
||||
return Ok(Json(QueryResponse { columns, rows }));
|
||||
return Ok(Json(QueryResponse {
|
||||
columns: Some(columns),
|
||||
rows,
|
||||
}));
|
||||
}
|
||||
return Ok(Json(QueryResponse::default()));
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ use serde::Serialize;
|
||||
use std::borrow::Cow;
|
||||
use std::sync::Arc;
|
||||
use trailbase_schema::sqlite::Column;
|
||||
use trailbase_sqlite::rows::rows_to_json_arrays;
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::admin::AdminError as Error;
|
||||
@@ -12,7 +13,6 @@ use crate::listing::{
|
||||
Cursor, Order, QueryParseResult, WhereClause, build_filter_where_clause, limit_or_default,
|
||||
parse_and_sanitize_query,
|
||||
};
|
||||
use crate::records::sql_to_json::rows_to_json_arrays;
|
||||
use crate::table_metadata::{TableMetadata, TableOrViewMetadata};
|
||||
|
||||
#[derive(Debug, Serialize, TS)]
|
||||
@@ -120,7 +120,7 @@ pub async fn list_rows_handler(
|
||||
columns: match table_metadata {
|
||||
Some(ref metadata) if metadata.schema.virtual_table => {
|
||||
// Virtual TABLE case.
|
||||
columns.unwrap_or_else(Vec::new)
|
||||
columns
|
||||
}
|
||||
Some(ref metadata) => {
|
||||
// Non-virtual TABLE case.
|
||||
@@ -132,7 +132,7 @@ pub async fn list_rows_handler(
|
||||
columns.to_vec()
|
||||
} else {
|
||||
debug!("Falling back to inferred cols for view: '{table_name}'");
|
||||
columns.unwrap_or_else(Vec::new)
|
||||
columns
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -153,7 +153,7 @@ async fn fetch_rows(
|
||||
filter_where_clause: WhereClause,
|
||||
order: Option<Vec<(String, Order)>>,
|
||||
pagination: Pagination<'_>,
|
||||
) -> Result<(Vec<Vec<serde_json::Value>>, Option<Vec<Column>>), Error> {
|
||||
) -> Result<(Vec<Vec<serde_json::Value>>, Vec<Column>), Error> {
|
||||
let WhereClause {
|
||||
mut clause,
|
||||
mut params,
|
||||
@@ -219,7 +219,10 @@ async fn fetch_rows(
|
||||
return err;
|
||||
})?;
|
||||
|
||||
return Ok(rows_to_json_arrays(result_rows, 1024)?);
|
||||
return Ok((
|
||||
rows_to_json_arrays(&result_rows)?,
|
||||
crate::admin::util::rows_to_columns(&result_rows),
|
||||
));
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -272,7 +275,7 @@ mod tests {
|
||||
|
||||
state.table_metadata().invalidate_all().await.unwrap();
|
||||
|
||||
let (data, maybe_cols) = fetch_rows(
|
||||
let (data, cols) = fetch_rows(
|
||||
conn,
|
||||
"test_table",
|
||||
WhereClause {
|
||||
@@ -290,7 +293,6 @@ mod tests {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let cols = maybe_cols.unwrap();
|
||||
assert_eq!(cols.len(), 4);
|
||||
|
||||
let row = data.get(0).unwrap();
|
||||
|
||||
27
trailbase-core/src/admin/util.rs
Normal file
27
trailbase-core/src/admin/util.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
use trailbase_schema::sqlite::{Column, ColumnDataType};
|
||||
use trailbase_sqlite::ValueType;
|
||||
use trailbase_sqlite::rows::Rows;
|
||||
|
||||
/// Best-effort conversion from row values to column definition.
|
||||
///
|
||||
/// WARN: This is lossy and whenever possible we should rely on parsed "CREATE TABLE" statement for
|
||||
/// the respective column.
|
||||
pub(crate) fn rows_to_columns(rows: &Rows) -> Vec<Column> {
|
||||
let mut columns: Vec<Column> = vec![];
|
||||
for i in 0..rows.column_count() {
|
||||
columns.push(Column {
|
||||
name: rows.column_name(i).unwrap_or("<missing>").to_string(),
|
||||
data_type: match rows.column_type(i).unwrap_or(ValueType::Null) {
|
||||
ValueType::Real => ColumnDataType::Real,
|
||||
ValueType::Text => ColumnDataType::Text,
|
||||
ValueType::Integer => ColumnDataType::Integer,
|
||||
ValueType::Null => ColumnDataType::Null,
|
||||
ValueType::Blob => ColumnDataType::Blob,
|
||||
},
|
||||
// We cannot derive the options from a row of data.
|
||||
options: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
return columns;
|
||||
}
|
||||
@@ -459,7 +459,10 @@ fn build_js_runtime(conn: trailbase_sqlite::Connection, threads: Option<usize>)
|
||||
RuntimeHandle::new()
|
||||
};
|
||||
|
||||
runtime.set_connection(conn);
|
||||
#[cfg(test)]
|
||||
runtime.set_connection(conn, true);
|
||||
#[cfg(not(test))]
|
||||
runtime.set_connection(conn, false);
|
||||
|
||||
return runtime;
|
||||
}
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
#[cfg(feature = "v8")]
|
||||
mod import_provider;
|
||||
|
||||
#[cfg(feature = "v8")]
|
||||
mod runtime;
|
||||
pub(crate) mod runtime;
|
||||
|
||||
#[cfg(not(feature = "v8"))]
|
||||
mod fallback {
|
||||
@@ -10,7 +7,7 @@ mod fallback {
|
||||
pub(crate) struct RuntimeHandle {}
|
||||
|
||||
impl RuntimeHandle {
|
||||
pub(crate) fn set_connection(&self, _conn: trailbase_sqlite::Connection) {}
|
||||
pub(crate) fn set_connection(&self, _conn: trailbase_sqlite::Connection, r#override: bool) {}
|
||||
|
||||
pub(crate) fn new() -> Self {
|
||||
return Self {};
|
||||
@@ -23,7 +20,7 @@ mod fallback {
|
||||
}
|
||||
|
||||
#[cfg(feature = "v8")]
|
||||
pub use runtime::*;
|
||||
pub use trailbase_js::runtime::RuntimeHandle;
|
||||
|
||||
#[cfg(not(feature = "v8"))]
|
||||
pub use fallback::*;
|
||||
|
||||
@@ -1,541 +1,23 @@
|
||||
use axum::Router;
|
||||
use axum::body::Body;
|
||||
use axum::extract::{RawPathParams, Request};
|
||||
use axum::http::{HeaderName, HeaderValue, StatusCode, header::CONTENT_TYPE, request::Parts};
|
||||
use axum::response::{IntoResponse, Response};
|
||||
use axum::http::{HeaderName, HeaderValue, request::Parts};
|
||||
use axum::response::Response;
|
||||
use futures_util::FutureExt;
|
||||
use log::*;
|
||||
use parking_lot::Mutex;
|
||||
use rustyscript::{
|
||||
Error as RSError, Module, Runtime, deno_core::PollEventLoopOptions, init_platform,
|
||||
js_value::Promise, json_args,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::from_value;
|
||||
use std::collections::HashSet;
|
||||
use std::str::FromStr;
|
||||
use std::sync::OnceLock;
|
||||
use std::time::Duration;
|
||||
use thiserror::Error;
|
||||
use tokio::sync::oneshot;
|
||||
use tracing_subscriber::prelude::*;
|
||||
use trailbase_assets::JsRuntimeAssets;
|
||||
|
||||
use trailbase_js::runtime::{
|
||||
DispatchArgs, Error as RSError, JsHttpResponse, JsHttpResponseError, JsUser, Message, Module,
|
||||
Runtime, RuntimeHandle, get_arg,
|
||||
};
|
||||
|
||||
use crate::AppState;
|
||||
use crate::auth::user::User;
|
||||
use crate::records::sql_to_json::rows_to_json_arrays;
|
||||
use crate::util::cow_to_string;
|
||||
use crate::{AppState, DataDir};
|
||||
|
||||
type AnyError = Box<dyn std::error::Error + Send + Sync>;
|
||||
|
||||
#[derive(Deserialize, Default, Debug)]
|
||||
struct JsResponse {
|
||||
headers: Option<Vec<(String, String)>>,
|
||||
status: Option<u16>,
|
||||
body: Option<bytes::Bytes>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum JsResponseError {
|
||||
#[error("Precondition: {0}")]
|
||||
Precondition(String),
|
||||
#[error("Internal: {0}")]
|
||||
Internal(Box<dyn std::error::Error + Send + Sync>),
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct JsUser {
|
||||
// Base64 encoded user id.
|
||||
id: String,
|
||||
email: String,
|
||||
csrf: String,
|
||||
}
|
||||
|
||||
struct DispatchArgs {
|
||||
method: String,
|
||||
route_path: String,
|
||||
uri: String,
|
||||
path_params: Vec<(String, String)>,
|
||||
headers: Vec<(String, String)>,
|
||||
user: Option<JsUser>,
|
||||
body: bytes::Bytes,
|
||||
|
||||
reply: oneshot::Sender<Result<JsResponse, JsResponseError>>,
|
||||
}
|
||||
|
||||
enum Message {
|
||||
Run(Box<dyn (FnOnce(&mut Runtime)) + Send + Sync>),
|
||||
HttpDispatch(DispatchArgs),
|
||||
CallFunction(
|
||||
Option<Module>,
|
||||
&'static str,
|
||||
Vec<serde_json::Value>,
|
||||
oneshot::Sender<Result<serde_json::Value, AnyError>>,
|
||||
),
|
||||
LoadModule(Module, oneshot::Sender<Result<(), AnyError>>),
|
||||
}
|
||||
|
||||
struct State {
|
||||
private_sender: kanal::AsyncSender<Message>,
|
||||
connection: Mutex<Option<trailbase_sqlite::Connection>>,
|
||||
}
|
||||
|
||||
struct RuntimeSingleton {
|
||||
n_threads: usize,
|
||||
|
||||
// Thread handle
|
||||
handle: Option<std::thread::JoinHandle<()>>,
|
||||
|
||||
// Shared sender.
|
||||
shared_sender: kanal::AsyncSender<Message>,
|
||||
|
||||
// Isolate state.
|
||||
state: Vec<State>,
|
||||
}
|
||||
|
||||
impl Drop for RuntimeSingleton {
|
||||
fn drop(&mut self) {
|
||||
if let Some(handle) = self.handle.take() {
|
||||
self.state.clear();
|
||||
if let Err(err) = handle.join() {
|
||||
error!("Failed to join main rt thread: {err:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Completer {
|
||||
name: String,
|
||||
promise: Promise<JsResponse>,
|
||||
reply: oneshot::Sender<Result<JsResponse, JsResponseError>>,
|
||||
}
|
||||
|
||||
impl Completer {
|
||||
fn is_ready(&self, runtime: &mut Runtime) -> bool {
|
||||
return !self.promise.is_pending(runtime);
|
||||
}
|
||||
|
||||
async fn resolve(self, runtime: &mut Runtime) {
|
||||
let value = self
|
||||
.promise
|
||||
.into_future(runtime)
|
||||
.await
|
||||
.map_err(|err| JsResponseError::Internal(err.into()));
|
||||
|
||||
if self.reply.send(value).is_err() {
|
||||
error!("Completer send failed for : {}", self.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RuntimeSingleton {
  /// Dispatch a single control-channel `Message` on this worker's `Runtime`.
  ///
  /// `Run` and `CallFunction`/`LoadModule` complete within this call. HTTP dispatches
  /// only *start* the JS-side promise here; the pending promise is parked in
  /// `completers` and resolved later by `event_loop` once it settles.
  async fn handle_message(
    runtime: &mut Runtime,
    msg: Message,
    completers: &mut Vec<Completer>,
  ) -> Result<(), AnyError> {
    match msg {
      Message::Run(f) => {
        // Caller-supplied closure with direct access to the runtime.
        f(runtime);
      }
      Message::HttpDispatch(args) => {
        let channel = args.reply;
        let uri = args.uri.clone();
        // Kick off the JS `__dispatch` entry point. This yields a pending promise
        // rather than a final response.
        let promise = match runtime.call_function_immediate::<Promise<JsResponse>>(
          None,
          "__dispatch",
          json_args!(
            args.method,
            args.route_path,
            args.uri,
            args.path_params,
            args.headers,
            args.user,
            args.body
          ),
        ) {
          Ok(promise) => promise,
          Err(err) => {
            // Report the failure back to the HTTP handler. If the receiver is
            // already gone, there is nobody left to notify — just log.
            if channel
              .send(Err(JsResponseError::Internal(err.into())))
              .is_err()
            {
              error!("dispatch sending error failed");
            }
            return Ok(());
          }
        };

        // Park the in-flight promise; `event_loop` polls it to completion.
        completers.push(Completer {
          name: uri,
          promise,
          reply: channel,
        });
      }
      Message::CallFunction(module, name, args, sender) => {
        // Optionally load the module first. A load failure degrades to calling in
        // the global scope (`None` handle) rather than erroring out here.
        let module_handle = if let Some(module) = module {
          runtime.load_module_async(&module).await.ok()
        } else {
          None
        };

        let result: Result<serde_json::Value, AnyError> = runtime
          .call_function_async::<serde_json::Value>(module_handle.as_ref(), name, &args)
          .await
          .map_err(|err| err.into());

        if sender.send(result).is_err() {
          error!("Sending of js function call reply failed");
        }
      }
      Message::LoadModule(module, sender) => {
        runtime.load_module_async(&module).await?;
        if sender.send(Ok(())).is_err() {
          error!("Load module send failed");
        }
      }
    }

    return Ok(());
  }

  /// Per-worker main loop: alternates between pumping the v8 event loop (only while
  /// HTTP-dispatch promises are pending) and receiving new messages from either the
  /// worker's private channel or the shared any-worker channel.
  ///
  /// Blocks the calling thread for the lifetime of the worker.
  fn event_loop(
    runtime: &mut Runtime,
    private_recv: kanal::AsyncReceiver<Message>,
    shared_recv: kanal::AsyncReceiver<Message>,
  ) {
    runtime.tokio_runtime().block_on(async {
      // In-flight HTTP dispatches awaiting their JS promise.
      let mut completers: Vec<Completer> = vec![];

      loop {
        // Collect indexes of promises that settled since the last iteration.
        let completed = completers
          .iter()
          .enumerate()
          .filter_map(|(idx, completer)| {
            if completer.is_ready(runtime) {
              Some(idx)
            } else {
              None
            }
          })
          .collect::<Vec<_>>();

        // Resolve in reverse index order so `swap_remove` does not invalidate the
        // not-yet-processed indexes.
        for index in completed.into_iter().rev() {
          let completer = completers.swap_remove(index);
          completer.resolve(runtime).await;
        }
        let pending = !completers.is_empty();

        // Bound each event-loop pump so we periodically return to the channels.
        const DURATION: Option<Duration> = Some(Duration::from_millis(25));
        const OPTS: PollEventLoopOptions = PollEventLoopOptions {
          wait_for_inspector: false,
          pump_v8_message_loop: true,
        };

        tokio::select! {
          // Only pump v8 when there is actually outstanding promise work.
          result = runtime.await_event_loop(OPTS, DURATION), if pending => {
            if let Err(err) = result{
              error!("JS event loop: {err}");
            }
          },
          msg = private_recv.recv() => {
            let Ok(msg) = msg else {
              panic!("private channel closed");
            };
            if let Err(err) = Self::handle_message(runtime, msg, &mut completers).await {
              error!("Handle private message: {err}");
            }
          },
          msg = shared_recv.recv() => {
            let Ok(msg) = msg else {
              panic!("private channel closed");
            };
            if let Err(err) = Self::handle_message(runtime, msg, &mut completers).await {
              error!("Handle shared message: {err}");
            }
          },
        }
      }
    });
  }

  /// Bring up `threads` worker/isolate threads with basic setup.
  ///
  /// NOTE: functions to install routes and jobs are registered later, we need an AppState first.
  fn new_with_threads(threads: Option<usize>) -> Self {
    // Default to one worker per available core; fall back to a single worker if the
    // parallelism query fails.
    let n_threads = match threads {
      Some(n) => n,
      None => std::thread::available_parallelism().map_or_else(
        |err| {
          error!("Failed to get number of threads: {err}");
          return 1;
        },
        |x| x.get(),
      ),
    };

    info!("Starting v8 JavaScript runtime with {n_threads} workers.");

    // Messages on the shared channel may be picked up by any worker.
    let (shared_sender, shared_receiver) = kanal::unbounded_async::<Message>();

    // One private channel per worker for isolate-targeted messages, plus a slot for
    // the SQLite connection which is injected later via `RuntimeHandle::set_connection`.
    let (state, receivers): (Vec<State>, Vec<kanal::AsyncReceiver<Message>>) = (0..n_threads)
      .map(|_index| {
        let (sender, receiver) = kanal::unbounded_async::<Message>();

        return (
          State {
            private_sender: sender,
            connection: Mutex::new(None),
          },
          receiver,
        );
      })
      .unzip();

    // Supervisor thread: initializes the v8 platform once, then spawns and joins the
    // per-isolate worker threads.
    let handle = if n_threads > 0 {
      Some(std::thread::spawn(move || {
        // swc_ecma_codegen is very spammy (or at least used to be):
        // https://github.com/swc-project/swc/pull/9604
        tracing_subscriber::Registry::default()
          .with(tracing_subscriber::filter::Targets::new().with_target(
            "tracing::span",
            tracing_subscriber::filter::LevelFilter::WARN,
          ))
          .set_default();

        init_platform(n_threads as u32, true);

        let threads: Vec<_> = receivers
          .into_iter()
          .enumerate()
          .map(|(index, receiver)| {
            let shared_receiver = shared_receiver.clone();

            return std::thread::spawn(move || {
              // Dedicated single-threaded tokio runtime per worker thread.
              let tokio_runtime = std::rc::Rc::new(
                tokio::runtime::Builder::new_current_thread()
                  .enable_time()
                  .enable_io()
                  .thread_name("v8-runtime")
                  .build()
                  .expect("startup"),
              );

              let mut js_runtime = match Self::init_runtime(index, tokio_runtime.clone()) {
                Ok(js_runtime) => js_runtime,
                Err(err) => {
                  panic!("Failed to init v8 runtime on thread {index}: {err}");
                }
              };

              // Runs until the channels close.
              Self::event_loop(&mut js_runtime, receiver, shared_receiver);
            });
          })
          .collect();

        for (idx, thread) in threads.into_iter().enumerate() {
          if let Err(err) = thread.join() {
            error!("Failed to join worker: {idx}: {err:?}");
          }
        }
      }))
    } else {
      None
    };

    return RuntimeSingleton {
      n_threads,
      shared_sender,
      handle,
      state,
    };
  }

  /// Construct and configure a fresh `rustyscript::Runtime` for worker `index`,
  /// registering the native functions exposed to JS: `isolate_id`, `query` and
  /// `execute`.
  fn init_runtime(
    index: usize,
    tokio_runtime: std::rc::Rc<tokio::runtime::Runtime>,
  ) -> Result<Runtime, AnyError> {
    let mut runtime = rustyscript::Runtime::with_tokio_runtime(
      rustyscript::RuntimeOptions {
        import_provider: Some(Box::new(crate::js::import_provider::ImportProviderImpl)),
        schema_whlist: HashSet::from(["trailbase".to_string()]),
        ..Default::default()
      },
      tokio_runtime,
    )?;

    // Expose the worker index to JS.
    runtime
      .register_function("isolate_id", move |_args: &[serde_json::Value]| {
        return Ok(serde_json::json!(index));
      })
      .expect("Failed to register 'isolate_id' function");

    // `query(sql, params)` -> rows encoded as JSON arrays.
    runtime.register_async_function("query", move |args: Vec<serde_json::Value>| {
      Box::pin(async move {
        let query: String = get_arg(&args, 0)?;
        let params = json_values_to_params(get_arg(&args, 1)?)?;

        // The connection is injected later via `RuntimeHandle::set_connection`.
        let Some(conn) = get_runtime(None).state[index].connection.lock().clone() else {
          return Err(rustyscript::Error::Runtime(
            "missing db connection".to_string(),
          ));
        };

        let rows = conn
          .write_query_rows(query, params)
          .await
          .map_err(|err| rustyscript::Error::Runtime(err.to_string()))?;

        let (values, _columns) = rows_to_json_arrays(rows, usize::MAX)
          .map_err(|err| rustyscript::Error::Runtime(err.to_string()))?;

        return Ok(serde_json::json!(values));
      })
    })?;

    // `execute(sql, params)` -> number of affected rows.
    runtime.register_async_function("execute", move |args: Vec<serde_json::Value>| {
      Box::pin(async move {
        let query: String = get_arg(&args, 0)?;
        let params = json_values_to_params(get_arg(&args, 1)?)?;

        let Some(conn) = get_runtime(None).state[index].connection.lock().clone() else {
          return Err(rustyscript::Error::Runtime(
            "missing db connection".to_string(),
          ));
        };

        let rows_affected = conn
          .execute(query, params)
          .await
          .map_err(|err| rustyscript::Error::Runtime(err.to_string()))?;

        return Ok(serde_json::Value::Number(rows_affected.into()));
      })
    })?;

    return Ok(runtime);
  }
}
|
||||
|
||||
// NOTE: Repeated runtime initialization, e.g. in a multi-threaded context, leads to segfaults.
// rustyscript::init_platform is supposed to help with this but we haven't found a way to
// make it work. Thus, we're making the V8 VM a singleton (like Dart's).
//
// NOTE(review): `n_threads` only takes effect on the very first call — `OnceLock`
// runs the initializer exactly once, so later callers receive the already-built
// singleton regardless of the value they pass.
fn get_runtime(n_threads: Option<usize>) -> &'static RuntimeSingleton {
  static RUNTIME: OnceLock<RuntimeSingleton> = OnceLock::new();
  return RUNTIME.get_or_init(move || RuntimeSingleton::new_with_threads(n_threads));
}
|
||||
|
||||
/// Cheap, clonable handle to the process-wide v8 runtime singleton.
#[derive(Clone)]
pub(crate) struct RuntimeHandle {
  // Static reference to the lazily-initialized singleton; see `get_runtime`.
  runtime: &'static RuntimeSingleton,
}
|
||||
|
||||
impl RuntimeHandle {
  /// Install the SQLite connection into every worker's per-isolate state.
  ///
  /// In non-test builds a second installation is a bug and panics; in tests the
  /// singleton runtime is shared across serial tests, so re-setting is tolerated
  /// and merely logged.
  pub(crate) fn set_connection(&self, conn: trailbase_sqlite::Connection) {
    for s in &self.runtime.state {
      let mut lock = s.connection.lock();
      if lock.is_some() {
        #[cfg(not(test))]
        panic!("connection already set");

        #[cfg(test)]
        debug!("connection already set");
      } else {
        lock.replace(conn.clone());
      }
    }
  }

  /// Handle to the singleton with the default worker count (one per core).
  pub(crate) fn new() -> Self {
    return Self {
      runtime: get_runtime(None),
    };
  }

  /// Handle to the singleton with an explicit worker count.
  ///
  /// NOTE: only effective if this is the first accessor; `get_runtime` ignores the
  /// argument once the singleton exists.
  pub(crate) fn new_with_threads(n_threads: usize) -> Self {
    return Self {
      runtime: get_runtime(Some(n_threads)),
    };
  }

  // Per-worker state (private sender + connection slot), one entry per isolate.
  fn state(&self) -> &'static Vec<State> {
    return &self.runtime.state;
  }
}
|
||||
|
||||
/// Invoke JS function `name` (optionally within `module`) via the given message
/// channel and deserialize its JSON result into `T`.
///
/// Errors if the channel is closed, the JS call itself fails, or the returned JSON
/// does not deserialize into `T`.
async fn call_function<T>(
  sender: &kanal::AsyncSender<Message>,
  module: Option<Module>,
  name: &'static str,
  args: Vec<serde_json::Value>,
) -> Result<T, AnyError>
where
  T: serde::de::DeserializeOwned,
{
  let (resp_sender, resp_receiver) = oneshot::channel::<Result<serde_json::Value, AnyError>>();
  sender
    .send(Message::CallFunction(module, name, args, resp_sender))
    .await?;

  // Double `?`: first for a dropped/closed reply channel, second for the JS-side error.
  return Ok(serde_json::from_value::<T>(resp_receiver.await??)?);
}
|
||||
|
||||
fn json_value_to_param(
|
||||
value: serde_json::Value,
|
||||
) -> Result<trailbase_sqlite::Value, rustyscript::Error> {
|
||||
use rustyscript::Error;
|
||||
return Ok(match value {
|
||||
serde_json::Value::Object(ref _map) => {
|
||||
return Err(Error::Runtime("Object unsupported".to_string()));
|
||||
}
|
||||
serde_json::Value::Array(ref _arr) => {
|
||||
return Err(Error::Runtime("Array unsupported".to_string()));
|
||||
}
|
||||
serde_json::Value::Null => trailbase_sqlite::Value::Null,
|
||||
serde_json::Value::Bool(b) => trailbase_sqlite::Value::Integer(b as i64),
|
||||
serde_json::Value::String(str) => trailbase_sqlite::Value::Text(str),
|
||||
serde_json::Value::Number(number) => {
|
||||
if let Some(n) = number.as_i64() {
|
||||
trailbase_sqlite::Value::Integer(n)
|
||||
} else if let Some(n) = number.as_u64() {
|
||||
trailbase_sqlite::Value::Integer(n as i64)
|
||||
} else if let Some(n) = number.as_f64() {
|
||||
trailbase_sqlite::Value::Real(n)
|
||||
} else {
|
||||
return Err(Error::Runtime(format!("invalid number: {number:?}")));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn json_values_to_params(
|
||||
values: Vec<serde_json::Value>,
|
||||
) -> Result<Vec<trailbase_sqlite::Value>, rustyscript::Error> {
|
||||
return values.into_iter().map(json_value_to_param).collect();
|
||||
}
|
||||
|
||||
impl IntoResponse for JsResponseError {
|
||||
fn into_response(self) -> Response {
|
||||
let (status, body): (StatusCode, Option<String>) = match self {
|
||||
Self::Precondition(err) => (StatusCode::PRECONDITION_FAILED, Some(err.to_string())),
|
||||
Self::Internal(err) => (StatusCode::INTERNAL_SERVER_ERROR, Some(err.to_string())),
|
||||
};
|
||||
|
||||
if let Some(body) = body {
|
||||
return Response::builder()
|
||||
.status(status)
|
||||
.header(CONTENT_TYPE, "text/plain")
|
||||
.body(Body::new(body))
|
||||
.unwrap_or_default();
|
||||
}
|
||||
|
||||
return Response::builder()
|
||||
.status(status)
|
||||
.body(Body::empty())
|
||||
.unwrap_or_default();
|
||||
}
|
||||
}
|
||||
|
||||
/// Get's called from JS during `addRoute` and installs an axum HTTP handler.
|
||||
///
|
||||
/// The axum HTTP handler will then call back into the registered callback in JS.
|
||||
@@ -551,7 +33,7 @@ fn add_route_to_router(
|
||||
let (parts, body) = req.into_parts();
|
||||
|
||||
let Ok(body_bytes) = axum::body::to_bytes(body, usize::MAX).await else {
|
||||
return Err(JsResponseError::Precondition(
|
||||
return Err(JsHttpResponseError::Precondition(
|
||||
"request deserialization failed".to_string(),
|
||||
));
|
||||
};
|
||||
@@ -579,13 +61,11 @@ fn add_route_to_router(
|
||||
csrf: u.csrf_token,
|
||||
});
|
||||
|
||||
let (sender, receiver) = oneshot::channel::<Result<JsResponse, JsResponseError>>();
|
||||
let (sender, receiver) = oneshot::channel::<Result<JsHttpResponse, JsHttpResponseError>>();
|
||||
|
||||
debug!("dispatch {method} {uri}");
|
||||
runtime_handle
|
||||
.runtime
|
||||
.shared_sender
|
||||
.send(Message::HttpDispatch(DispatchArgs {
|
||||
.send_to_any_isolate(Message::HttpDispatch(DispatchArgs {
|
||||
method,
|
||||
route_path,
|
||||
uri: uri.to_string(),
|
||||
@@ -596,24 +76,24 @@ fn add_route_to_router(
|
||||
reply: sender,
|
||||
}))
|
||||
.await
|
||||
.map_err(|_err| JsResponseError::Internal("send failed".into()))?;
|
||||
.map_err(|_err| JsHttpResponseError::Internal("send failed".into()))?;
|
||||
|
||||
let js_response = receiver
|
||||
.await
|
||||
.map_err(|_err| JsResponseError::Internal("receive failed".into()))??;
|
||||
.map_err(|_err| JsHttpResponseError::Internal("receive failed".into()))??;
|
||||
|
||||
let mut http_response = Response::builder()
|
||||
.status(js_response.status.unwrap_or(200))
|
||||
.body(Body::from(js_response.body.unwrap_or_default()))
|
||||
.map_err(|err| JsResponseError::Internal(err.into()))?;
|
||||
.map_err(|err| JsHttpResponseError::Internal(err.into()))?;
|
||||
|
||||
if let Some(headers) = js_response.headers {
|
||||
for (key, value) in headers {
|
||||
http_response.headers_mut().insert(
|
||||
HeaderName::from_str(key.as_str())
|
||||
.map_err(|err| JsResponseError::Internal(err.into()))?,
|
||||
.map_err(|err| JsHttpResponseError::Internal(err.into()))?,
|
||||
HeaderValue::from_str(value.as_str())
|
||||
.map_err(|err| JsResponseError::Internal(err.into()))?,
|
||||
.map_err(|err| JsHttpResponseError::Internal(err.into()))?,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -639,17 +119,6 @@ fn add_route_to_router(
|
||||
));
|
||||
}
|
||||
|
||||
fn get_arg<T>(args: &[serde_json::Value], i: usize) -> Result<T, rustyscript::Error>
|
||||
where
|
||||
T: serde::de::DeserializeOwned,
|
||||
{
|
||||
use rustyscript::Error;
|
||||
let arg = args
|
||||
.get(i)
|
||||
.ok_or_else(|| Error::Runtime(format!("Range err {i} > {}", args.len())))?;
|
||||
return from_value::<T>(arg.clone()).map_err(|err| Error::Runtime(err.to_string()));
|
||||
}
|
||||
|
||||
async fn install_routes_and_jobs(
|
||||
state: &AppState,
|
||||
module: Module,
|
||||
@@ -669,20 +138,19 @@ async fn install_routes_and_jobs(
|
||||
|
||||
let (router_sender, router_receiver) = kanal::unbounded::<Router<AppState>>();
|
||||
|
||||
let runtime_handle_clone = runtime_handle.clone();
|
||||
if let Err(err) = state
|
||||
.private_sender
|
||||
.send(Message::Run(Box::new(move |runtime: &mut Runtime| {
|
||||
.send_privately(Message::Run(Box::new(move |runtime: &mut Runtime| {
|
||||
// First install a native callbacks.
|
||||
//
|
||||
// Register native callback for building axum router.
|
||||
let runtime_handle_clone = runtime_handle.clone();
|
||||
runtime
|
||||
.register_function("install_route", move |args: &[serde_json::Value]| {
|
||||
let method: String = get_arg(args, 0)?;
|
||||
let route: String = get_arg(args, 1)?;
|
||||
|
||||
let router = add_route_to_router(runtime_handle_clone.clone(), method, route)
|
||||
.map_err(|err| rustyscript::Error::Runtime(err.to_string()))?;
|
||||
.map_err(|err| RSError::Runtime(err.to_string()))?;
|
||||
|
||||
router_sender.send(router).expect("send");
|
||||
|
||||
@@ -699,11 +167,7 @@ async fn install_routes_and_jobs(
|
||||
return RSError::Runtime(err.to_string());
|
||||
})?;
|
||||
|
||||
let Some(first_isolate) = runtime_handle.state().first() else {
|
||||
return Err(RSError::Runtime("Missing isolate".to_string()));
|
||||
};
|
||||
let first_isolate_sender = first_isolate.private_sender.clone();
|
||||
|
||||
let runtime_handle = runtime_handle.clone();
|
||||
let (id_sender, id_receiver) = oneshot::channel::<serde_json::Value>();
|
||||
let id_receiver = id_receiver.shared();
|
||||
|
||||
@@ -712,17 +176,26 @@ async fn install_routes_and_jobs(
|
||||
name,
|
||||
schedule,
|
||||
crate::scheduler::build_callback(move || {
|
||||
let first_isolate_sender = first_isolate_sender.clone();
|
||||
let runtime_handle = runtime_handle.clone();
|
||||
let id_receiver = id_receiver.clone();
|
||||
|
||||
return async move {
|
||||
if let Some(msg) = call_function::<Option<String>>(
|
||||
&first_isolate_sender,
|
||||
None,
|
||||
"__dispatchCron",
|
||||
vec![id_receiver.await?],
|
||||
)
|
||||
.await?
|
||||
let Some(first_isolate) = runtime_handle.state().first() else {
|
||||
return Err("Missing isolate".into());
|
||||
};
|
||||
|
||||
let (resp_sender, resp_receiver) = oneshot::channel();
|
||||
first_isolate
|
||||
.send_privately(Message::CallFunction(
|
||||
None,
|
||||
"__dispatchCron",
|
||||
vec![id_receiver.await?],
|
||||
resp_sender,
|
||||
))
|
||||
.await?;
|
||||
|
||||
if let Some(msg) =
|
||||
serde_json::from_value::<Option<String>>(resp_receiver.await??)?
|
||||
{
|
||||
return Err(msg.into());
|
||||
}
|
||||
@@ -751,7 +224,7 @@ async fn install_routes_and_jobs(
|
||||
}
|
||||
|
||||
// Then execute the script/module, i.e. statements in the file scope.
|
||||
if let Err(err) = await_loading_module(state, module).await {
|
||||
if let Err(err) = state.load_module(module).await {
|
||||
error!("Failed to load module: {err}");
|
||||
return None;
|
||||
}
|
||||
@@ -785,33 +258,17 @@ async fn install_routes_and_jobs(
|
||||
return Ok(receivers.swap_remove(0));
|
||||
}
|
||||
|
||||
async fn await_loading_module(state: &State, module: Module) -> Result<(), AnyError> {
|
||||
let (sender, receiver) = oneshot::channel::<Result<(), AnyError>>();
|
||||
|
||||
state
|
||||
.private_sender
|
||||
.send(Message::LoadModule(module, sender))
|
||||
.await?;
|
||||
|
||||
let _ = receiver.await.map_err(|err| {
|
||||
error!("Failed to await module loading: {err}");
|
||||
return err;
|
||||
})?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
pub(crate) async fn load_routes_and_jobs_from_js_modules(
|
||||
state: &AppState,
|
||||
) -> Result<Option<Router<AppState>>, AnyError> {
|
||||
let runtime_handle = state.script_runtime();
|
||||
if runtime_handle.runtime.n_threads == 0 {
|
||||
if runtime_handle.num_threads() == 0 {
|
||||
info!("JS threads set to zero. Skipping initialization for JS modules");
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let scripts_dir = state.data_dir().root().join("scripts");
|
||||
let modules = match rustyscript::Module::load_dir(scripts_dir.clone()) {
|
||||
let modules = match Module::load_dir(scripts_dir.clone()) {
|
||||
Ok(modules) => modules,
|
||||
Err(err) => {
|
||||
debug!("Skip loading js modules from '{scripts_dir:?}': {err}");
|
||||
@@ -836,194 +293,3 @@ pub(crate) async fn load_routes_and_jobs_from_js_modules(
|
||||
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
pub(crate) async fn write_js_runtime_files(data_dir: &DataDir) {
|
||||
if let Err(err) = tokio::fs::write(
|
||||
data_dir.root().join("trailbase.js"),
|
||||
cow_to_string(
|
||||
JsRuntimeAssets::get("index.js")
|
||||
.expect("Failed to read rt/index.js")
|
||||
.data,
|
||||
)
|
||||
.as_str(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
warn!("Failed to write 'trailbase.js': {err}");
|
||||
}
|
||||
|
||||
if let Err(err) = tokio::fs::write(
|
||||
data_dir.root().join("trailbase.d.ts"),
|
||||
cow_to_string(
|
||||
JsRuntimeAssets::get("index.d.ts")
|
||||
.expect("Failed to read rt/index.d.ts")
|
||||
.data,
|
||||
)
|
||||
.as_str(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
warn!("Failed to write 'trailbase.d.ts': {err}");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
  use super::*;
  use rustyscript::Module;

  // Single entry point deliberately chaining all cases in order.
  #[tokio::test]
  async fn test_serial_tests() {
    // NOTE: needs to run serially since registration of SQLite connection with singleton v8
    // runtime is racy.
    test_runtime_apply().await;
    test_runtime_javascript().await;
    test_javascript_query().await;
    test_javascript_execute().await;
  }

  // Smoke test: a `Message::Run` closure sent on the shared channel executes on some
  // worker and can signal back over a oneshot channel.
  async fn test_runtime_apply() {
    let (sender, receiver) = tokio::sync::oneshot::channel::<i64>();

    let handle = RuntimeHandle::new();
    handle
      .runtime
      .shared_sender
      .send(Message::Run(Box::new(|_rt| {
        sender.send(5).unwrap();
      })))
      .await
      .unwrap();

    assert_eq!(5, receiver.await.unwrap());
  }

  // Loads a plain JS module and calls an exported function end-to-end.
  async fn test_runtime_javascript() {
    let handle = RuntimeHandle::new();

    // Quiet down spammy trace spans for the duration of the test.
    tracing_subscriber::Registry::default()
      .with(tracing_subscriber::filter::LevelFilter::WARN)
      .set_default();
    let module = Module::new(
      "module.js",
      r#"
        export function test_fun() {
          return "test0";
        }
      "#,
    );

    let result = call_function::<String>(
      &handle.runtime.shared_sender,
      Some(module),
      "test_fun",
      vec![],
    )
    .await
    .unwrap();
    assert_eq!("test0", result);
  }

  // Test-only variant of `RuntimeHandle::set_connection` that overwrites an already
  // installed connection instead of panicking (the singleton persists across tests).
  fn override_connection(handle: &RuntimeHandle, conn: trailbase_sqlite::Connection) {
    for s in &handle.runtime.state {
      let mut lock = s.connection.lock();
      if lock.is_some() {
        debug!("connection already set");
      }
      lock.replace(conn.clone());
    }
  }

  // Exercises the native `query` function registered with the runtime via a TS module.
  async fn test_javascript_query() {
    let conn = trailbase_sqlite::Connection::open_in_memory().unwrap();
    conn
      .execute("CREATE TABLE test (v0 TEXT, v1 INTEGER);", ())
      .await
      .unwrap();
    conn
      .execute("INSERT INTO test (v0, v1) VALUES ('0', 0), ('1', 1);", ())
      .await
      .unwrap();

    let handle = RuntimeHandle::new();
    override_connection(&handle, conn);

    tracing_subscriber::Registry::default()
      .with(tracing_subscriber::filter::LevelFilter::WARN)
      .set_default();
    let module = Module::new(
      "module.ts",
      r#"
        import { query } from "trailbase:main";

        export async function test_query(queryStr: string) : Promise<unknown[][]> {
          return await query(queryStr, []);
        }
      "#,
    );

    let result = call_function::<Vec<Vec<serde_json::Value>>>(
      &handle.runtime.shared_sender,
      Some(module),
      "test_query",
      vec![serde_json::json!("SELECT * FROM test")],
    )
    .await
    .unwrap();

    // Rows come back as JSON arrays in insertion order.
    assert_eq!(
      vec![
        vec![
          serde_json::Value::String("0".to_string()),
          serde_json::Value::Number(0.into())
        ],
        vec![
          serde_json::Value::String("1".to_string()),
          serde_json::Value::Number(1.into())
        ],
      ],
      result
    );
  }

  // Exercises the native `execute` function and verifies its side effect on the DB.
  async fn test_javascript_execute() {
    let conn = trailbase_sqlite::Connection::open_in_memory().unwrap();
    conn
      .execute("CREATE TABLE test (v0 TEXT, v1 INTEGER);", ())
      .await
      .unwrap();

    let handle = RuntimeHandle::new();
    override_connection(&handle, conn.clone());

    tracing_subscriber::Registry::default()
      .with(tracing_subscriber::filter::LevelFilter::WARN)
      .set_default();
    let module = Module::new(
      "module.ts",
      r#"
        import { execute } from "trailbase:main";

        export async function test_execute(queryStr: string) : Promise<number> {
          return await execute(queryStr, []);
        }
      "#,
    );

    let _result = call_function::<i64>(
      &handle.runtime.shared_sender,
      Some(module),
      "test_execute",
      vec![serde_json::json!("DELETE FROM test")],
    )
    .await
    .unwrap();

    // Verify the DELETE actually ran against the shared connection.
    let count: i64 = conn
      .read_query_row_f("SELECT COUNT(*) FROM test", (), |row| row.get(0))
      .await
      .unwrap()
      .unwrap();
    assert_eq!(0, count);
  }
}
|
||||
|
||||
@@ -476,7 +476,7 @@ impl RecordApi {
|
||||
pub(crate) fn check_record_level_read_access_for_subscriptions(
|
||||
&self,
|
||||
conn: &rusqlite::Connection,
|
||||
record: &[(&str, rusqlite::types::ValueRef<'_>)],
|
||||
record: &[(&str, &rusqlite::types::Value)],
|
||||
user: Option<&User>,
|
||||
) -> Result<(), RecordError> {
|
||||
// First check table level access and if present check row-level access based on access rule.
|
||||
@@ -499,7 +499,7 @@ impl RecordApi {
|
||||
params.extend(record.iter().map(|(name, value)| {
|
||||
(
|
||||
Cow::Owned(prefix_colon(name)),
|
||||
ToSqlOutput::Borrowed(*value),
|
||||
ToSqlOutput::Borrowed((*value).into()),
|
||||
)
|
||||
}));
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
use base64::prelude::*;
|
||||
use log::*;
|
||||
use std::collections::HashMap;
|
||||
use thiserror::Error;
|
||||
use trailbase_schema::sqlite::Column;
|
||||
use trailbase_schema::sqlite::ColumnOption;
|
||||
use trailbase_schema::sqlite::{Column, ColumnDataType};
|
||||
use trailbase_sqlite::rows::value_to_json;
|
||||
|
||||
use crate::table_metadata::JsonColumnMetadata;
|
||||
|
||||
@@ -11,10 +11,6 @@ use crate::table_metadata::JsonColumnMetadata;
|
||||
pub enum JsonError {
|
||||
#[error("SerdeJson error: {0}")]
|
||||
SerdeJson(#[from] serde_json::Error),
|
||||
#[error("Malformed bytes, len {0}")]
|
||||
MalformedBytes(usize),
|
||||
#[error("Row not found")]
|
||||
RowNotFound,
|
||||
#[error("Float not finite")]
|
||||
Finite,
|
||||
#[error("Value not found")]
|
||||
@@ -23,23 +19,13 @@ pub enum JsonError {
|
||||
MissingColumnName,
|
||||
}
|
||||
|
||||
pub(crate) fn valueref_to_json(
|
||||
value: rusqlite::types::ValueRef<'_>,
|
||||
) -> Result<serde_json::Value, JsonError> {
|
||||
use rusqlite::types::ValueRef;
|
||||
|
||||
return Ok(match value {
|
||||
ValueRef::Null => serde_json::Value::Null,
|
||||
ValueRef::Real(real) => {
|
||||
let Some(number) = serde_json::Number::from_f64(real) else {
|
||||
return Err(JsonError::Finite);
|
||||
};
|
||||
serde_json::Value::Number(number)
|
||||
}
|
||||
ValueRef::Integer(integer) => serde_json::Value::Number(serde_json::Number::from(integer)),
|
||||
ValueRef::Blob(blob) => serde_json::Value::String(BASE64_URL_SAFE.encode(blob)),
|
||||
ValueRef::Text(text) => serde_json::Value::String(String::from_utf8_lossy(text).to_string()),
|
||||
});
|
||||
impl From<trailbase_sqlite::rows::JsonError> for JsonError {
|
||||
fn from(value: trailbase_sqlite::rows::JsonError) -> Self {
|
||||
return match value {
|
||||
trailbase_sqlite::rows::JsonError::ValueNotFound => Self::ValueNotFound,
|
||||
trailbase_sqlite::rows::JsonError::Finite => Self::Finite,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/// Serialize SQL row to json.
|
||||
@@ -91,10 +77,10 @@ pub fn row_to_json_expand(
|
||||
|
||||
if let Some(foreign_value) = expand.and_then(|e| e.get(column_name)) {
|
||||
if is_foreign_key(&column.options) {
|
||||
let id = match valueref_to_json(value.into()) {
|
||||
let id = match value_to_json(value) {
|
||||
Ok(value) => value,
|
||||
Err(err) => {
|
||||
return Some(Err(err));
|
||||
return Some(Err(err.into()));
|
||||
}
|
||||
};
|
||||
|
||||
@@ -126,9 +112,9 @@ pub fn row_to_json_expand(
|
||||
}
|
||||
}
|
||||
|
||||
return match valueref_to_json(value.into()) {
|
||||
return match value_to_json(value) {
|
||||
Ok(value) => Some(Ok((column_name.to_string(), value))),
|
||||
Err(err) => Some(Err(err)),
|
||||
Err(err) => Some(Err(err.into())),
|
||||
};
|
||||
})
|
||||
.collect::<Result<serde_json::Map<_, _>, JsonError>>()?;
|
||||
@@ -163,70 +149,6 @@ pub fn rows_to_json_expand(
|
||||
.collect::<Result<Vec<_>, JsonError>>();
|
||||
}
|
||||
|
||||
pub fn row_to_json_array(row: &trailbase_sqlite::Row) -> Result<Vec<serde_json::Value>, JsonError> {
|
||||
let cols = row.column_count();
|
||||
let mut json_row = Vec::<serde_json::Value>::with_capacity(cols);
|
||||
|
||||
for i in 0..cols {
|
||||
let value = row.get_value(i).ok_or(JsonError::ValueNotFound)?;
|
||||
json_row.push(valueref_to_json(value.into())?);
|
||||
}
|
||||
|
||||
return Ok(json_row);
|
||||
}
|
||||
|
||||
/// Best-effort conversion from row values to column definition.
|
||||
///
|
||||
/// WARN: This is lossy and whenever possible we should rely on parsed "CREATE TABLE" statement for
|
||||
/// the respective column.
|
||||
fn rows_to_columns(rows: &trailbase_sqlite::Rows) -> Result<Vec<Column>, rusqlite::Error> {
|
||||
use trailbase_sqlite::ValueType as T;
|
||||
|
||||
let mut columns: Vec<Column> = vec![];
|
||||
for i in 0..rows.column_count() {
|
||||
columns.push(Column {
|
||||
name: rows.column_name(i).unwrap_or("<missing>").to_string(),
|
||||
data_type: match rows.column_type(i).unwrap_or(T::Null) {
|
||||
T::Real => ColumnDataType::Real,
|
||||
T::Text => ColumnDataType::Text,
|
||||
T::Integer => ColumnDataType::Integer,
|
||||
T::Null => ColumnDataType::Null,
|
||||
T::Blob => ColumnDataType::Blob,
|
||||
},
|
||||
// We cannot derive the options from a row of data.
|
||||
options: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
return Ok(columns);
|
||||
}
|
||||
|
||||
type Row = Vec<serde_json::Value>;
|
||||
|
||||
pub fn rows_to_json_arrays(
|
||||
rows: trailbase_sqlite::Rows,
|
||||
limit: usize,
|
||||
) -> Result<(Vec<Row>, Option<Vec<Column>>), JsonError> {
|
||||
let columns = match rows_to_columns(&rows) {
|
||||
Ok(columns) => Some(columns),
|
||||
Err(err) => {
|
||||
debug!("Failed to get column def: {err}");
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
let mut json_rows: Vec<Vec<serde_json::Value>> = vec![];
|
||||
for (idx, row) in rows.iter().enumerate() {
|
||||
if idx >= limit {
|
||||
break;
|
||||
}
|
||||
|
||||
json_rows.push(row_to_json_array(row)?);
|
||||
}
|
||||
|
||||
return Ok((json_rows, columns));
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
|
||||
@@ -17,11 +17,11 @@ use std::sync::{
|
||||
};
|
||||
use std::task::{Context, Poll};
|
||||
use trailbase_sqlite::connection::{extract_record_values, extract_row_id};
|
||||
use trailbase_sqlite::rows::value_to_json;
|
||||
|
||||
use crate::AppState;
|
||||
use crate::auth::user::User;
|
||||
use crate::records::RecordApi;
|
||||
use crate::records::sql_to_json::valueref_to_json;
|
||||
use crate::records::{Permission, RecordError};
|
||||
use crate::table_metadata::{TableMetadata, TableMetadataCache};
|
||||
use crate::value_notifier::Computed;
|
||||
@@ -257,7 +257,7 @@ impl SubscriptionManager {
|
||||
conn: &rusqlite::Connection,
|
||||
subs: &[Subscription],
|
||||
record_subscriptions: bool,
|
||||
record: &[(&str, rusqlite::types::ValueRef<'_>)],
|
||||
record: &[(&str, &rusqlite::types::Value)],
|
||||
event: &Event,
|
||||
) -> Vec<usize> {
|
||||
let mut dead_subscriptions: Vec<usize> = vec![];
|
||||
@@ -330,10 +330,10 @@ impl SubscriptionManager {
|
||||
};
|
||||
|
||||
// Join values with column names.
|
||||
let record: Vec<(&str, rusqlite::types::ValueRef<'_>)> = record_values
|
||||
let record: Vec<(&str, &rusqlite::types::Value)> = record_values
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, v)| (table_metadata.schema.columns[idx].name.as_str(), v.into()))
|
||||
.map(|(idx, v)| (table_metadata.schema.columns[idx].name.as_str(), v))
|
||||
.collect();
|
||||
|
||||
// Build a JSON-encoded SQLite event (insert, update, delete).
|
||||
@@ -342,7 +342,7 @@ impl SubscriptionManager {
|
||||
record
|
||||
.iter()
|
||||
.filter_map(|(name, value)| {
|
||||
if let Ok(v) = valueref_to_json(*value) {
|
||||
if let Ok(v) = value_to_json(value) {
|
||||
return Some(((*name).to_string(), v));
|
||||
};
|
||||
return None;
|
||||
|
||||
@@ -110,7 +110,7 @@ pub async fn init_app_state(
|
||||
|
||||
// Write out the latest .js/.d.ts runtime files.
|
||||
#[cfg(feature = "v8")]
|
||||
crate::js::write_js_runtime_files(&data_dir).await;
|
||||
trailbase_js::runtime::write_js_runtime_files(data_dir.root()).await;
|
||||
|
||||
let app_state = AppState::new(AppStateArgs {
|
||||
data_dir: data_dir.clone(),
|
||||
|
||||
@@ -166,7 +166,7 @@ impl Server {
|
||||
|
||||
#[cfg(feature = "v8")]
|
||||
let js_routes: Option<Router<AppState>> =
|
||||
crate::js::load_routes_and_jobs_from_js_modules(&state)
|
||||
crate::js::runtime::load_routes_and_jobs_from_js_modules(&state)
|
||||
.await
|
||||
.map_err(|err| InitError::ScriptError(err.to_string()))?;
|
||||
|
||||
|
||||
31
trailbase-js/Cargo.toml
Normal file
31
trailbase-js/Cargo.toml
Normal file
@@ -0,0 +1,31 @@
|
||||
# Standalone crate hosting TrailBase's v8/deno-based JavaScript runtime,
# split out of the main `trailbase` crate.
[package]
name = "trailbase-js"
version = "0.1.0"
edition = "2024"
license = "OSL-3.0"
description = "JS runtime for the TrailBase framework"
homepage = "https://trailbase.io"
readme = "../README.md"
# Keep the packaged crate small: JS build inputs/outputs are not needed by consumers.
exclude = [
  "**/node_modules/",
  "**/dist/",
]

[dependencies]
axum = { workspace = true }
bytes = { version = "1.8.0", features = ["serde"] }
futures-util = { version = "0.3", default-features = false, features = ["alloc"] }
kanal = "0.1.1"
log = { version = "^0.4.21", default-features = false }
parking_lot = { version = "0.12.3", default-features = false }
rust-embed = { workspace = true }
# Embedded deno/v8 runtime; "web" + "fs" enable fetch/file APIs for scripts.
rustyscript = { version = "^0.11.0", features = ["web", "fs"] }
serde = { version = "^1.0.203", features = ["derive"] }
serde_json = "^1.0.117"
thiserror = "2.0.1"
tokio = { workspace = true }
tracing-subscriber = { workspace = true }
trailbase-sqlite = { workspace = true }

# Builds the TS runtime assets embedded via `JsRuntimeAssets` (see build.rs).
[build-dependencies]
trailbase-build = { workspace = true }
|
||||
14
trailbase-js/build.rs
Normal file
14
trailbase-js/build.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
#![allow(clippy::needless_return)]
|
||||
|
||||
use std::{io::Result, path::PathBuf};
|
||||
|
||||
fn main() -> Result<()> {
|
||||
trailbase_build::init_env_logger();
|
||||
|
||||
let path = PathBuf::from("assets/runtime");
|
||||
trailbase_build::rerun_if_changed(path.join("src/"));
|
||||
|
||||
trailbase_build::build_js(path)?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
@@ -2,14 +2,14 @@ use rustyscript::deno_core::{
|
||||
ModuleSpecifier, RequestedModuleType, ResolutionKind,
|
||||
anyhow::{Error, anyhow},
|
||||
};
|
||||
use rustyscript::module_loader::ImportProvider;
|
||||
use rustyscript::module_loader::ImportProvider as RustyScriptImportProvider;
|
||||
|
||||
use crate::util::cow_to_string;
|
||||
|
||||
#[derive(Default)]
|
||||
pub(crate) struct ImportProviderImpl;
|
||||
pub struct ImportProvider;
|
||||
|
||||
impl ImportProvider for ImportProviderImpl {
|
||||
impl RustyScriptImportProvider for ImportProvider {
|
||||
fn resolve(
|
||||
&mut self,
|
||||
specifier: &ModuleSpecifier,
|
||||
@@ -41,7 +41,7 @@ impl ImportProvider for ImportProviderImpl {
|
||||
match specifier.scheme() {
|
||||
"trailbase" => {
|
||||
return Some(Ok(cow_to_string(
|
||||
trailbase_assets::JsRuntimeAssets::get("index.js")
|
||||
crate::JsRuntimeAssets::get("index.js")
|
||||
.expect("Failed to read rt/index.js")
|
||||
.data,
|
||||
)));
|
||||
13
trailbase-js/src/lib.rs
Normal file
13
trailbase-js/src/lib.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
#![forbid(unsafe_code, clippy::unwrap_used)]
#![allow(clippy::needless_return)]
#![warn(clippy::await_holding_lock, clippy::inefficient_to_string)]

pub mod import_provider;
pub mod runtime;
mod util;

pub use crate::import_provider::ImportProvider;

/// Pre-built JS runtime assets (e.g. `index.js`, `index.d.ts`) produced by
/// `build.rs` from `assets/runtime` and embedded into the binary.
#[derive(rust_embed::RustEmbed, Clone)]
#[folder = "assets/runtime/dist/"]
pub struct JsRuntimeAssets;
|
||||
770
trailbase-js/src/runtime.rs
Normal file
770
trailbase-js/src/runtime.rs
Normal file
@@ -0,0 +1,770 @@
|
||||
use axum::body::Body;
|
||||
use axum::http::{StatusCode, header::CONTENT_TYPE};
|
||||
use axum::response::{IntoResponse, Response};
|
||||
use log::*;
|
||||
use parking_lot::Mutex;
|
||||
use rustyscript::{deno_core::PollEventLoopOptions, init_platform, js_value::Promise, json_args};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use std::sync::OnceLock;
|
||||
use std::time::Duration;
|
||||
use thiserror::Error;
|
||||
use tokio::sync::oneshot;
|
||||
use tracing_subscriber::prelude::*;
|
||||
use trailbase_sqlite::rows::{JsonError, row_to_json_array};
|
||||
|
||||
use crate::JsRuntimeAssets;
|
||||
use crate::util::cow_to_string;
|
||||
|
||||
pub use rustyscript::{Error, Module, Runtime};
|
||||
|
||||
// Convenience alias for boxed, thread-safe errors crossing isolate boundaries.
type AnyError = Box<dyn std::error::Error + Send + Sync>;

/// HTTP response as deserialized from a JS handler; every part is optional.
#[derive(Deserialize, Default, Debug)]
pub struct JsHttpResponse {
  pub headers: Option<Vec<(String, String)>>,
  pub status: Option<u16>,
  pub body: Option<bytes::Bytes>,
}

/// Errors surfaced when dispatching an HTTP request to a JS handler.
#[derive(Debug, Error)]
pub enum JsHttpResponseError {
  #[error("Precondition: {0}")]
  Precondition(String),
  #[error("Internal: {0}")]
  Internal(Box<dyn std::error::Error + Send + Sync>),
}

/// Authenticated user information forwarded to JS handlers.
#[derive(Serialize)]
pub struct JsUser {
  // Base64 encoded user id.
  pub id: String,
  pub email: String,
  pub csrf: String,
}

/// Everything needed to dispatch one HTTP request to the JS side, plus the
/// one-shot channel used to hand the response back to the waiting handler.
pub struct DispatchArgs {
  pub method: String,
  pub route_path: String,
  pub uri: String,
  pub path_params: Vec<(String, String)>,
  pub headers: Vec<(String, String)>,
  pub user: Option<JsUser>,
  pub body: bytes::Bytes,

  pub reply: oneshot::Sender<Result<JsHttpResponse, JsHttpResponseError>>,
}

/// Work items processed by the per-isolate event loops (see `event_loop`).
pub enum Message {
  /// Run an arbitrary closure on the isolate's thread.
  Run(Box<dyn (FnOnce(&mut Runtime)) + Send + Sync>),
  /// Dispatch an HTTP request to the JS `__dispatch` entry point.
  HttpDispatch(DispatchArgs),
  /// Call a named function — optionally loading a module first — and reply
  /// with its JSON result.
  CallFunction(
    Option<Module>,
    &'static str,
    Vec<serde_json::Value>,
    oneshot::Sender<Result<serde_json::Value, AnyError>>,
  ),
  /// Load a module and acknowledge completion.
  LoadModule(Module, oneshot::Sender<Result<(), AnyError>>),
}
|
||||
|
||||
pub struct State {
|
||||
private_sender: kanal::AsyncSender<Message>,
|
||||
connection: Mutex<Option<trailbase_sqlite::Connection>>,
|
||||
}
|
||||
|
||||
impl State {
|
||||
pub async fn load_module(&self, module: Module) -> Result<(), AnyError> {
|
||||
let (sender, receiver) = oneshot::channel::<Result<(), AnyError>>();
|
||||
|
||||
self
|
||||
.private_sender
|
||||
.send(Message::LoadModule(module, sender))
|
||||
.await?;
|
||||
|
||||
let _ = receiver.await.map_err(|err| {
|
||||
error!("Failed to await module loading: {err}");
|
||||
return err;
|
||||
})?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
pub async fn send_privately(&self, msg: Message) -> Result<(), kanal::SendError> {
|
||||
return self.private_sender.send(msg).await;
|
||||
}
|
||||
}
|
||||
|
||||
/// Process-wide singleton owning the v8 platform, its worker threads and the
/// channels used to talk to them (see `get_runtime()` for why a singleton).
pub struct RuntimeSingleton {
  n_threads: usize,

  // Thread handle of the supervisor thread that spawns and joins the workers.
  handle: Option<std::thread::JoinHandle<()>>,

  // Shared sender; any idle isolate picks messages off this queue.
  shared_sender: kanal::AsyncSender<Message>,

  // Isolate state, one entry per worker thread.
  state: Vec<State>,
}

impl Drop for RuntimeSingleton {
  fn drop(&mut self) {
    if let Some(handle) = self.handle.take() {
      // Dropping the per-isolate states closes their private senders, letting
      // the worker event loops observe channel closure and wind down before
      // the supervisor thread is joined.
      self.state.clear();
      if let Err(err) = handle.join() {
        error!("Failed to join main rt thread: {err:?}");
      }
    }
  }
}
|
||||
|
||||
/// A pending JS promise for an in-flight HTTP dispatch, together with the
/// channel used to hand the final response back to the HTTP handler.
struct Completer {
  // Request URI; only used for error logging.
  name: String,
  promise: Promise<JsHttpResponse>,
  reply: oneshot::Sender<Result<JsHttpResponse, JsHttpResponseError>>,
}

impl Completer {
  /// Whether the underlying promise has settled (resolved or rejected).
  fn is_ready(&self, runtime: &mut Runtime) -> bool {
    return !self.promise.is_pending(runtime);
  }

  /// Consume the settled promise and forward its result to the waiting caller.
  async fn resolve(self, runtime: &mut Runtime) {
    let value = self
      .promise
      .into_future(runtime)
      .await
      .map_err(|err| JsHttpResponseError::Internal(err.into()));

    if self.reply.send(value).is_err() {
      error!("Completer send failed for : {}", self.name);
    }
  }
}
|
||||
|
||||
impl RuntimeSingleton {
  /// Bring up `threads` worker/isolate threads with basic setup.
  ///
  /// NOTE: functions to install routes and jobs are registered later, we need an AppState first.
  fn new_with_threads(threads: Option<usize>) -> Self {
    // Default to one worker per available core; fall back to 1 on error.
    let n_threads = match threads {
      Some(n) => n,
      None => std::thread::available_parallelism().map_or_else(
        |err| {
          error!("Failed to get number of threads: {err}");
          return 1;
        },
        |x| x.get(),
      ),
    };

    info!("Starting v8 JavaScript runtime with {n_threads} workers.");

    // Shared queue: any idle isolate may pick up these messages.
    let (shared_sender, shared_receiver) = kanal::unbounded_async::<Message>();

    // One private queue per isolate, paired with its `State`.
    let (state, receivers): (Vec<State>, Vec<kanal::AsyncReceiver<Message>>) = (0..n_threads)
      .map(|_index| {
        let (sender, receiver) = kanal::unbounded_async::<Message>();

        return (
          State {
            private_sender: sender,
            connection: Mutex::new(None),
          },
          receiver,
        );
      })
      .unzip();

    // Supervisor thread: initializes the v8 platform once, then spawns and
    // joins one worker thread per isolate.
    let handle = if n_threads > 0 {
      Some(std::thread::spawn(move || {
        // swc_ecma_codegen is very spammy (or at least used to be):
        // https://github.com/swc-project/swc/pull/9604
        tracing_subscriber::Registry::default()
          .with(tracing_subscriber::filter::Targets::new().with_target(
            "tracing::span",
            tracing_subscriber::filter::LevelFilter::WARN,
          ))
          .set_default();

        init_platform(n_threads as u32, true);

        let threads: Vec<_> = receivers
          .into_iter()
          .enumerate()
          .map(|(index, receiver)| {
            let shared_receiver = shared_receiver.clone();

            return std::thread::spawn(move || {
              // Each worker drives its own current-thread tokio runtime.
              let tokio_runtime = std::rc::Rc::new(
                tokio::runtime::Builder::new_current_thread()
                  .enable_time()
                  .enable_io()
                  .thread_name("v8-runtime")
                  .build()
                  .expect("startup"),
              );

              let mut js_runtime = match Self::init_runtime(index, tokio_runtime.clone()) {
                Ok(js_runtime) => js_runtime,
                Err(err) => {
                  panic!("Failed to init v8 runtime on thread {index}: {err}");
                }
              };

              // Blocks until the channels are closed (or a channel panics).
              event_loop(&mut js_runtime, receiver, shared_receiver);
            });
          })
          .collect();

        for (idx, thread) in threads.into_iter().enumerate() {
          if let Err(err) = thread.join() {
            error!("Failed to join worker: {idx}: {err:?}");
          }
        }
      }))
    } else {
      None
    };

    return RuntimeSingleton {
      n_threads,
      shared_sender,
      handle,
      state,
    };
  }

  /// Construct a fresh `rustyscript` runtime for worker `index` and register
  /// the host functions exposed to JS: `isolate_id`, `query` and `execute`.
  fn init_runtime(
    index: usize,
    tokio_runtime: std::rc::Rc<tokio::runtime::Runtime>,
  ) -> Result<Runtime, AnyError> {
    let mut runtime = rustyscript::Runtime::with_tokio_runtime(
      rustyscript::RuntimeOptions {
        // Resolves `trailbase:` imports to the embedded runtime assets.
        import_provider: Some(Box::new(crate::import_provider::ImportProvider)),
        schema_whlist: HashSet::from(["trailbase".to_string()]),
        ..Default::default()
      },
      tokio_runtime,
    )?;

    runtime
      .register_function("isolate_id", move |_args: &[serde_json::Value]| {
        return Ok(serde_json::json!(index));
      })
      .expect("Failed to register 'isolate_id' function");

    // query(sql, params) -> rows as nested JSON arrays.
    runtime.register_async_function("query", move |args: Vec<serde_json::Value>| {
      Box::pin(async move {
        let query: String = get_arg(&args, 0)?;
        let params = json_values_to_params(get_arg(&args, 1)?)?;

        // Clone this isolate's connection out of the singleton; it is only
        // present after `RuntimeHandle::set_connection` ran.
        let Some(conn) = get_runtime(None).state[index].connection.lock().clone() else {
          return Err(rustyscript::Error::Runtime(
            "missing db connection".to_string(),
          ));
        };

        let rows = conn
          .write_query_rows(query, params)
          .await
          .map_err(|err| rustyscript::Error::Runtime(err.to_string()))?;

        let values = rows
          .iter()
          .map(|row| -> Result<serde_json::Value, JsonError> {
            return Ok(serde_json::Value::Array(row_to_json_array(row)?));
          })
          .collect::<Result<Vec<_>, _>>()
          .map_err(|err| rustyscript::Error::Runtime(err.to_string()))?;

        return Ok(serde_json::Value::Array(values));
      })
    })?;

    // execute(sql, params) -> number of affected rows.
    runtime.register_async_function("execute", move |args: Vec<serde_json::Value>| {
      Box::pin(async move {
        let query: String = get_arg(&args, 0)?;
        let params = json_values_to_params(get_arg(&args, 1)?)?;

        let Some(conn) = get_runtime(None).state[index].connection.lock().clone() else {
          return Err(rustyscript::Error::Runtime(
            "missing db connection".to_string(),
          ));
        };

        let rows_affected = conn
          .execute(query, params)
          .await
          .map_err(|err| rustyscript::Error::Runtime(err.to_string()))?;

        return Ok(serde_json::Value::Number(rows_affected.into()));
      })
    })?;

    return Ok(runtime);
  }
}
|
||||
|
||||
/// Apply a single queue `Message` to this isolate's `runtime`.
///
/// HTTP dispatches are asynchronous: the JS promise is parked in `completers`
/// and resolved later by the event loop; the other message kinds reply inline.
async fn handle_message(
  runtime: &mut Runtime,
  msg: Message,
  completers: &mut Vec<Completer>,
) -> Result<(), AnyError> {
  match msg {
    Message::Run(f) => {
      f(runtime);
    }
    Message::HttpDispatch(args) => {
      let channel = args.reply;
      let uri = args.uri.clone();
      // Calls the JS-side `__dispatch` entry point, which returns a promise
      // of the HTTP response.
      let promise = match runtime.call_function_immediate::<Promise<JsHttpResponse>>(
        None,
        "__dispatch",
        json_args!(
          args.method,
          args.route_path,
          args.uri,
          args.path_params,
          args.headers,
          args.user,
          args.body
        ),
      ) {
        Ok(promise) => promise,
        Err(err) => {
          // Report dispatch failures to the HTTP caller; the event loop
          // itself keeps running.
          if channel
            .send(Err(JsHttpResponseError::Internal(err.into())))
            .is_err()
          {
            error!("dispatch sending error failed");
          }
          return Ok(());
        }
      };

      // Park the pending promise; the event loop resolves it once settled.
      completers.push(Completer {
        name: uri,
        promise,
        reply: channel,
      });
    }
    Message::CallFunction(module, name, args, sender) => {
      // NOTE(review): a module load failure is swallowed here (`.ok()`) and
      // the function is then looked up globally instead — confirm this
      // fallback is intentional.
      let module_handle = if let Some(module) = module {
        runtime.load_module_async(&module).await.ok()
      } else {
        None
      };

      let result: Result<serde_json::Value, AnyError> = runtime
        .call_function_async::<serde_json::Value>(module_handle.as_ref(), name, &args)
        .await
        .map_err(|err| err.into());

      if sender.send(result).is_err() {
        error!("Sending of js function call reply failed");
      }
    }
    Message::LoadModule(module, sender) => {
      // On load failure `sender` is dropped before replying; the caller
      // observes this as a closed reply channel.
      runtime.load_module_async(&module).await?;
      if sender.send(Ok(())).is_err() {
        error!("Load module send failed");
      }
    }
  }

  return Ok(());
}
|
||||
|
||||
/// The main event-loop running for every isolate/worker.
|
||||
fn event_loop(
|
||||
runtime: &mut Runtime,
|
||||
private_recv: kanal::AsyncReceiver<Message>,
|
||||
shared_recv: kanal::AsyncReceiver<Message>,
|
||||
) {
|
||||
const DURATION: Option<Duration> = Some(Duration::from_millis(25));
|
||||
const OPTS: PollEventLoopOptions = PollEventLoopOptions {
|
||||
wait_for_inspector: false,
|
||||
pump_v8_message_loop: true,
|
||||
};
|
||||
|
||||
runtime.tokio_runtime().block_on(async {
|
||||
let mut completers: Vec<Completer> = vec![];
|
||||
|
||||
loop {
|
||||
let completed_indexes = completers
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(idx, completer)| {
|
||||
if completer.is_ready(runtime) {
|
||||
Some(idx)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for index in completed_indexes.into_iter().rev() {
|
||||
completers.swap_remove(index).resolve(runtime).await;
|
||||
}
|
||||
|
||||
// Either pump or wait for a new private or shared message.
|
||||
tokio::select! {
|
||||
// Keep pumping while there are still futures (HTTP requests) that need completing.
|
||||
result = runtime.await_event_loop(OPTS, DURATION), if !completers.is_empty() => {
|
||||
if let Err(err) = result{
|
||||
error!("JS event loop: {err}");
|
||||
}
|
||||
},
|
||||
msg = private_recv.recv() => {
|
||||
let Ok(msg) = msg else {
|
||||
panic!("private channel closed");
|
||||
};
|
||||
if let Err(err) = handle_message(runtime, msg, &mut completers).await {
|
||||
error!("Handle private message: {err}");
|
||||
}
|
||||
},
|
||||
msg = shared_recv.recv() => {
|
||||
let Ok(msg) = msg else {
|
||||
panic!("private channel closed");
|
||||
};
|
||||
if let Err(err) = handle_message(runtime, msg, &mut completers).await {
|
||||
error!("Handle shared message: {err}");
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// NOTE: Repeated runtime initialization, e.g. in a multi-threaded context, leads to segfaults.
// rustyscript::init_platform is supposed to help with this but we haven't found a way to
// make it work. Thus, we're making the V8 VM a singleton (like Dart's).
//
// NOTE: `n_threads` is only honored by the very first call; subsequent calls
// return the already-initialized singleton and silently ignore the argument.
fn get_runtime(n_threads: Option<usize>) -> &'static RuntimeSingleton {
  static RUNTIME: OnceLock<RuntimeSingleton> = OnceLock::new();
  return RUNTIME.get_or_init(move || RuntimeSingleton::new_with_threads(n_threads));
}
|
||||
|
||||
/// Cheap, clonable handle to the process-wide JS runtime singleton.
#[derive(Clone)]
pub struct RuntimeHandle {
  pub runtime: &'static RuntimeSingleton,
}
|
||||
|
||||
impl RuntimeHandle {
  /// Handle to the singleton runtime; on first use the runtime is brought up
  /// with the default worker count (see `RuntimeSingleton::new_with_threads`).
  #[allow(clippy::new_without_default)]
  pub fn new() -> Self {
    return Self {
      runtime: get_runtime(None),
    };
  }

  /// Like [`RuntimeHandle::new`] but with an explicit worker count.
  ///
  /// NOTE: the count only takes effect if this is the very first runtime
  /// access in the process; afterwards the existing singleton is returned.
  pub fn new_with_threads(n_threads: usize) -> Self {
    return Self {
      runtime: get_runtime(Some(n_threads)),
    };
  }

  /// Number of worker/isolate threads.
  pub fn num_threads(&self) -> usize {
    return self.runtime.n_threads;
  }

  /// Per-isolate state, one entry per worker thread.
  pub fn state(&self) -> &'static Vec<State> {
    return &self.runtime.state;
  }

  /// Enqueue a message on the shared queue; whichever isolate is free picks
  /// it up.
  pub async fn send_to_any_isolate(&self, msg: Message) -> Result<(), kanal::SendError> {
    return self.runtime.shared_sender.send(msg).await;
  }

  /// Install the SQLite connection used by the JS `query`/`execute` bindings
  /// on every isolate.
  ///
  /// Panics if a connection is already set and `override` is false.
  /// NOTE(review): with `override == true` an existing connection is only
  /// logged, NOT replaced — confirm whether replacing was intended (the test
  /// helper `override_connection` does replace).
  pub fn set_connection(&self, conn: trailbase_sqlite::Connection, r#override: bool) {
    for s in &self.runtime.state {
      let mut lock = s.connection.lock();
      if lock.is_some() {
        if !r#override {
          panic!("connection already set");
        }

        debug!("connection already set");
      } else {
        lock.replace(conn.clone());
      }
    }
  }
}
|
||||
|
||||
fn json_value_to_param(
|
||||
value: serde_json::Value,
|
||||
) -> Result<trailbase_sqlite::Value, rustyscript::Error> {
|
||||
use rustyscript::Error;
|
||||
return Ok(match value {
|
||||
serde_json::Value::Object(ref _map) => {
|
||||
return Err(Error::Runtime("Object unsupported".to_string()));
|
||||
}
|
||||
serde_json::Value::Array(ref _arr) => {
|
||||
return Err(Error::Runtime("Array unsupported".to_string()));
|
||||
}
|
||||
serde_json::Value::Null => trailbase_sqlite::Value::Null,
|
||||
serde_json::Value::Bool(b) => trailbase_sqlite::Value::Integer(b as i64),
|
||||
serde_json::Value::String(str) => trailbase_sqlite::Value::Text(str),
|
||||
serde_json::Value::Number(number) => {
|
||||
if let Some(n) = number.as_i64() {
|
||||
trailbase_sqlite::Value::Integer(n)
|
||||
} else if let Some(n) = number.as_u64() {
|
||||
trailbase_sqlite::Value::Integer(n as i64)
|
||||
} else if let Some(n) = number.as_f64() {
|
||||
trailbase_sqlite::Value::Real(n)
|
||||
} else {
|
||||
return Err(Error::Runtime(format!("invalid number: {number:?}")));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn json_values_to_params(
|
||||
values: Vec<serde_json::Value>,
|
||||
) -> Result<Vec<trailbase_sqlite::Value>, rustyscript::Error> {
|
||||
return values.into_iter().map(json_value_to_param).collect();
|
||||
}
|
||||
|
||||
impl IntoResponse for JsHttpResponseError {
|
||||
fn into_response(self) -> Response {
|
||||
let (status, body): (StatusCode, Option<String>) = match self {
|
||||
Self::Precondition(err) => (StatusCode::PRECONDITION_FAILED, Some(err.to_string())),
|
||||
Self::Internal(err) => (StatusCode::INTERNAL_SERVER_ERROR, Some(err.to_string())),
|
||||
};
|
||||
|
||||
if let Some(body) = body {
|
||||
return Response::builder()
|
||||
.status(status)
|
||||
.header(CONTENT_TYPE, "text/plain")
|
||||
.body(Body::new(body))
|
||||
.unwrap_or_default();
|
||||
}
|
||||
|
||||
return Response::builder()
|
||||
.status(status)
|
||||
.body(Body::empty())
|
||||
.unwrap_or_default();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_arg<T>(args: &[serde_json::Value], i: usize) -> Result<T, rustyscript::Error>
|
||||
where
|
||||
T: serde::de::DeserializeOwned,
|
||||
{
|
||||
use rustyscript::Error;
|
||||
let arg = args
|
||||
.get(i)
|
||||
.ok_or_else(|| Error::Runtime(format!("Range err {i} > {}", args.len())))?;
|
||||
return serde_json::from_value::<T>(arg.clone()).map_err(|err| Error::Runtime(err.to_string()));
|
||||
}
|
||||
|
||||
pub async fn write_js_runtime_files(data_dir: impl AsRef<Path>) {
|
||||
let path = data_dir.as_ref();
|
||||
if let Err(err) = tokio::fs::write(
|
||||
path.join("trailbase.js"),
|
||||
cow_to_string(
|
||||
JsRuntimeAssets::get("index.js")
|
||||
.expect("Failed to read rt/index.js")
|
||||
.data,
|
||||
)
|
||||
.as_str(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
warn!("Failed to write 'trailbase.js': {err}");
|
||||
}
|
||||
|
||||
if let Err(err) = tokio::fs::write(
|
||||
path.join("trailbase.d.ts"),
|
||||
cow_to_string(
|
||||
JsRuntimeAssets::get("index.d.ts")
|
||||
.expect("Failed to read rt/index.d.ts")
|
||||
.data,
|
||||
)
|
||||
.as_str(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
warn!("Failed to write 'trailbase.d.ts': {err}");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
  use super::*;
  use rustyscript::Module;

  /// Helper: round-trip a `CallFunction` message and deserialize the reply.
  async fn call_function<T>(
    sender: &kanal::AsyncSender<Message>,
    module: Option<Module>,
    name: &'static str,
    args: Vec<serde_json::Value>,
  ) -> Result<T, AnyError>
  where
    T: serde::de::DeserializeOwned,
  {
    let (resp_sender, resp_receiver) = oneshot::channel::<Result<serde_json::Value, AnyError>>();
    sender
      .send(Message::CallFunction(module, name, args, resp_sender))
      .await?;

    return Ok(serde_json::from_value::<T>(resp_receiver.await??)?);
  }

  #[tokio::test]
  async fn test_serial_tests() {
    // NOTE: needs to run serially since registration of SQLite connection with singleton v8
    // runtime is racy.
    test_runtime_apply().await;
    test_runtime_javascript().await;
    test_javascript_query().await;
    test_javascript_execute().await;
  }

  // `Message::Run` closures execute on a worker thread.
  async fn test_runtime_apply() {
    let (sender, receiver) = tokio::sync::oneshot::channel::<i64>();

    let handle = RuntimeHandle::new();
    handle
      .runtime
      .shared_sender
      .send(Message::Run(Box::new(|_rt| {
        sender.send(5).unwrap();
      })))
      .await
      .unwrap();

    assert_eq!(5, receiver.await.unwrap());
  }

  // A plain JS module function can be loaded and invoked via `CallFunction`.
  async fn test_runtime_javascript() {
    let handle = RuntimeHandle::new();

    tracing_subscriber::Registry::default()
      .with(tracing_subscriber::filter::LevelFilter::WARN)
      .set_default();
    let module = Module::new(
      "module.js",
      r#"
      export function test_fun() {
        return "test0";
      }
      "#,
    );

    let result = call_function::<String>(
      &handle.runtime.shared_sender,
      Some(module),
      "test_fun",
      vec![],
    )
    .await
    .unwrap();
    assert_eq!("test0", result);
  }

  // Unconditionally installs `conn` on all isolates (unlike
  // `RuntimeHandle::set_connection`, this always replaces).
  fn override_connection(handle: &RuntimeHandle, conn: trailbase_sqlite::Connection) {
    for s in &handle.runtime.state {
      let mut lock = s.connection.lock();
      if lock.is_some() {
        debug!("connection already set");
      }
      lock.replace(conn.clone());
    }
  }

  // The JS-visible `query` binding reads rows from the installed connection.
  async fn test_javascript_query() {
    let conn = trailbase_sqlite::Connection::open_in_memory().unwrap();
    conn
      .execute("CREATE TABLE test (v0 TEXT, v1 INTEGER);", ())
      .await
      .unwrap();
    conn
      .execute("INSERT INTO test (v0, v1) VALUES ('0', 0), ('1', 1);", ())
      .await
      .unwrap();

    let handle = RuntimeHandle::new();
    override_connection(&handle, conn);

    tracing_subscriber::Registry::default()
      .with(tracing_subscriber::filter::LevelFilter::WARN)
      .set_default();
    let module = Module::new(
      "module.ts",
      r#"
      import { query } from "trailbase:main";

      export async function test_query(queryStr: string) : Promise<unknown[][]> {
        return await query(queryStr, []);
      }
      "#,
    );

    let result = call_function::<Vec<Vec<serde_json::Value>>>(
      &handle.runtime.shared_sender,
      Some(module),
      "test_query",
      vec![serde_json::json!("SELECT * FROM test")],
    )
    .await
    .unwrap();

    assert_eq!(
      vec![
        vec![
          serde_json::Value::String("0".to_string()),
          serde_json::Value::Number(0.into())
        ],
        vec![
          serde_json::Value::String("1".to_string()),
          serde_json::Value::Number(1.into())
        ],
      ],
      result
    );
  }

  // The JS-visible `execute` binding mutates via the installed connection.
  async fn test_javascript_execute() {
    let conn = trailbase_sqlite::Connection::open_in_memory().unwrap();
    conn
      .execute("CREATE TABLE test (v0 TEXT, v1 INTEGER);", ())
      .await
      .unwrap();

    let handle = RuntimeHandle::new();
    override_connection(&handle, conn.clone());

    tracing_subscriber::Registry::default()
      .with(tracing_subscriber::filter::LevelFilter::WARN)
      .set_default();
    let module = Module::new(
      "module.ts",
      r#"
      import { execute } from "trailbase:main";

      export async function test_execute(queryStr: string) : Promise<number> {
        return await execute(queryStr, []);
      }
      "#,
    );

    let _result = call_function::<i64>(
      &handle.runtime.shared_sender,
      Some(module),
      "test_execute",
      vec![serde_json::json!("DELETE FROM test")],
    )
    .await
    .unwrap();

    let count: i64 = conn
      .read_query_row_f("SELECT COUNT(*) FROM test", (), |row| row.get(0))
      .await
      .unwrap()
      .unwrap();
    assert_eq!(0, count);
  }
}
|
||||
8
trailbase-js/src/util.rs
Normal file
8
trailbase-js/src/util.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
/// Decode asset bytes into a `String`, replacing invalid UTF-8 sequences with
/// U+FFFD (lossy conversion).
pub(crate) fn cow_to_string(cow: Cow<'static, [u8]>) -> String {
  // `from_utf8_lossy` accepts `&[u8]` via `Deref`, collapsing the previously
  // duplicated `Borrowed`/`Owned` match arms; `into_owned` allocates only the
  // final `String`.
  return String::from_utf8_lossy(&cow).into_owned();
}
|
||||
@@ -15,6 +15,7 @@ path = "benches/benchmark.rs"
|
||||
harness = false
|
||||
|
||||
[dependencies]
|
||||
base64 = { version = "0.22.1", default-features = false }
|
||||
crossbeam-channel = "0.5.13"
|
||||
kanal = "0.1.1"
|
||||
log = { version = "^0.4.21", default-features = false }
|
||||
|
||||
@@ -13,10 +13,11 @@
|
||||
pub mod connection;
|
||||
pub mod error;
|
||||
pub mod params;
|
||||
mod rows;
|
||||
pub mod rows;
|
||||
|
||||
pub use rusqlite::types::Value;
|
||||
|
||||
pub use connection::Connection;
|
||||
pub use error::Error;
|
||||
pub use params::{NamedParamRef, NamedParams, NamedParamsRef, Params};
|
||||
pub use rows::{Row, Rows, ValueType};
|
||||
pub use rusqlite::types::Value;
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
use base64::prelude::*;
|
||||
use rusqlite::{Statement, types};
|
||||
use std::fmt::Debug;
|
||||
use std::ops::Index;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub enum ValueType {
|
||||
@@ -198,3 +200,42 @@ impl Index<usize> for Row {
|
||||
return &self.0[idx];
|
||||
}
|
||||
}
|
||||
|
||||
/// Errors arising when converting SQLite values/rows to JSON.
#[derive(Debug, Error)]
pub enum JsonError {
  // SQLite REALs that are NaN/Inf have no JSON number representation.
  #[error("Float not finite")]
  Finite,
  #[error("Value not found")]
  ValueNotFound,
}

/// Convert a single SQLite value into a JSON value.
///
/// NULL -> null, INTEGER -> number, REAL -> number (error if not finite),
/// TEXT -> string, BLOB -> URL-safe base64 string.
pub fn value_to_json(value: &types::Value) -> Result<serde_json::Value, JsonError> {
  return Ok(match value {
    types::Value::Null => serde_json::Value::Null,
    types::Value::Real(real) => {
      let Some(number) = serde_json::Number::from_f64(*real) else {
        return Err(JsonError::Finite);
      };
      serde_json::Value::Number(number)
    }
    types::Value::Integer(integer) => serde_json::Value::Number(serde_json::Number::from(*integer)),
    types::Value::Blob(blob) => serde_json::Value::String(BASE64_URL_SAFE.encode(blob)),
    types::Value::Text(text) => serde_json::Value::String(text.clone()),
  });
}

/// Convert one row into a positional JSON array (one element per column).
pub fn row_to_json_array(row: &Row) -> Result<Vec<serde_json::Value>, JsonError> {
  let cols = row.column_count();
  let mut json_row = Vec::<serde_json::Value>::with_capacity(cols);

  for i in 0..cols {
    let value = row.get_value(i).ok_or(JsonError::ValueNotFound)?;
    json_row.push(value_to_json(value)?);
  }

  return Ok(json_row);
}

/// Convert a full result set into nested positional JSON arrays.
pub fn rows_to_json_arrays(rows: &Rows) -> Result<Vec<Vec<serde_json::Value>>, JsonError> {
  return rows.iter().map(row_to_json_array).collect();
}
|
||||
|
||||
Reference in New Issue
Block a user