#[cfg(test)]
mod generic_migration_tests {
    use sqlx::{PgPool, Row};
    use testcontainers::{runners::AsyncRunner, ImageExt};
    use testcontainers_modules::postgres::Postgres;
    use std::process::Command;

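    /// Start a throwaway Postgres 15 container via testcontainers and return a
    /// connection pool pointed at it, plus the container handle that keeps it alive.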
    async fn setup_test_db() -> (PgPool, testcontainers::ContainerAsync<Postgres>) {
        let postgres_image = Postgres::default()
            .with_tag("15-alpine")
            .with_env_var("POSTGRES_USER", "test")
            .with_env_var("POSTGRES_PASSWORD", "test")
            .with_env_var("POSTGRES_DB", "test");

        let container = postgres_image.start().await.expect("Failed to start postgres container");
        let port = container.get_host_port_ipv4(5432).await.expect("Failed to get postgres port");

        let database_url = format!("postgresql://test:test@localhost:{}/test", port);
        let pool = sqlx::postgres::PgPoolOptions::new()
            .max_connections(5)
            .connect(&database_url)
            .await
            .expect("Failed to connect to test database");

        (pool, container)
    }

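    /// List migration files touched on this branch relative to `main`, as reported by
    /// `git diff`. Returns an empty list if git is unavailable or the diff fails.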
    fn get_new_migrations() -> Vec<String> {
        // Get list of migration files that have changed between main and current branch
        let output = Command::new("git")
            .args(["diff", "--name-only", "main..HEAD", "--", "migrations/"])
            .output()
            .expect("Failed to run git diff");

        if !output.status.success() {
            println!("Git diff failed, assuming no migration changes");
            return Vec::new();
        }

        let files = String::from_utf8_lossy(&output.stdout);
        files
            .lines()
            .filter(|line| line.ends_with(".sql"))
            .map(|s| s.to_string())
            .collect()
    }

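    /// List all migration files that already exist on `origin/main`, used to detect
    /// timestamp collisions. Returns an empty list if the git lookup fails.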
    fn get_migration_files_on_main() -> Vec<String> {
        // Get list of migration files that exist on main branch
        let output = Command::new("git")
            .args(["ls-tree", "-r", "--name-only", "origin/main", "migrations/"])
            .output()
            .expect("Failed to list migration files on main");

        if !output.status.success() {
            println!("Failed to get migration files from main branch");
            return Vec::new();
        }

        let files = String::from_utf8_lossy(&output.stdout);
        files
            .lines()
            .filter(|line| line.ends_with(".sql"))
            .map(|s| s.to_string())
            .collect()
    }

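    // Each test below is effectively a no-op when the current branch adds no migration
    // files relative to main; the database is only exercised when new migrations exist.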
    #[tokio::test]
    async fn test_new_migrations_run_successfully() {
        let new_migrations = get_new_migrations();

        if new_migrations.is_empty() {
            println!("✅ No new migrations found - test passes");
            return;
        }

        println!("🔍 Found {} new migration(s):", new_migrations.len());
        for migration in &new_migrations {
            println!(" - {}", migration);
        }

        let (pool, _container) = setup_test_db().await;

        // Run all migrations (including the new ones)
        let result = sqlx::migrate!("./migrations").run(&pool).await;
        assert!(result.is_ok(), "New migrations should run successfully: {:?}", result.err());

        println!("✅ All migrations including new ones ran successfully");
    }

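    // sqlx records applied migrations in its _sqlx_migrations table, so a second run of
    // the same migrator against an already-migrated database should be a no-op, not an error.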
    #[tokio::test]
    async fn test_migrations_are_idempotent() {
        let new_migrations = get_new_migrations();

        if new_migrations.is_empty() {
            println!("✅ No new migrations found - idempotency test skipped");
            return;
        }

        let (pool, _container) = setup_test_db().await;

        // Run migrations twice to test idempotency
        let result1 = sqlx::migrate!("./migrations").run(&pool).await;
        assert!(result1.is_ok(), "First migration run should succeed: {:?}", result1.err());

        let result2 = sqlx::migrate!("./migrations").run(&pool).await;
        assert!(result2.is_ok(), "Second migration run should succeed (idempotent): {:?}", result2.err());

        println!("✅ Migrations are idempotent");
    }

    #[tokio::test]
    async fn test_migration_syntax_and_completeness() {
        let new_migrations = get_new_migrations();

        if new_migrations.is_empty() {
            println!("✅ No new migrations found - syntax test skipped");
            return;
        }

        // Check that new migration files exist and have basic structure
        for migration_path in &new_migrations {
            let content = std::fs::read_to_string(migration_path)
                .expect(&format!("Should be able to read migration file: {}", migration_path));

            assert!(!content.trim().is_empty(), "Migration file should not be empty: {}", migration_path);

            // Basic syntax check - should not contain obvious SQL syntax errors
            assert!(!content.contains("syntax error"), "Migration should not contain 'syntax error': {}", migration_path);

            println!("✅ Migration file {} has valid syntax", migration_path);
        }
    }

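    // Despite the name, nothing is rolled back here: the test runs the forward migrations
    // and then checks that the core tables the application depends on are present.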
    #[tokio::test]
    async fn test_migration_rollback_safety() {
        let new_migrations = get_new_migrations();

        if new_migrations.is_empty() {
            println!("✅ No new migrations found - rollback safety test skipped");
            return;
        }

        let (pool, _container) = setup_test_db().await;

        // Test that we can run migrations and they create expected schema elements
        let result = sqlx::migrate!("./migrations").run(&pool).await;
        assert!(result.is_ok(), "Migrations should run successfully: {:?}", result.err());

        // Verify basic schema integrity
        let tables = sqlx::query("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'")
            .fetch_all(&pool)
            .await
            .expect("Should be able to query table list");

        assert!(!tables.is_empty(), "Should have created at least one table");

        // Check that essential tables exist
        let table_names: Vec<String> = tables.iter()
            .map(|row| row.get::<String, _>("table_name"))
            .collect();

        assert!(table_names.contains(&"documents".to_string()), "documents table should exist");
        assert!(table_names.contains(&"users".to_string()), "users table should exist");

        println!("✅ Migration rollback safety verified - schema is intact");
    }

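    // Migration filenames are expected to follow sqlx's `<version>_<description>.sql` pattern,
    // with a numeric YYYYMMDDHHMMSS timestamp serving as the version.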
    #[test]
    fn test_migration_naming_convention() {
        let new_migrations = get_new_migrations();

        if new_migrations.is_empty() {
            println!("✅ No new migrations found - naming convention test skipped");
            return;
        }

        for migration_path in &new_migrations {
            let filename = migration_path
                .split('/')
                .last()
                .expect("Should have filename");

            // Check naming convention: YYYYMMDDHHMMSS_description.sql
            assert!(filename.len() > 15, "Migration filename should be long enough: {}", filename);
            assert!(filename.ends_with(".sql"), "Migration should end with .sql: {}", filename);

            let parts: Vec<&str> = filename.split('_').collect();
            assert!(parts.len() >= 2, "Migration should have timestamp_description format: {}", filename);

            let timestamp = parts[0];
            assert!(timestamp.len() >= 14, "Timestamp should be at least 14 characters: {}", filename);
            assert!(timestamp.chars().all(|c| c.is_numeric()), "Timestamp should be numeric: {}", filename);

            println!("✅ Migration {} follows naming convention", filename);
        }
    }

    #[tokio::test]
    async fn test_no_changes_scenario_simulation() {
        // Simulate what happens when git diff returns no changes (HEAD..HEAD)
        let output = Command::new("git")
            .args(["diff", "--name-only", "HEAD..HEAD", "--", "migrations/"])
            .output()
            .expect("Failed to run git diff");

        let files = String::from_utf8_lossy(&output.stdout);
        let no_changes: Vec<String> = files
            .lines()
            .filter(|line| line.ends_with(".sql"))
            .map(|s| s.to_string())
            .collect();

        // This should be empty (no changes between HEAD and itself)
        assert!(no_changes.is_empty(), "HEAD..HEAD should show no changes");

        // Verify the test logic handles empty migrations gracefully
        if no_changes.is_empty() {
            println!("✅ No new migrations found - test passes");
            // This is what the real tests do when no changes are found
            return;
        }

        println!("✅ No migration changes scenario handled correctly");
    }

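    // Migration timestamps double as sqlx version numbers, so a timestamp that already exists
    // on main (or is duplicated within this branch) would collide once the branches merge.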
    #[test]
    fn test_no_conflicting_migration_timestamps() {
        let new_migrations = get_new_migrations();
        let main_migrations = get_migration_files_on_main();

        if new_migrations.is_empty() {
            println!("✅ No new migrations found - timestamp conflict test skipped");
            return;
        }

        // Extract timestamps from new migrations
        let new_timestamps: Vec<String> = new_migrations.iter()
            .map(|path| {
                let filename = path.split('/').last().unwrap();
                let timestamp = filename.split('_').next().unwrap();
                timestamp.to_string()
            })
            .collect();

        // Extract timestamps from existing migrations on main
        let main_timestamps: Vec<String> = main_migrations.iter()
            .map(|path| {
                let filename = path.split('/').last().unwrap();
                let timestamp = filename.split('_').next().unwrap();
                timestamp.to_string()
            })
            .collect();

        // Check for conflicts
        for new_ts in &new_timestamps {
            assert!(
                !main_timestamps.contains(new_ts),
                "Migration timestamp {} conflicts with existing migration on main",
                new_ts
            );
        }

        // Check for duplicates within new migrations
        for (i, ts1) in new_timestamps.iter().enumerate() {
            for (j, ts2) in new_timestamps.iter().enumerate() {
                if i != j {
                    assert_ne!(ts1, ts2, "Duplicate migration timestamp found: {}", ts1);
                }
            }
        }

        println!("✅ No migration timestamp conflicts found");
    }
}