Compare commits

...

1 Commit

Author SHA1 Message Date
Corentin Thomasset
ca83ee3868 xplo(db): switch to kysely 2025-11-05 22:01:08 +01:00
126 changed files with 4679 additions and 21471 deletions

View File

@@ -50,6 +50,7 @@
"@crowlog/logger": "^2.0.0",
"@hono/node-server": "^1.14.4",
"@libsql/client": "^0.14.0",
"@libsql/kysely-libsql": "^0.4.1",
"@owlrelay/api-sdk": "^0.0.2",
"@owlrelay/webhook": "^0.0.3",
"@papra/lecture": "workspace:*",
@@ -65,6 +66,7 @@
"drizzle-orm": "^0.38.4",
"figue": "^3.1.1",
"hono": "^4.8.2",
"kysely": "^0.28.8",
"lodash-es": "^4.17.21",
"mime-types": "^3.0.1",
"nanoid": "^5.1.5",
@@ -78,6 +80,7 @@
"sanitize-html": "^2.17.0",
"stripe": "^17.7.0",
"tsx": "^4.20.3",
"valibot": "1.0.0-beta.10",
"zod": "^3.25.67"
},
"devDependencies": {

View File

@@ -1,4 +1,4 @@
import { sql } from 'drizzle-orm';
import { sql } from 'kysely';
import { describe, expect, test } from 'vitest';
import { setupDatabase } from '../../modules/app/database/database';
import { initialSchemaSetupMigration } from './0001-initial-schema-setup.migration';
@@ -9,7 +9,7 @@ describe('0001-initial-schema-setup migration', () => {
const { db } = setupDatabase({ url: ':memory:' });
await initialSchemaSetupMigration.up({ db });
const { rows: existingTables } = await db.run(sql`SELECT name FROM sqlite_master WHERE name NOT LIKE 'sqlite_%'`);
const { rows: existingTables } = await db.executeQuery<{ name: string }>(sql`SELECT name FROM sqlite_master WHERE name NOT LIKE 'sqlite_%'`.compile(db));
expect(existingTables.map(({ name }) => name)).to.eql([
'documents',
@@ -43,7 +43,7 @@ describe('0001-initial-schema-setup migration', () => {
await initialSchemaSetupMigration.down({ db });
const { rows: existingTablesAfterDown } = await db.run(sql`SELECT name FROM sqlite_master WHERE name NOT LIKE 'sqlite_%'`);
const { rows: existingTablesAfterDown } = await db.executeQuery<{ name: string }>(sql`SELECT name FROM sqlite_master WHERE name NOT LIKE 'sqlite_%'`.compile(db));
expect(existingTablesAfterDown.map(({ name }) => name)).to.eql([]);
});

View File

@@ -1,220 +1,326 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const initialSchemaSetupMigration = {
name: 'initial-schema-setup',
description: 'Creation of the base tables for the application',
up: async ({ db }) => {
await db.batch([
db.run(sql`
CREATE TABLE IF NOT EXISTS "documents" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"is_deleted" integer DEFAULT false NOT NULL,
"deleted_at" integer,
"organization_id" text NOT NULL,
"created_by" text,
"deleted_by" text,
"original_name" text NOT NULL,
"original_size" integer DEFAULT 0 NOT NULL,
"original_storage_key" text NOT NULL,
"original_sha256_hash" text NOT NULL,
"name" text NOT NULL,
"mime_type" text NOT NULL,
"content" text DEFAULT '' NOT NULL,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("created_by") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null,
FOREIGN KEY ("deleted_by") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null
);
`),
// Create users table first (no dependencies)
await db.schema
.createTable('users')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('email', 'text', col => col.notNull())
.addColumn('email_verified', 'integer', col => col.notNull().defaultTo(0))
.addColumn('name', 'text')
.addColumn('image', 'text')
.addColumn('max_organization_count', 'integer')
.execute();
db.run(sql`CREATE INDEX IF NOT EXISTS "documents_organization_id_is_deleted_created_at_index" ON "documents" ("organization_id","is_deleted","created_at");`),
db.run(sql`CREATE INDEX IF NOT EXISTS "documents_organization_id_is_deleted_index" ON "documents" ("organization_id","is_deleted");`),
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "documents_organization_id_original_sha256_hash_unique" ON "documents" ("organization_id","original_sha256_hash");`),
db.run(sql`CREATE INDEX IF NOT EXISTS "documents_original_sha256_hash_index" ON "documents" ("original_sha256_hash");`),
db.run(sql`CREATE INDEX IF NOT EXISTS "documents_organization_id_size_index" ON "documents" ("organization_id","original_size");`),
db.run(sql`
CREATE TABLE IF NOT EXISTS "organization_invitations" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"organization_id" text NOT NULL,
"email" text NOT NULL,
"role" text,
"status" text NOT NULL,
"expires_at" integer NOT NULL,
"inviter_id" text NOT NULL,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("inviter_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade
);
`),
await db.schema
.createIndex('users_email_unique')
.unique()
.ifNotExists()
.on('users')
.column('email')
.execute();
db.run(sql`CREATE TABLE IF NOT EXISTS "organization_members" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"organization_id" text NOT NULL,
"user_id" text NOT NULL,
"role" text NOT NULL,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade
);`),
await db.schema
.createIndex('users_email_index')
.ifNotExists()
.on('users')
.column('email')
.execute();
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "organization_members_user_organization_unique" ON "organization_members" ("organization_id","user_id");`),
db.run(sql`CREATE TABLE IF NOT EXISTS "organizations" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"name" text NOT NULL,
"customer_id" text
);`),
// Create organizations table (no dependencies)
await db.schema
.createTable('organizations')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('name', 'text', col => col.notNull())
.addColumn('customer_id', 'text')
.execute();
db.run(sql`CREATE TABLE IF NOT EXISTS "user_roles" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"user_id" text NOT NULL,
"role" text NOT NULL,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade
);`),
// Create organization_members table (depends on users and organizations)
await db.schema
.createTable('organization_members')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('organization_id', 'text', col => col.notNull().references('organizations.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('user_id', 'text', col => col.notNull().references('users.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('role', 'text', col => col.notNull())
.execute();
db.run(sql`CREATE INDEX IF NOT EXISTS "user_roles_role_index" ON "user_roles" ("role");`),
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "user_roles_user_id_role_unique_index" ON "user_roles" ("user_id","role");`),
db.run(sql`CREATE TABLE IF NOT EXISTS "documents_tags" (
"document_id" text NOT NULL,
"tag_id" text NOT NULL,
PRIMARY KEY("document_id", "tag_id"),
FOREIGN KEY ("document_id") REFERENCES "documents"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE cascade
);`),
await db.schema
.createIndex('organization_members_user_organization_unique')
.unique()
.ifNotExists()
.on('organization_members')
.columns(['organization_id', 'user_id'])
.execute();
db.run(sql`CREATE TABLE IF NOT EXISTS "tags" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"organization_id" text NOT NULL,
"name" text NOT NULL,
"color" text NOT NULL,
"description" text,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade
);`),
// Create organization_invitations table (depends on users and organizations)
await db.schema
.createTable('organization_invitations')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('organization_id', 'text', col => col.notNull().references('organizations.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('email', 'text', col => col.notNull())
.addColumn('role', 'text')
.addColumn('status', 'text', col => col.notNull())
.addColumn('expires_at', 'integer', col => col.notNull())
.addColumn('inviter_id', 'text', col => col.notNull().references('users.id').onDelete('cascade').onUpdate('cascade'))
.execute();
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "tags_organization_id_name_unique" ON "tags" ("organization_id","name");`),
db.run(sql`
CREATE TABLE IF NOT EXISTS "users" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"email" text NOT NULL,
"email_verified" integer DEFAULT false NOT NULL,
"name" text,
"image" text,
"max_organization_count" integer
);
`),
// Create documents table (depends on users and organizations)
await db.schema
.createTable('documents')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('is_deleted', 'integer', col => col.notNull().defaultTo(0))
.addColumn('deleted_at', 'integer')
.addColumn('organization_id', 'text', col => col.notNull().references('organizations.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('created_by', 'text', col => col.references('users.id').onDelete('set null').onUpdate('cascade'))
.addColumn('deleted_by', 'text', col => col.references('users.id').onDelete('set null').onUpdate('cascade'))
.addColumn('original_name', 'text', col => col.notNull())
.addColumn('original_size', 'integer', col => col.notNull().defaultTo(0))
.addColumn('original_storage_key', 'text', col => col.notNull())
.addColumn('original_sha256_hash', 'text', col => col.notNull())
.addColumn('name', 'text', col => col.notNull())
.addColumn('mime_type', 'text', col => col.notNull())
.addColumn('content', 'text', col => col.notNull().defaultTo(''))
.execute();
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "users_email_unique" ON "users" ("email");`),
db.run(sql`CREATE INDEX IF NOT EXISTS "users_email_index" ON "users" ("email");`),
db.run(sql`CREATE TABLE IF NOT EXISTS "auth_accounts" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"user_id" text,
"account_id" text NOT NULL,
"provider_id" text NOT NULL,
"access_token" text,
"refresh_token" text,
"access_token_expires_at" integer,
"refresh_token_expires_at" integer,
"scope" text,
"id_token" text,
"password" text,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade
);`),
await db.schema
.createIndex('documents_organization_id_is_deleted_created_at_index')
.ifNotExists()
.on('documents')
.columns(['organization_id', 'is_deleted', 'created_at'])
.execute();
db.run(sql`CREATE TABLE IF NOT EXISTS "auth_sessions" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"token" text NOT NULL,
"user_id" text,
"expires_at" integer NOT NULL,
"ip_address" text,
"user_agent" text,
"active_organization_id" text,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("active_organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE set null
);`),
await db.schema
.createIndex('documents_organization_id_is_deleted_index')
.ifNotExists()
.on('documents')
.columns(['organization_id', 'is_deleted'])
.execute();
db.run(sql`CREATE INDEX IF NOT EXISTS "auth_sessions_token_index" ON "auth_sessions" ("token");`),
db.run(sql`CREATE TABLE IF NOT EXISTS "auth_verifications" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"identifier" text NOT NULL,
"value" text NOT NULL,
"expires_at" integer NOT NULL
);`),
await db.schema
.createIndex('documents_organization_id_original_sha256_hash_unique')
.unique()
.ifNotExists()
.on('documents')
.columns(['organization_id', 'original_sha256_hash'])
.execute();
db.run(sql`CREATE INDEX IF NOT EXISTS "auth_verifications_identifier_index" ON "auth_verifications" ("identifier");`),
db.run(sql`CREATE TABLE IF NOT EXISTS "intake_emails" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"email_address" text NOT NULL,
"organization_id" text NOT NULL,
"allowed_origins" text DEFAULT '[]' NOT NULL,
"is_enabled" integer DEFAULT true NOT NULL,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade
);`),
await db.schema
.createIndex('documents_original_sha256_hash_index')
.ifNotExists()
.on('documents')
.column('original_sha256_hash')
.execute();
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "intake_emails_email_address_unique" ON "intake_emails" ("email_address");`),
db.run(sql`CREATE TABLE IF NOT EXISTS "organization_subscriptions" (
"id" text PRIMARY KEY NOT NULL,
"customer_id" text NOT NULL,
"organization_id" text NOT NULL,
"plan_id" text NOT NULL,
"status" text NOT NULL,
"seats_count" integer NOT NULL,
"current_period_end" integer NOT NULL,
"current_period_start" integer NOT NULL,
"cancel_at_period_end" integer DEFAULT false NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade
);`),
]);
await db.schema
.createIndex('documents_organization_id_size_index')
.ifNotExists()
.on('documents')
.columns(['organization_id', 'original_size'])
.execute();
// Create tags table (depends on organizations)
await db.schema
.createTable('tags')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('organization_id', 'text', col => col.notNull().references('organizations.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('name', 'text', col => col.notNull())
.addColumn('color', 'text', col => col.notNull())
.addColumn('description', 'text')
.execute();
await db.schema
.createIndex('tags_organization_id_name_unique')
.unique()
.ifNotExists()
.on('tags')
.columns(['organization_id', 'name'])
.execute();
// Create documents_tags junction table (depends on documents and tags)
await db.schema
.createTable('documents_tags')
.ifNotExists()
.addColumn('document_id', 'text', col => col.notNull().references('documents.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('tag_id', 'text', col => col.notNull().references('tags.id').onDelete('cascade').onUpdate('cascade'))
.addPrimaryKeyConstraint('documents_tags_pkey', ['document_id', 'tag_id'])
.execute();
// Create user_roles table (depends on users)
await db.schema
.createTable('user_roles')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('user_id', 'text', col => col.notNull().references('users.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('role', 'text', col => col.notNull())
.execute();
await db.schema
.createIndex('user_roles_role_index')
.ifNotExists()
.on('user_roles')
.column('role')
.execute();
await db.schema
.createIndex('user_roles_user_id_role_unique_index')
.unique()
.ifNotExists()
.on('user_roles')
.columns(['user_id', 'role'])
.execute();
// Create auth_accounts table (depends on users)
await db.schema
.createTable('auth_accounts')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('user_id', 'text', col => col.references('users.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('account_id', 'text', col => col.notNull())
.addColumn('provider_id', 'text', col => col.notNull())
.addColumn('access_token', 'text')
.addColumn('refresh_token', 'text')
.addColumn('access_token_expires_at', 'integer')
.addColumn('refresh_token_expires_at', 'integer')
.addColumn('scope', 'text')
.addColumn('id_token', 'text')
.addColumn('password', 'text')
.execute();
// Create auth_sessions table (depends on users and organizations)
await db.schema
.createTable('auth_sessions')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('token', 'text', col => col.notNull())
.addColumn('user_id', 'text', col => col.references('users.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('expires_at', 'integer', col => col.notNull())
.addColumn('ip_address', 'text')
.addColumn('user_agent', 'text')
.addColumn('active_organization_id', 'text', col => col.references('organizations.id').onDelete('set null').onUpdate('cascade'))
.execute();
await db.schema
.createIndex('auth_sessions_token_index')
.ifNotExists()
.on('auth_sessions')
.column('token')
.execute();
// Create auth_verifications table
await db.schema
.createTable('auth_verifications')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('identifier', 'text', col => col.notNull())
.addColumn('value', 'text', col => col.notNull())
.addColumn('expires_at', 'integer', col => col.notNull())
.execute();
await db.schema
.createIndex('auth_verifications_identifier_index')
.ifNotExists()
.on('auth_verifications')
.column('identifier')
.execute();
// Create intake_emails table (depends on organizations)
await db.schema
.createTable('intake_emails')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('email_address', 'text', col => col.notNull())
.addColumn('organization_id', 'text', col => col.notNull().references('organizations.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('allowed_origins', 'text', col => col.notNull().defaultTo('[]'))
.addColumn('is_enabled', 'integer', col => col.notNull().defaultTo(1))
.execute();
await db.schema
.createIndex('intake_emails_email_address_unique')
.unique()
.ifNotExists()
.on('intake_emails')
.column('email_address')
.execute();
// Create organization_subscriptions table (depends on organizations)
await db.schema
.createTable('organization_subscriptions')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('customer_id', 'text', col => col.notNull())
.addColumn('organization_id', 'text', col => col.notNull().references('organizations.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('plan_id', 'text', col => col.notNull())
.addColumn('status', 'text', col => col.notNull())
.addColumn('seats_count', 'integer', col => col.notNull())
.addColumn('current_period_end', 'integer', col => col.notNull())
.addColumn('current_period_start', 'integer', col => col.notNull())
.addColumn('cancel_at_period_end', 'integer', col => col.notNull().defaultTo(0))
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.execute();
},
down: async ({ db }) => {
await db.batch([
// Tables
db.run(sql`DROP TABLE IF EXISTS "organization_subscriptions";`),
db.run(sql`DROP TABLE IF EXISTS "intake_emails";`),
db.run(sql`DROP TABLE IF EXISTS "auth_verifications";`),
db.run(sql`DROP TABLE IF EXISTS "auth_sessions";`),
db.run(sql`DROP TABLE IF EXISTS "auth_accounts";`),
db.run(sql`DROP TABLE IF EXISTS "tags";`),
db.run(sql`DROP TABLE IF EXISTS "documents_tags";`),
db.run(sql`DROP TABLE IF EXISTS "user_roles";`),
db.run(sql`DROP TABLE IF EXISTS "organizations";`),
db.run(sql`DROP TABLE IF EXISTS "organization_members";`),
db.run(sql`DROP TABLE IF EXISTS "organization_invitations";`),
db.run(sql`DROP TABLE IF EXISTS "documents";`),
db.run(sql`DROP TABLE IF EXISTS "users";`),
// Drop tables in reverse order of creation (respecting foreign key constraints)
await db.schema.dropTable('organization_subscriptions').ifExists().execute();
await db.schema.dropTable('intake_emails').ifExists().execute();
await db.schema.dropTable('auth_verifications').ifExists().execute();
await db.schema.dropTable('auth_sessions').ifExists().execute();
await db.schema.dropTable('auth_accounts').ifExists().execute();
await db.schema.dropTable('user_roles').ifExists().execute();
await db.schema.dropTable('documents_tags').ifExists().execute();
await db.schema.dropTable('tags').ifExists().execute();
await db.schema.dropTable('documents').ifExists().execute();
await db.schema.dropTable('organization_invitations').ifExists().execute();
await db.schema.dropTable('organization_members').ifExists().execute();
await db.schema.dropTable('organizations').ifExists().execute();
await db.schema.dropTable('users').ifExists().execute();
// // Indexes
db.run(sql`DROP INDEX IF EXISTS "documents_organization_id_is_deleted_created_at_index";`),
db.run(sql`DROP INDEX IF EXISTS "documents_organization_id_is_deleted_index";`),
db.run(sql`DROP INDEX IF EXISTS "documents_organization_id_original_sha256_hash_unique";`),
db.run(sql`DROP INDEX IF EXISTS "documents_original_sha256_hash_index";`),
db.run(sql`DROP INDEX IF EXISTS "documents_organization_id_size_index";`),
db.run(sql`DROP INDEX IF EXISTS "user_roles_role_index";`),
db.run(sql`DROP INDEX IF EXISTS "user_roles_user_id_role_unique_index";`),
db.run(sql`DROP INDEX IF EXISTS "tags_organization_id_name_unique";`),
db.run(sql`DROP INDEX IF EXISTS "users_email_unique";`),
]);
await db.schema.dropIndex('users_email_unique').ifExists().execute();
await db.schema.dropIndex('users_email_index').ifExists().execute();
await db.schema.dropIndex('organization_members_user_organization_unique').ifExists().execute();
await db.schema.dropIndex('documents_organization_id_is_deleted_created_at_index').ifExists().execute();
await db.schema.dropIndex('documents_organization_id_is_deleted_index').ifExists().execute();
await db.schema.dropIndex('documents_organization_id_original_sha256_hash_unique').ifExists().execute();
await db.schema.dropIndex('documents_original_sha256_hash_index').ifExists().execute();
await db.schema.dropIndex('documents_organization_id_size_index').ifExists().execute();
await db.schema.dropIndex('tags_organization_id_name_unique').ifExists().execute();
await db.schema.dropIndex('user_roles_role_index').ifExists().execute();
await db.schema.dropIndex('user_roles_user_id_role_unique_index').ifExists().execute();
await db.schema.dropIndex('auth_sessions_token_index').ifExists().execute();
await db.schema.dropIndex('auth_verifications_identifier_index').ifExists().execute();
await db.schema.dropIndex('intake_emails_email_address_unique').ifExists().execute();
},
} satisfies Migration;

View File

@@ -1,37 +1,37 @@
import type { Migration } from '../migrations.types';
import { sql } from 'kysely';

// NOTE(review): the pasted source interleaved the old drizzle-orm implementation
// (db.batch/db.run) with the new kysely one; only the kysely version is kept.
export const documentsFtsMigration = {
  name: 'documents-fts',

  // Creates the documents_fts FTS5 virtual table, backfills it from the
  // existing documents table, and installs triggers that keep it in sync.
  up: async ({ db }) => {
    // FTS5 virtual tables and triggers require raw SQL (SQLite-specific)
    await db.executeQuery(sql`CREATE VIRTUAL TABLE IF NOT EXISTS documents_fts USING fts5(id UNINDEXED, name, original_name, content, prefix='2 3 4')`.compile(db));
    await db.executeQuery(sql`INSERT INTO documents_fts(id, name, original_name, content) SELECT id, name, original_name, content FROM documents`.compile(db));
    await db.executeQuery(sql`
      CREATE TRIGGER IF NOT EXISTS trigger_documents_fts_insert AFTER INSERT ON documents BEGIN
        INSERT INTO documents_fts(id, name, original_name, content) VALUES (new.id, new.name, new.original_name, new.content);
      END
    `.compile(db));
    await db.executeQuery(sql`
      CREATE TRIGGER IF NOT EXISTS trigger_documents_fts_update AFTER UPDATE ON documents BEGIN
        UPDATE documents_fts SET name = new.name, original_name = new.original_name, content = new.content WHERE id = new.id;
      END
    `.compile(db));
    await db.executeQuery(sql`
      CREATE TRIGGER IF NOT EXISTS trigger_documents_fts_delete AFTER DELETE ON documents BEGIN
        DELETE FROM documents_fts WHERE id = old.id;
      END
    `.compile(db));
  },

  // Removes the sync triggers first, then the FTS table itself.
  down: async ({ db }) => {
    await db.executeQuery(sql`DROP TRIGGER IF EXISTS trigger_documents_fts_insert`.compile(db));
    await db.executeQuery(sql`DROP TRIGGER IF EXISTS trigger_documents_fts_update`.compile(db));
    await db.executeQuery(sql`DROP TRIGGER IF EXISTS trigger_documents_fts_delete`.compile(db));
    await db.executeQuery(sql`DROP TABLE IF EXISTS documents_fts`.compile(db));
  },
} satisfies Migration;

View File

@@ -1,57 +1,51 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const taggingRulesMigration = {
name: 'tagging-rules',
up: async ({ db }) => {
await db.batch([
db.run(sql`
CREATE TABLE IF NOT EXISTS "tagging_rule_actions" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"tagging_rule_id" text NOT NULL,
"tag_id" text NOT NULL,
FOREIGN KEY ("tagging_rule_id") REFERENCES "tagging_rules"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE cascade
);
`),
// Create tagging_rules table first (depends on organizations)
await db.schema
.createTable('tagging_rules')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('organization_id', 'text', col => col.notNull().references('organizations.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('name', 'text', col => col.notNull())
.addColumn('description', 'text')
.addColumn('enabled', 'integer', col => col.notNull().defaultTo(1))
.execute();
db.run(sql`
CREATE TABLE IF NOT EXISTS "tagging_rule_conditions" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"tagging_rule_id" text NOT NULL,
"field" text NOT NULL,
"operator" text NOT NULL,
"value" text NOT NULL,
"is_case_sensitive" integer DEFAULT false NOT NULL,
FOREIGN KEY ("tagging_rule_id") REFERENCES "tagging_rules"("id") ON UPDATE cascade ON DELETE cascade
);
`),
// Create tagging_rule_conditions table (depends on tagging_rules)
await db.schema
.createTable('tagging_rule_conditions')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('tagging_rule_id', 'text', col => col.notNull().references('tagging_rules.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('field', 'text', col => col.notNull())
.addColumn('operator', 'text', col => col.notNull())
.addColumn('value', 'text', col => col.notNull())
.addColumn('is_case_sensitive', 'integer', col => col.notNull().defaultTo(0))
.execute();
db.run(sql`
CREATE TABLE IF NOT EXISTS "tagging_rules" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"organization_id" text NOT NULL,
"name" text NOT NULL,
"description" text,
"enabled" integer DEFAULT true NOT NULL,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade
);
`),
]);
// Create tagging_rule_actions table (depends on tagging_rules and tags)
await db.schema
.createTable('tagging_rule_actions')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('tagging_rule_id', 'text', col => col.notNull().references('tagging_rules.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('tag_id', 'text', col => col.notNull().references('tags.id').onDelete('cascade').onUpdate('cascade'))
.execute();
},
down: async ({ db }) => {
await db.batch([
db.run(sql`DROP TABLE IF EXISTS "tagging_rule_actions"`),
db.run(sql`DROP TABLE IF EXISTS "tagging_rule_conditions"`),
db.run(sql`DROP TABLE IF EXISTS "tagging_rules"`),
]);
await db.schema.dropTable('tagging_rule_actions').ifExists().execute();
await db.schema.dropTable('tagging_rule_conditions').ifExists().execute();
await db.schema.dropTable('tagging_rules').ifExists().execute();
},
} satisfies Migration;

View File

@@ -1,46 +1,52 @@
import type { Migration } from '../migrations.types';

// NOTE(review): the pasted source interleaved the old drizzle-orm implementation
// with the new kysely one; only the kysely version is kept. The stale
// `import { sql } from 'drizzle-orm'` is dropped — the kysely version does not use it.
export const apiKeysMigration = {
  name: 'api-keys',

  // Creates the API key tables and indexes in dependency order.
  up: async ({ db }) => {
    // Create api_keys table first (depends on users)
    await db.schema
      .createTable('api_keys')
      .ifNotExists()
      .addColumn('id', 'text', col => col.primaryKey().notNull())
      .addColumn('created_at', 'integer', col => col.notNull())
      .addColumn('updated_at', 'integer', col => col.notNull())
      .addColumn('name', 'text', col => col.notNull())
      .addColumn('key_hash', 'text', col => col.notNull())
      .addColumn('prefix', 'text', col => col.notNull())
      .addColumn('user_id', 'text', col => col.notNull().references('users.id').onDelete('cascade').onUpdate('cascade'))
      .addColumn('last_used_at', 'integer')
      .addColumn('expires_at', 'integer')
      .addColumn('permissions', 'text', col => col.notNull().defaultTo('[]'))
      .addColumn('all_organizations', 'integer', col => col.notNull().defaultTo(0))
      .execute();

    await db.schema
      .createIndex('api_keys_key_hash_unique')
      .unique()
      .ifNotExists()
      .on('api_keys')
      .column('key_hash')
      .execute();

    await db.schema
      .createIndex('key_hash_index')
      .ifNotExists()
      .on('api_keys')
      .column('key_hash')
      .execute();

    // Create api_key_organizations junction table (depends on api_keys and organization_members)
    await db.schema
      .createTable('api_key_organizations')
      .ifNotExists()
      .addColumn('api_key_id', 'text', col => col.notNull().references('api_keys.id').onDelete('cascade').onUpdate('cascade'))
      .addColumn('organization_member_id', 'text', col => col.notNull().references('organization_members.id').onDelete('cascade').onUpdate('cascade'))
      .execute();
  },

  // Drops the tables in reverse dependency order; SQLite drops each table's
  // indexes along with the table, so no explicit dropIndex calls are needed.
  down: async ({ db }) => {
    await db.schema.dropTable('api_key_organizations').ifExists().execute();
    await db.schema.dropTable('api_keys').ifExists().execute();
  },
} satisfies Migration;

View File

@@ -1,62 +1,61 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const organizationsWebhooksMigration = {
name: 'organizations-webhooks',
up: async ({ db }) => {
await db.batch([
db.run(sql`
CREATE TABLE IF NOT EXISTS "webhook_deliveries" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"webhook_id" text NOT NULL,
"event_name" text NOT NULL,
"request_payload" text NOT NULL,
"response_payload" text NOT NULL,
"response_status" integer NOT NULL,
FOREIGN KEY ("webhook_id") REFERENCES "webhooks"("id") ON UPDATE cascade ON DELETE cascade
);
`),
db.run(sql`
CREATE TABLE IF NOT EXISTS "webhook_events" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"webhook_id" text NOT NULL,
"event_name" text NOT NULL,
FOREIGN KEY ("webhook_id") REFERENCES "webhooks"("id") ON UPDATE cascade ON DELETE cascade
);
`),
// Create webhooks table first
await db.schema
.createTable('webhooks')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('name', 'text', col => col.notNull())
.addColumn('url', 'text', col => col.notNull())
.addColumn('secret', 'text')
.addColumn('enabled', 'integer', col => col.notNull().defaultTo(1))
.addColumn('created_by', 'text', col => col.references('users.id').onDelete('set null').onUpdate('cascade'))
.addColumn('organization_id', 'text', col => col.references('organizations.id').onDelete('cascade').onUpdate('cascade'))
.execute();
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "webhook_events_webhook_id_event_name_unique" ON "webhook_events" ("webhook_id","event_name")`),
// Create webhook_events table (depends on webhooks)
await db.schema
.createTable('webhook_events')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('webhook_id', 'text', col => col.notNull().references('webhooks.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('event_name', 'text', col => col.notNull())
.execute();
db.run(sql`
CREATE TABLE IF NOT EXISTS "webhooks" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"name" text NOT NULL,
"url" text NOT NULL,
"secret" text,
"enabled" integer DEFAULT true NOT NULL,
"created_by" text,
"organization_id" text,
FOREIGN KEY ("created_by") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade
);
`),
await db.schema
.createIndex('webhook_events_webhook_id_event_name_unique')
.unique()
.ifNotExists()
.on('webhook_events')
.columns(['webhook_id', 'event_name'])
.execute();
]);
// Create webhook_deliveries table (depends on webhooks)
await db.schema
.createTable('webhook_deliveries')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('updated_at', 'integer', col => col.notNull())
.addColumn('webhook_id', 'text', col => col.notNull().references('webhooks.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('event_name', 'text', col => col.notNull())
.addColumn('request_payload', 'text', col => col.notNull())
.addColumn('response_payload', 'text', col => col.notNull())
.addColumn('response_status', 'integer', col => col.notNull())
.execute();
},
down: async ({ db }) => {
await db.batch([
db.run(sql`DROP TABLE IF EXISTS "webhook_deliveries"`),
db.run(sql`DROP TABLE IF EXISTS "webhook_events"`),
db.run(sql`DROP INDEX IF EXISTS "webhook_events_webhook_id_event_name_unique"`),
db.run(sql`DROP TABLE IF EXISTS "webhooks"`),
]);
await db.schema.dropTable('webhook_deliveries').ifExists().execute();
await db.schema.dropTable('webhook_events').ifExists().execute();
await db.schema.dropTable('webhooks').ifExists().execute();
},
} satisfies Migration;

View File

@@ -1,22 +1,29 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
import { sql } from 'kysely';
export const organizationsInvitationsImprovementMigration = {
name: 'organizations-invitations-improvement',
up: async ({ db }) => {
await db.batch([
db.run(sql`ALTER TABLE "organization_invitations" ALTER COLUMN "role" TO "role" text NOT NULL`),
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "organization_invitations_organization_email_unique" ON "organization_invitations" ("organization_id","email")`),
db.run(sql`ALTER TABLE "organization_invitations" ALTER COLUMN "status" TO "status" text NOT NULL DEFAULT 'pending'`),
]);
await db.executeQuery(sql`ALTER TABLE "organization_invitations" ALTER COLUMN "role" TO "role" text not null`.compile(db));
await db.executeQuery(sql`ALTER TABLE "organization_invitations" ALTER COLUMN "status" TO "status" text not null DEFAULT 'pending'`.compile(db));
await db.schema
.createIndex('organization_invitations_organization_email_unique')
.unique()
.ifNotExists()
.on('organization_invitations')
.columns(['organization_id', 'email'])
.execute();
},
down: async ({ db }) => {
await db.batch([
db.run(sql`ALTER TABLE "organization_invitations" ALTER COLUMN "role" TO "role" text`),
db.run(sql`DROP INDEX IF EXISTS "organization_invitations_organization_email_unique"`),
db.run(sql`ALTER TABLE "organization_invitations" ALTER COLUMN "status" TO "status" text NOT NULL`),
]);
await db.executeQuery(sql`ALTER TABLE "organization_invitations" ALTER COLUMN "role" TO "role" text`.compile(db));
await db.executeQuery(sql`ALTER TABLE "organization_invitations" ALTER COLUMN "status" TO "status" text not null`.compile(db));
await db.schema
.dropIndex('organization_invitations_organization_email_unique')
.ifExists()
.execute();
},
} satisfies Migration;

View File

@@ -1,31 +1,23 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
import type { Migration} from '../migrations.types';
export const documentActivityLogMigration = {
name: 'document-activity-log',
up: async ({ db }) => {
await db.batch([
db.run(sql`
CREATE TABLE IF NOT EXISTS "document_activity_log" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"document_id" text NOT NULL,
"event" text NOT NULL,
"event_data" text,
"user_id" text,
"tag_id" text,
FOREIGN KEY ("document_id") REFERENCES "documents"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE no action,
FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE no action
);
`),
]);
await db.schema
.createTable('document_activity_log')
.ifNotExists()
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('document_id', 'text', col => col.notNull().references('documents.id').onDelete('cascade').onUpdate('cascade'))
.addColumn('event', 'text', col => col.notNull())
.addColumn('event_data', 'text')
.addColumn('user_id', 'text', col => col.references('users.id').onDelete('no action').onUpdate('cascade'))
.addColumn('tag_id', 'text', col => col.references('tags.id').onDelete('no action').onUpdate('cascade'))
.execute();
},
down: async ({ db }) => {
await db.batch([
db.run(sql`DROP TABLE IF EXISTS "document_activity_log"`),
]);
await db.schema.dropTable('document_activity_log').ifExists().execute();
},
} satisfies Migration;

View File

@@ -1,56 +1,75 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
import { sql } from 'kysely';
export const documentActivityLogOnDeleteSetNullMigration = {
name: 'document-activity-log-on-delete-set-null',
up: async ({ db }) => {
await db.batch([
db.run(sql`PRAGMA foreign_keys=OFF`),
db.run(sql`
CREATE TABLE "__new_document_activity_log" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"document_id" text NOT NULL,
"event" text NOT NULL,
"event_data" text,
"user_id" text,
"tag_id" text,
FOREIGN KEY ("document_id") REFERENCES "documents"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null,
FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE set null
);
`),
db.run(sql`
INSERT INTO "__new_document_activity_log"("id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id") SELECT "id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id" FROM "document_activity_log";
`),
db.run(sql`DROP TABLE IF EXISTS "document_activity_log"`),
db.run(sql`ALTER TABLE "__new_document_activity_log" RENAME TO "document_activity_log"`),
db.run(sql`PRAGMA foreign_keys=ON`),
]);
// SQLite doesn't support modifying foreign keys, need to recreate table
await db.executeQuery(sql`PRAGMA foreign_keys=OFF`.compile(db));
await db.schema
.createTable('__new_document_activity_log')
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('document_id', 'text', col => col.notNull().references('documents.id').onUpdate('cascade').onDelete('cascade'))
.addColumn('event', 'text', col => col.notNull())
.addColumn('event_data', 'text')
.addColumn('user_id', 'text', col => col.references('users.id').onUpdate('cascade').onDelete('set null'))
.addColumn('tag_id', 'text', col => col.references('tags.id').onUpdate('cascade').onDelete('set null'))
.execute();
await db.executeQuery(sql`
INSERT INTO "__new_document_activity_log"("id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id")
SELECT "id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id" FROM "document_activity_log"
`.compile(db));
await db
.schema
.dropTable('document_activity_log')
.ifExists()
.execute();
await db
.schema
.alterTable('__new_document_activity_log')
.renameTo('document_activity_log')
.execute();
await db.executeQuery(sql`PRAGMA foreign_keys=ON`.compile(db));
},
down: async ({ db }) => {
await db.batch([
db.run(sql`PRAGMA foreign_keys=OFF`),
db.run(sql`
CREATE TABLE "__restore_document_activity_log" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"document_id" text NOT NULL,
"event" text NOT NULL,
"event_data" text,
"user_id" text,
"tag_id" text,
FOREIGN KEY ("document_id") REFERENCES "documents"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE no action,
FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE no action
);
`),
db.run(sql`INSERT INTO "__restore_document_activity_log"("id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id") SELECT "id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id" FROM "document_activity_log";`),
db.run(sql`DROP TABLE IF EXISTS "document_activity_log"`),
db.run(sql`ALTER TABLE "__restore_document_activity_log" RENAME TO "document_activity_log"`),
db.run(sql`PRAGMA foreign_keys=ON`),
]);
await db.executeQuery(sql`PRAGMA foreign_keys=OFF`.compile(db));
await db.schema
.createTable('__restore_document_activity_log')
.addColumn('id', 'text', col => col.primaryKey().notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.addColumn('document_id', 'text', col => col.notNull().references('documents.id').onUpdate('cascade').onDelete('cascade'))
.addColumn('event', 'text', col => col.notNull())
.addColumn('event_data', 'text')
.addColumn('user_id', 'text', col => col.references('users.id').onUpdate('cascade').onDelete('no action'))
.addColumn('tag_id', 'text', col => col.references('tags.id').onUpdate('cascade').onDelete('no action'))
.execute();
await db.executeQuery(sql`
INSERT INTO "__restore_document_activity_log"("id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id")
SELECT "id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id" FROM "document_activity_log"
`.compile(db));
await db
.schema
.dropTable('document_activity_log')
.ifExists()
.execute();
await db
.schema
.alterTable('__restore_document_activity_log')
.renameTo('document_activity_log')
.execute();
await db.executeQuery(sql`PRAGMA foreign_keys=ON`.compile(db));
},
} satisfies Migration;

View File

@@ -1,12 +1,10 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const dropLegacyMigrationsMigration = {
name: 'drop-legacy-migrations',
description: 'Drop the legacy migrations table as it is not used anymore',
up: async ({ db }) => {
await db.run(sql`DROP TABLE IF EXISTS "__drizzle_migrations"`);
await db.schema.dropTable('__drizzle_migrations').ifExists().execute();
},
} satisfies Migration;

View File

@@ -1,32 +1,49 @@
import type { BatchItem } from 'drizzle-orm/batch';
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
import { sql } from 'kysely';
export const documentFileEncryptionMigration = {
name: 'document-file-encryption',
up: async ({ db }) => {
// Check if columns already exist to handle reapplying migrations
const tableInfo = await db.run(sql`PRAGMA table_info(documents)`);
const tableInfo = await db.executeQuery<{ name: string }>(sql`PRAGMA table_info(documents)`.compile(db));
const existingColumns = tableInfo.rows.map(row => row.name);
const hasColumn = (columnName: string) => existingColumns.includes(columnName);
const statements = [
...(!hasColumn('file_encryption_key_wrapped') ? [sql`ALTER TABLE documents ADD COLUMN file_encryption_key_wrapped TEXT`] : []),
...(!hasColumn('file_encryption_kek_version') ? [sql`ALTER TABLE documents ADD COLUMN file_encryption_kek_version TEXT`] : []),
...(!hasColumn('file_encryption_algorithm') ? [sql`ALTER TABLE documents ADD COLUMN file_encryption_algorithm TEXT`] : []),
sql`CREATE INDEX IF NOT EXISTS documents_file_encryption_kek_version_index ON documents (file_encryption_kek_version)`,
];
if (!hasColumn('file_encryption_key_wrapped')) {
await db.schema
.alterTable('documents')
.addColumn('file_encryption_key_wrapped', 'text')
.execute();
}
await db.batch(statements.map(statement => db.run(statement) as BatchItem<'sqlite'>) as [BatchItem<'sqlite'>, ...BatchItem<'sqlite'>[]]);
if (!hasColumn('file_encryption_kek_version')) {
await db.schema
.alterTable('documents')
.addColumn('file_encryption_kek_version', 'text')
.execute();
}
if (!hasColumn('file_encryption_algorithm')) {
await db.schema
.alterTable('documents')
.addColumn('file_encryption_algorithm', 'text')
.execute();
}
await db.schema
.createIndex('documents_file_encryption_kek_version_index')
.ifNotExists()
.on('documents')
.column('file_encryption_kek_version')
.execute();
},
down: async ({ db }) => {
await db.batch([
db.run(sql`DROP INDEX IF EXISTS documents_file_encryption_kek_version_index`),
db.run(sql`ALTER TABLE documents DROP COLUMN file_encryption_key_wrapped`),
db.run(sql`ALTER TABLE documents DROP COLUMN file_encryption_kek_version`),
db.run(sql`ALTER TABLE documents DROP COLUMN file_encryption_algorithm`),
]);
await db.schema.dropIndex('documents_file_encryption_kek_version_index').ifExists().execute();
await db.schema.alterTable('documents').dropColumn('file_encryption_key_wrapped').execute();
await db.schema.alterTable('documents').dropColumn('file_encryption_kek_version').execute();
await db.schema.alterTable('documents').dropColumn('file_encryption_algorithm').execute();
},
} satisfies Migration;

View File

@@ -1,36 +1,56 @@
import type { BatchItem } from 'drizzle-orm/batch';
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
import { sql } from 'kysely';
export const softDeleteOrganizationsMigration = {
name: 'soft-delete-organizations',
up: async ({ db }) => {
const tableInfo = await db.run(sql`PRAGMA table_info(organizations)`);
const tableInfo = await db.executeQuery<{ name: string }>(sql`PRAGMA table_info(organizations)`.compile(db));
const existingColumns = tableInfo.rows.map(row => row.name);
const hasColumn = (columnName: string) => existingColumns.includes(columnName);
const statements = [
...(hasColumn('deleted_by') ? [] : [(sql`ALTER TABLE "organizations" ADD "deleted_by" text REFERENCES users(id);`)]),
...(hasColumn('deleted_at') ? [] : [(sql`ALTER TABLE "organizations" ADD "deleted_at" integer;`)]),
...(hasColumn('scheduled_purge_at') ? [] : [(sql`ALTER TABLE "organizations" ADD "scheduled_purge_at" integer;`)]),
if (!hasColumn('deleted_by')) {
await db.schema
.alterTable('organizations')
.addColumn('deleted_by', 'text', col => col.references('users.id').onDelete('set null').onUpdate('cascade'))
.execute();
}
sql`CREATE INDEX IF NOT EXISTS "organizations_deleted_at_purge_at_index" ON "organizations" ("deleted_at","scheduled_purge_at");`,
sql`CREATE INDEX IF NOT EXISTS "organizations_deleted_by_deleted_at_index" ON "organizations" ("deleted_by","deleted_at");`,
];
if (!hasColumn('deleted_at')) {
await db.schema
.alterTable('organizations')
.addColumn('deleted_at', 'integer')
.execute();
}
await db.batch(statements.map(statement => db.run(statement) as BatchItem<'sqlite'>) as [BatchItem<'sqlite'>, ...BatchItem<'sqlite'>[]]);
if (!hasColumn('scheduled_purge_at')) {
await db.schema
.alterTable('organizations')
.addColumn('scheduled_purge_at', 'integer')
.execute();
}
await db.schema
.createIndex('organizations_deleted_at_purge_at_index')
.ifNotExists()
.on('organizations')
.columns(['deleted_at', 'scheduled_purge_at'])
.execute();
await db.schema
.createIndex('organizations_deleted_by_deleted_at_index')
.ifNotExists()
.on('organizations')
.columns(['deleted_by', 'deleted_at'])
.execute();
},
down: async ({ db }) => {
await db.batch([
db.run(sql`DROP INDEX IF EXISTS "organizations_deleted_at_purge_at_index";`),
db.run(sql`DROP INDEX IF EXISTS "organizations_deleted_by_deleted_at_index";`),
await db.schema.dropIndex('organizations_deleted_at_purge_at_index').ifExists().execute();
await db.schema.dropIndex('organizations_deleted_by_deleted_at_index').ifExists().execute();
db.run(sql`ALTER TABLE "organizations" DROP COLUMN "deleted_by";`),
db.run(sql`ALTER TABLE "organizations" DROP COLUMN "deleted_at";`),
db.run(sql`ALTER TABLE "organizations" DROP COLUMN "scheduled_purge_at";`),
]);
await db.schema.alterTable('organizations').dropColumn('deleted_by').execute();
await db.schema.alterTable('organizations').dropColumn('deleted_at').execute();
await db.schema.alterTable('organizations').dropColumn('scheduled_purge_at').execute();
},
} satisfies Migration;

View File

@@ -1,20 +1,24 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
import { sql } from 'kysely';
import { CONDITION_MATCH_MODES } from '../../modules/tagging-rules/tagging-rules.constants';
export const taggingRuleConditionMatchModeMigration = {
name: 'tagging-rule-condition-match-mode',
up: async ({ db }) => {
const tableInfo = await db.run(sql`PRAGMA table_info(tagging_rules)`);
const tableInfo = await db.executeQuery<{ name: string }>(sql`PRAGMA table_info(tagging_rules)`.compile(db));
const existingColumns = tableInfo.rows.map(row => row.name);
const hasColumn = (columnName: string) => existingColumns.includes(columnName);
if (!hasColumn('condition_match_mode')) {
await db.run(sql`ALTER TABLE "tagging_rules" ADD "condition_match_mode" text DEFAULT 'all' NOT NULL;`);
await db.schema
.alterTable('tagging_rules')
.addColumn('condition_match_mode', 'text', col => col.defaultTo(CONDITION_MATCH_MODES.ALL).notNull())
.execute();
}
},
down: async ({ db }) => {
await db.run(sql`ALTER TABLE "tagging_rules" DROP COLUMN "condition_match_mode";`);
await db.schema.alterTable('tagging_rules').dropColumn('condition_match_mode').execute();
},
} satisfies Migration;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,90 +0,0 @@
{
"version": "7",
"dialect": "sqlite",
"entries": [
{
"idx": 0,
"version": "6",
"when": 1743508385578,
"tag": "0000_initial_schema_setup",
"breakpoints": true
},
{
"idx": 1,
"version": "6",
"when": 1743508401881,
"tag": "0001_documents_fts",
"breakpoints": true
},
{
"idx": 2,
"version": "6",
"when": 1743938048080,
"tag": "0002_tagging_rules",
"breakpoints": true
},
{
"idx": 3,
"version": "6",
"when": 1745131802627,
"tag": "0003_api-keys",
"breakpoints": true
},
{
"idx": 4,
"version": "6",
"when": 1746297779495,
"tag": "0004_organizations-webhooks",
"breakpoints": true
},
{
"idx": 5,
"version": "6",
"when": 1747575029264,
"tag": "0005_organizations-invitations-improvement",
"breakpoints": true
},
{
"idx": 6,
"version": "6",
"when": 1748554484124,
"tag": "0006_document-activity-log",
"breakpoints": true
},
{
"idx": 7,
"version": "6",
"when": 1754086182584,
"tag": "0007_document-activity-log-on-delete-set-null",
"breakpoints": true
},
{
"idx": 8,
"version": "6",
"when": 1756332437565,
"tag": "0008_document-file-encryption",
"breakpoints": true
},
{
"idx": 9,
"version": "6",
"when": 1756332955747,
"tag": "0009_document-file-encryption",
"breakpoints": true
},
{
"idx": 10,
"version": "6",
"when": 1760016118956,
"tag": "0010_soft-delete-organizations",
"breakpoints": true
},
{
"idx": 11,
"version": "6",
"when": 1761645190314,
"tag": "0011_tagging-rule-condition-match-mode",
"breakpoints": true
}
]
}

View File

@@ -1,14 +0,0 @@
import { index, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core';
export const migrationsTable = sqliteTable(
'migrations',
{
id: integer('id').primaryKey({ autoIncrement: true }),
name: text('name').notNull(),
runAt: integer('run_at', { mode: 'timestamp_ms' }).notNull().$default(() => new Date()),
},
t => [
index('name_index').on(t.name),
index('run_at_index').on(t.runAt),
],
);

View File

@@ -1,6 +1,6 @@
import type { Migration } from './migrations.types';
import { createNoopLogger } from '@crowlog/logger';
import { sql } from 'drizzle-orm';
import { sql } from 'kysely';
import { describe, expect, test } from 'vitest';
import { setupDatabase } from '../modules/app/database/database';
import { serializeSchema } from '../modules/app/database/database.test-utils';
@@ -30,7 +30,7 @@ describe('migrations registry', () => {
await runMigrations({ db, migrations, logger: createNoopLogger() });
// check foreign keys are enabled
const { rows } = await db.run(sql`pragma foreign_keys;`);
const { rows } = await db.executeQuery<{ foreign_keys: number }>(sql`PRAGMA foreign_keys`.compile(db));
expect(rows).to.eql([{ foreign_keys: 1 }]);
});
@@ -69,59 +69,59 @@ describe('migrations registry', () => {
await runMigrations({ db, migrations, logger: createNoopLogger() });
expect(await serializeSchema({ db })).toMatchInlineSnapshot(`
"CREATE UNIQUE INDEX "api_keys_key_hash_unique" ON "api_keys" ("key_hash");
CREATE INDEX "auth_sessions_token_index" ON "auth_sessions" ("token");
CREATE INDEX "auth_verifications_identifier_index" ON "auth_verifications" ("identifier");
CREATE INDEX documents_file_encryption_kek_version_index ON documents (file_encryption_kek_version);
CREATE INDEX "documents_organization_id_is_deleted_created_at_index" ON "documents" ("organization_id","is_deleted","created_at");
CREATE INDEX "documents_organization_id_is_deleted_index" ON "documents" ("organization_id","is_deleted");
CREATE UNIQUE INDEX "documents_organization_id_original_sha256_hash_unique" ON "documents" ("organization_id","original_sha256_hash");
CREATE INDEX "documents_organization_id_size_index" ON "documents" ("organization_id","original_size");
CREATE INDEX "documents_original_sha256_hash_index" ON "documents" ("original_sha256_hash");
CREATE UNIQUE INDEX "intake_emails_email_address_unique" ON "intake_emails" ("email_address");
CREATE INDEX "key_hash_index" ON "api_keys" ("key_hash");
CREATE INDEX migrations_name_index ON migrations (name);
CREATE INDEX migrations_run_at_index ON migrations (run_at);
CREATE UNIQUE INDEX "organization_invitations_organization_email_unique" ON "organization_invitations" ("organization_id","email");
CREATE UNIQUE INDEX "organization_members_user_organization_unique" ON "organization_members" ("organization_id","user_id");
CREATE INDEX "organizations_deleted_at_purge_at_index" ON "organizations" ("deleted_at","scheduled_purge_at");
CREATE INDEX "organizations_deleted_by_deleted_at_index" ON "organizations" ("deleted_by","deleted_at");
CREATE UNIQUE INDEX "tags_organization_id_name_unique" ON "tags" ("organization_id","name");
CREATE INDEX "user_roles_role_index" ON "user_roles" ("role");
CREATE UNIQUE INDEX "user_roles_user_id_role_unique_index" ON "user_roles" ("user_id","role");
CREATE INDEX "users_email_index" ON "users" ("email");
CREATE UNIQUE INDEX "users_email_unique" ON "users" ("email");
CREATE UNIQUE INDEX "webhook_events_webhook_id_event_name_unique" ON "webhook_events" ("webhook_id","event_name");
CREATE TABLE "api_key_organizations" ( "api_key_id" text NOT NULL, "organization_member_id" text NOT NULL, FOREIGN KEY ("api_key_id") REFERENCES "api_keys"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("organization_member_id") REFERENCES "organization_members"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "api_keys" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "name" text NOT NULL, "key_hash" text NOT NULL, "prefix" text NOT NULL, "user_id" text NOT NULL, "last_used_at" integer, "expires_at" integer, "permissions" text DEFAULT '[]' NOT NULL, "all_organizations" integer DEFAULT false NOT NULL, FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "auth_accounts" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "user_id" text, "account_id" text NOT NULL, "provider_id" text NOT NULL, "access_token" text, "refresh_token" text, "access_token_expires_at" integer, "refresh_token_expires_at" integer, "scope" text, "id_token" text, "password" text, FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "auth_sessions" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "token" text NOT NULL, "user_id" text, "expires_at" integer NOT NULL, "ip_address" text, "user_agent" text, "active_organization_id" text, FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("active_organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE set null );
CREATE TABLE "auth_verifications" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "identifier" text NOT NULL, "value" text NOT NULL, "expires_at" integer NOT NULL );
CREATE TABLE "document_activity_log" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "document_id" text NOT NULL, "event" text NOT NULL, "event_data" text, "user_id" text, "tag_id" text, FOREIGN KEY ("document_id") REFERENCES "documents"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null, FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE set null );
CREATE TABLE "documents" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "is_deleted" integer DEFAULT false NOT NULL, "deleted_at" integer, "organization_id" text NOT NULL, "created_by" text, "deleted_by" text, "original_name" text NOT NULL, "original_size" integer DEFAULT 0 NOT NULL, "original_storage_key" text NOT NULL, "original_sha256_hash" text NOT NULL, "name" text NOT NULL, "mime_type" text NOT NULL, "content" text DEFAULT '' NOT NULL, file_encryption_key_wrapped TEXT, file_encryption_kek_version TEXT, file_encryption_algorithm TEXT, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("created_by") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null, FOREIGN KEY ("deleted_by") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null );
"CREATE UNIQUE INDEX "api_keys_key_hash_unique" on "api_keys" ("key_hash");
CREATE INDEX "auth_sessions_token_index" on "auth_sessions" ("token");
CREATE INDEX "auth_verifications_identifier_index" on "auth_verifications" ("identifier");
CREATE INDEX "documents_file_encryption_kek_version_index" on "documents" ("file_encryption_kek_version");
CREATE INDEX "documents_organization_id_is_deleted_created_at_index" on "documents" ("organization_id", "is_deleted", "created_at");
CREATE INDEX "documents_organization_id_is_deleted_index" on "documents" ("organization_id", "is_deleted");
CREATE UNIQUE INDEX "documents_organization_id_original_sha256_hash_unique" on "documents" ("organization_id", "original_sha256_hash");
CREATE INDEX "documents_organization_id_size_index" on "documents" ("organization_id", "original_size");
CREATE INDEX "documents_original_sha256_hash_index" on "documents" ("original_sha256_hash");
CREATE UNIQUE INDEX "intake_emails_email_address_unique" on "intake_emails" ("email_address");
CREATE INDEX "key_hash_index" on "api_keys" ("key_hash");
CREATE INDEX "migrations_name_index" on "migrations" ("name");
CREATE INDEX "migrations_run_at_index" on "migrations" ("run_at");
CREATE UNIQUE INDEX "organization_invitations_organization_email_unique" on "organization_invitations" ("organization_id", "email");
CREATE UNIQUE INDEX "organization_members_user_organization_unique" on "organization_members" ("organization_id", "user_id");
CREATE INDEX "organizations_deleted_at_purge_at_index" on "organizations" ("deleted_at", "scheduled_purge_at");
CREATE INDEX "organizations_deleted_by_deleted_at_index" on "organizations" ("deleted_by", "deleted_at");
CREATE UNIQUE INDEX "tags_organization_id_name_unique" on "tags" ("organization_id", "name");
CREATE INDEX "user_roles_role_index" on "user_roles" ("role");
CREATE UNIQUE INDEX "user_roles_user_id_role_unique_index" on "user_roles" ("user_id", "role");
CREATE INDEX "users_email_index" on "users" ("email");
CREATE UNIQUE INDEX "users_email_unique" on "users" ("email");
CREATE UNIQUE INDEX "webhook_events_webhook_id_event_name_unique" on "webhook_events" ("webhook_id", "event_name");
CREATE TABLE "api_key_organizations" ("api_key_id" text not null references "api_keys" ("id") on delete cascade on update cascade, "organization_member_id" text not null references "organization_members" ("id") on delete cascade on update cascade);
CREATE TABLE "api_keys" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "name" text not null, "key_hash" text not null, "prefix" text not null, "user_id" text not null references "users" ("id") on delete cascade on update cascade, "last_used_at" integer, "expires_at" integer, "permissions" text default '[]' not null, "all_organizations" integer default 0 not null);
CREATE TABLE "auth_accounts" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "user_id" text references "users" ("id") on delete cascade on update cascade, "account_id" text not null, "provider_id" text not null, "access_token" text, "refresh_token" text, "access_token_expires_at" integer, "refresh_token_expires_at" integer, "scope" text, "id_token" text, "password" text);
CREATE TABLE "auth_sessions" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "token" text not null, "user_id" text references "users" ("id") on delete cascade on update cascade, "expires_at" integer not null, "ip_address" text, "user_agent" text, "active_organization_id" text references "organizations" ("id") on delete set null on update cascade);
CREATE TABLE "auth_verifications" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "identifier" text not null, "value" text not null, "expires_at" integer not null);
CREATE TABLE "document_activity_log" ("id" text not null primary key, "created_at" integer not null, "document_id" text not null references "documents" ("id") on delete cascade on update cascade, "event" text not null, "event_data" text, "user_id" text references "users" ("id") on delete set null on update cascade, "tag_id" text references "tags" ("id") on delete set null on update cascade);
CREATE TABLE "documents" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "is_deleted" integer default 0 not null, "deleted_at" integer, "organization_id" text not null references "organizations" ("id") on delete cascade on update cascade, "created_by" text references "users" ("id") on delete set null on update cascade, "deleted_by" text references "users" ("id") on delete set null on update cascade, "original_name" text not null, "original_size" integer default 0 not null, "original_storage_key" text not null, "original_sha256_hash" text not null, "name" text not null, "mime_type" text not null, "content" text default '' not null, "file_encryption_key_wrapped" text, "file_encryption_kek_version" text, "file_encryption_algorithm" text);
CREATE VIRTUAL TABLE documents_fts USING fts5(id UNINDEXED, name, original_name, content, prefix='2 3 4');
CREATE TABLE 'documents_fts_config'(k PRIMARY KEY, v) WITHOUT ROWID;
CREATE TABLE 'documents_fts_content'(id INTEGER PRIMARY KEY, c0, c1, c2, c3);
CREATE TABLE 'documents_fts_data'(id INTEGER PRIMARY KEY, block BLOB);
CREATE TABLE 'documents_fts_docsize'(id INTEGER PRIMARY KEY, sz BLOB);
CREATE TABLE 'documents_fts_idx'(segid, term, pgno, PRIMARY KEY(segid, term)) WITHOUT ROWID;
CREATE TABLE "documents_tags" ( "document_id" text NOT NULL, "tag_id" text NOT NULL, PRIMARY KEY("document_id", "tag_id"), FOREIGN KEY ("document_id") REFERENCES "documents"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "intake_emails" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "email_address" text NOT NULL, "organization_id" text NOT NULL, "allowed_origins" text DEFAULT '[]' NOT NULL, "is_enabled" integer DEFAULT true NOT NULL, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL, run_at INTEGER NOT NULL);
CREATE TABLE "organization_invitations" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "organization_id" text NOT NULL, "email" text NOT NULL, "role" text NOT NULL, "status" text NOT NULL DEFAULT 'pending', "expires_at" integer NOT NULL, "inviter_id" text NOT NULL, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("inviter_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "organization_members" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "organization_id" text NOT NULL, "user_id" text NOT NULL, "role" text NOT NULL, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "organization_subscriptions" ( "id" text PRIMARY KEY NOT NULL, "customer_id" text NOT NULL, "organization_id" text NOT NULL, "plan_id" text NOT NULL, "status" text NOT NULL, "seats_count" integer NOT NULL, "current_period_end" integer NOT NULL, "current_period_start" integer NOT NULL, "cancel_at_period_end" integer DEFAULT false NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "organizations" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "name" text NOT NULL, "customer_id" text , "deleted_by" text REFERENCES users(id), "deleted_at" integer, "scheduled_purge_at" integer);
CREATE TABLE "documents_tags" ("document_id" text not null references "documents" ("id") on delete cascade on update cascade, "tag_id" text not null references "tags" ("id") on delete cascade on update cascade, constraint "documents_tags_pkey" primary key ("document_id", "tag_id"));
CREATE TABLE "intake_emails" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "email_address" text not null, "organization_id" text not null references "organizations" ("id") on delete cascade on update cascade, "allowed_origins" text default '[]' not null, "is_enabled" integer default 1 not null);
CREATE TABLE "migrations" ("id" integer primary key autoincrement, "name" text not null, "run_at" integer not null);
CREATE TABLE "organization_invitations" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "organization_id" text not null references "organizations" ("id") on delete cascade on update cascade, "email" text not null, "role" text not null, "status" text not null DEFAULT 'pending', "expires_at" integer not null, "inviter_id" text not null references "users" ("id") on delete cascade on update cascade);
CREATE TABLE "organization_members" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "organization_id" text not null references "organizations" ("id") on delete cascade on update cascade, "user_id" text not null references "users" ("id") on delete cascade on update cascade, "role" text not null);
CREATE TABLE "organization_subscriptions" ("id" text not null primary key, "customer_id" text not null, "organization_id" text not null references "organizations" ("id") on delete cascade on update cascade, "plan_id" text not null, "status" text not null, "seats_count" integer not null, "current_period_end" integer not null, "current_period_start" integer not null, "cancel_at_period_end" integer default 0 not null, "created_at" integer not null, "updated_at" integer not null);
CREATE TABLE "organizations" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "name" text not null, "customer_id" text, "deleted_by" text references "users" ("id") on delete set null on update cascade, "deleted_at" integer, "scheduled_purge_at" integer);
CREATE TABLE sqlite_sequence(name,seq);
CREATE TABLE "tagging_rule_actions" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "tagging_rule_id" text NOT NULL, "tag_id" text NOT NULL, FOREIGN KEY ("tagging_rule_id") REFERENCES "tagging_rules"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "tagging_rule_conditions" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "tagging_rule_id" text NOT NULL, "field" text NOT NULL, "operator" text NOT NULL, "value" text NOT NULL, "is_case_sensitive" integer DEFAULT false NOT NULL, FOREIGN KEY ("tagging_rule_id") REFERENCES "tagging_rules"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "tagging_rules" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "organization_id" text NOT NULL, "name" text NOT NULL, "description" text, "enabled" integer DEFAULT true NOT NULL, "condition_match_mode" text DEFAULT 'all' NOT NULL, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "tags" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "organization_id" text NOT NULL, "name" text NOT NULL, "color" text NOT NULL, "description" text, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "user_roles" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "user_id" text NOT NULL, "role" text NOT NULL, FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "users" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "email" text NOT NULL, "email_verified" integer DEFAULT false NOT NULL, "name" text, "image" text, "max_organization_count" integer );
CREATE TABLE "webhook_deliveries" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "webhook_id" text NOT NULL, "event_name" text NOT NULL, "request_payload" text NOT NULL, "response_payload" text NOT NULL, "response_status" integer NOT NULL, FOREIGN KEY ("webhook_id") REFERENCES "webhooks"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "webhook_events" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "webhook_id" text NOT NULL, "event_name" text NOT NULL, FOREIGN KEY ("webhook_id") REFERENCES "webhooks"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "webhooks" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "name" text NOT NULL, "url" text NOT NULL, "secret" text, "enabled" integer DEFAULT true NOT NULL, "created_by" text, "organization_id" text, FOREIGN KEY ("created_by") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "tagging_rule_actions" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "tagging_rule_id" text not null references "tagging_rules" ("id") on delete cascade on update cascade, "tag_id" text not null references "tags" ("id") on delete cascade on update cascade);
CREATE TABLE "tagging_rule_conditions" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "tagging_rule_id" text not null references "tagging_rules" ("id") on delete cascade on update cascade, "field" text not null, "operator" text not null, "value" text not null, "is_case_sensitive" integer default 0 not null);
CREATE TABLE "tagging_rules" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "organization_id" text not null references "organizations" ("id") on delete cascade on update cascade, "name" text not null, "description" text, "enabled" integer default 1 not null, "condition_match_mode" text default 'all' not null);
CREATE TABLE "tags" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "organization_id" text not null references "organizations" ("id") on delete cascade on update cascade, "name" text not null, "color" text not null, "description" text);
CREATE TABLE "user_roles" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "user_id" text not null references "users" ("id") on delete cascade on update cascade, "role" text not null);
CREATE TABLE "users" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "email" text not null, "email_verified" integer default 0 not null, "name" text, "image" text, "max_organization_count" integer);
CREATE TABLE "webhook_deliveries" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "webhook_id" text not null references "webhooks" ("id") on delete cascade on update cascade, "event_name" text not null, "request_payload" text not null, "response_payload" text not null, "response_status" integer not null);
CREATE TABLE "webhook_events" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "webhook_id" text not null references "webhooks" ("id") on delete cascade on update cascade, "event_name" text not null);
CREATE TABLE "webhooks" ("id" text not null primary key, "created_at" integer not null, "updated_at" integer not null, "name" text not null, "url" text not null, "secret" text, "enabled" integer default 1 not null, "created_by" text references "users" ("id") on delete set null on update cascade, "organization_id" text references "organizations" ("id") on delete cascade on update cascade);
CREATE TRIGGER trigger_documents_fts_delete AFTER DELETE ON documents BEGIN DELETE FROM documents_fts WHERE id = old.id; END;
CREATE TRIGGER trigger_documents_fts_insert AFTER INSERT ON documents BEGIN INSERT INTO documents_fts(id, name, original_name, content) VALUES (new.id, new.name, new.original_name, new.content); END;
CREATE TRIGGER trigger_documents_fts_update AFTER UPDATE ON documents BEGIN UPDATE documents_fts SET name = new.name, original_name = new.original_name, content = new.content WHERE id = new.id; END;"
@@ -136,7 +136,7 @@ describe('migrations registry', () => {
const dbState = await serializeSchema({ db });
await db.run(sql`DROP TABLE migrations`);
await db.executeQuery(sql`DROP TABLE migrations`.compile(db));
await runMigrations({ db, migrations, logger: createNoopLogger() });
expect(await serializeSchema({ db })).to.eq(dbState);

View File

@@ -1,29 +1,54 @@
import type { Database } from '../modules/app/database/database.types';
import { asc, eq, sql } from 'drizzle-orm';
import { migrationsTable } from './migration.tables';
import type { DatabaseClient } from '../modules/app/database/database.types';
export async function setupMigrationTableIfNotExists({ db }: { db: Database }) {
await db.batch([
db.run(sql`CREATE TABLE IF NOT EXISTS migrations (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL, run_at INTEGER NOT NULL)`),
db.run(sql`CREATE INDEX IF NOT EXISTS migrations_name_index ON migrations (name)`),
db.run(sql`CREATE INDEX IF NOT EXISTS migrations_run_at_index ON migrations (run_at)`),
]);
export async function setupMigrationTableIfNotExists({ db }: { db: DatabaseClient }) {
await db.schema
.createTable('migrations')
.ifNotExists()
.addColumn('id', 'integer', col => col.primaryKey().autoIncrement())
.addColumn('name', 'text', col => col.notNull())
.addColumn('run_at', 'integer', col => col.notNull())
.execute();
await db.schema
.createIndex('migrations_name_index')
.ifNotExists()
.on('migrations')
.columns(['name'])
.execute();
await db.schema
.createIndex('migrations_run_at_index')
.ifNotExists()
.on('migrations')
.columns(['run_at'])
.execute();
}
export async function getMigrations({ db }: { db: Database }) {
const migrations = await db.select().from(migrationsTable).orderBy(asc(migrationsTable.runAt));
export async function getMigrations({ db }: { db: DatabaseClient }) {
const dbMigrations = await db.selectFrom('migrations').selectAll().orderBy('run_at', 'asc').execute();
return { migrations };
return {
migrations: dbMigrations.map(migration => ({
...migration,
runAt: new Date(migration.run_at),
})),
};
}
export async function saveMigration({ db, migrationName, now = new Date() }: { db: Database; migrationName: string; now?: Date }) {
await db.insert(migrationsTable).values({ name: migrationName, runAt: now });
export async function saveMigration({ db, migrationName, now = new Date() }: { db: DatabaseClient; migrationName: string; now?: Date }) {
await db
.insertInto('migrations')
.values({
name: migrationName,
run_at: now.getTime(),
})
.execute();
}
export async function deleteMigration({ db, migrationName }: { db: Database; migrationName: string }) {
await db.delete(migrationsTable).where(eq(migrationsTable.name, migrationName));
export async function deleteMigration({ db, migrationName }: { db: DatabaseClient; migrationName: string }) {
await db.deleteFrom('migrations').where('name', '=', migrationName).execute();
}
export async function deleteAllMigrations({ db }: { db: Database }) {
await db.delete(migrationsTable);
export async function deleteAllMigrations({ db }: { db: DatabaseClient }) {
await db.deleteFrom('migrations').execute();
}

View File

@@ -0,0 +1,18 @@
import type { ColumnType, Generated, Insertable, Selectable, Updateable } from 'kysely';
export type MigrationTable = {
id: Generated<number>;
name: string;
run_at: ColumnType<number, number | undefined, never>;
};
export type DbSelectableMigration = Selectable<MigrationTable>;
export type DbInsertableMigration = Insertable<MigrationTable>;
export type DbUpdatableMigration = Updateable<MigrationTable>;
export type InsertableMigration = Omit<DbInsertableMigration, 'id' | 'run_at'>;
export type Migration = {
id: number;
name: string;
runAt: Date;
};

View File

@@ -1,7 +1,7 @@
import type { Database } from '../modules/app/database/database.types';
import type { DatabaseClient } from '../modules/app/database/database.types';
export type MigrationArguments = {
db: Database;
db: DatabaseClient;
};
export type Migration = {

View File

@@ -1,49 +1,78 @@
import type { DatabaseClient } from '../modules/app/database/database.types';
import type { Migration } from './migrations.types';
import { createNoopLogger } from '@crowlog/logger';
import { sql } from 'drizzle-orm';
import { sql } from 'kysely';
import { describe, expect, test } from 'vitest';
import { setupDatabase } from '../modules/app/database/database';
import { migrationsTable } from './migration.tables';
import { rollbackLastAppliedMigration, runMigrations } from './migrations.usecases';
const createTableUserMigration: Migration = {
name: 'create-table-user',
up: async ({ db }) => {
await db.run(sql`CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL)`);
await db.schema
.createTable('users')
.addColumn('id', 'integer', col => col.primaryKey().autoIncrement())
.addColumn('name', 'text', col => col.notNull())
.execute();
},
down: async ({ db }) => {
await db.run(sql`DROP TABLE users`);
await db.schema
.dropTable('users')
.execute();
},
};
const createTableOrganizationMigration: Migration = {
name: 'create-table-organization',
up: async ({ db }) => {
await db.batch([
db.run(sql`CREATE TABLE organizations (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL)`),
db.run(sql`CREATE TABLE organization_members (id INTEGER PRIMARY KEY AUTOINCREMENT, organization_id INTEGER NOT NULL, user_id INTEGER NOT NULL, role TEXT NOT NULL, created_at INTEGER NOT NULL)`),
]);
await db.schema
.createTable('organizations')
.addColumn('id', 'integer', col => col.primaryKey().autoIncrement())
.addColumn('name', 'text', col => col.notNull())
.execute();
await db.schema
.createTable('organization_members')
.addColumn('id', 'integer', col => col.primaryKey().autoIncrement())
.addColumn('organization_id', 'integer', col => col.notNull())
.addColumn('user_id', 'integer', col => col.notNull())
.addColumn('role', 'text', col => col.notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.execute();
},
down: async ({ db }) => {
await db.batch([
db.run(sql`DROP TABLE organizations`),
db.run(sql`DROP TABLE organization_members`),
]);
await db.schema
.dropTable('organization_members')
.execute();
await db.schema
.dropTable('organizations')
.execute();
},
};
const createTableDocumentMigration: Migration = {
name: 'create-table-document',
up: async ({ db }) => {
await db.batch([
db.run(sql`CREATE TABLE documents (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL, created_at INTEGER NOT NULL)`),
]);
await db.schema
.createTable('documents')
.addColumn('id', 'integer', col => col.primaryKey().autoIncrement())
.addColumn('name', 'text', col => col.notNull())
.addColumn('created_at', 'integer', col => col.notNull())
.execute();
},
down: async ({ db }) => {
await db.run(sql`DROP TABLE documents`);
await db.schema
.dropTable('documents')
.execute();
},
};
async function getTablesNames({ db }: { db: DatabaseClient }) {
const { rows: tables } = await db.executeQuery<{ name: string }>(sql`SELECT name FROM sqlite_master WHERE name NOT LIKE 'sqlite_%'`.compile(db));
return tables.map(({ name }) => name);
}
describe('migrations usecases', () => {
describe('runMigrations', () => {
test('should run all migrations that are not already applied', async () => {
@@ -53,7 +82,7 @@ describe('migrations usecases', () => {
await runMigrations({ db, migrations, logger: createNoopLogger() });
const migrationsInDb = await db.select().from(migrationsTable);
const migrationsInDb = await db.selectFrom('migrations').selectAll().execute();
expect(migrationsInDb.map(({ id, name }) => ({ id, name }))).to.eql([
{ id: 1, name: 'create-table-user' },
@@ -64,7 +93,7 @@ describe('migrations usecases', () => {
await runMigrations({ db, migrations, logger: createNoopLogger() });
const migrationsInDb2 = await db.select().from(migrationsTable);
const migrationsInDb2 = await db.selectFrom('migrations').selectAll().execute();
expect(migrationsInDb2.map(({ id, name }) => ({ id, name }))).to.eql([
{ id: 1, name: 'create-table-user' },
@@ -72,10 +101,8 @@ describe('migrations usecases', () => {
{ id: 3, name: 'create-table-document' },
]);
const { rows: tables } = await db.run(sql`SELECT name FROM sqlite_master WHERE name NOT LIKE 'sqlite_%'`);
// Ensure all tables and indexes are created
expect(tables.map(t => t.name)).to.eql([
expect(await getTablesNames({ db })).to.eql([
'migrations',
'migrations_name_index',
'migrations_run_at_index',
@@ -95,7 +122,7 @@ describe('migrations usecases', () => {
await runMigrations({ db, migrations, logger: createNoopLogger() });
const initialMigrations = await db.select().from(migrationsTable);
const initialMigrations = await db.selectFrom('migrations').selectAll().execute();
expect(initialMigrations.map(({ id, name }) => ({ id, name }))).to.eql([
{ id: 1, name: 'create-table-user' },
@@ -103,20 +130,22 @@ describe('migrations usecases', () => {
]);
// Ensure the tables exist, no error is thrown
await db.run(sql`SELECT * FROM users`);
await db.run(sql`SELECT * FROM documents`);
await db.selectFrom('users').selectAll().execute();
await db.selectFrom('documents').selectAll().execute();
await rollbackLastAppliedMigration({ db, migrations });
const migrationsInDb = await db.select().from(migrationsTable);
const migrationsInDb = await db.selectFrom('migrations').selectAll().execute();
expect(migrationsInDb.map(({ id, name }) => ({ id, name }))).to.eql([
{ id: 1, name: 'create-table-user' },
]);
// Ensure the table document is dropped
await db.run(sql`SELECT * FROM users`);
await expect(db.run(sql`SELECT * FROM documents`)).rejects.toThrow();
await db.selectFrom('users').selectAll().execute();
await expect(
db.selectFrom('documents').selectAll().execute(),
).rejects.toThrow();
});
test('when there is no migration to rollback, nothing is done', async () => {
@@ -124,7 +153,7 @@ describe('migrations usecases', () => {
await rollbackLastAppliedMigration({ db });
const migrationsInDb = await db.select().from(migrationsTable);
const migrationsInDb = await db.selectFrom('migrations').selectAll().execute();
expect(migrationsInDb).to.eql([]);
});

View File

@@ -1,4 +1,4 @@
import type { Database } from '../modules/app/database/database.types';
import type { DatabaseClient } from '../modules/app/database/database.types';
import type { Logger } from '../modules/shared/logger/logger';
import type { Migration } from './migrations.types';
import { safely } from '@corentinth/chisels';
@@ -6,7 +6,7 @@ import { createLogger } from '../modules/shared/logger/logger';
import { migrations as migrationsList } from './migrations.registry';
import { deleteMigration, getMigrations, saveMigration, setupMigrationTableIfNotExists } from './migrations.repository';
export async function runMigrations({ db, migrations = migrationsList, logger = createLogger({ namespace: 'migrations' }) }: { db: Database; migrations?: Migration[]; logger?: Logger }) {
export async function runMigrations({ db, migrations = migrationsList, logger = createLogger({ namespace: 'migrations' }) }: { db: DatabaseClient; migrations?: Migration[]; logger?: Logger }) {
await setupMigrationTableIfNotExists({ db });
if (migrations.length === 0) {
@@ -45,14 +45,14 @@ export async function runMigrations({ db, migrations = migrationsList, logger =
logger.info('All migrations run successfully');
}
async function upMigration({ db, migration }: { db: Database; migration: Migration }) {
async function upMigration({ db, migration }: { db: DatabaseClient; migration: Migration }) {
const { name, up } = migration;
await up({ db });
await saveMigration({ db, migrationName: name });
}
export async function rollbackLastAppliedMigration({ db, migrations = migrationsList, logger = createLogger({ namespace: 'migrations' }) }: { db: Database; migrations?: Migration[]; logger?: Logger }) {
export async function rollbackLastAppliedMigration({ db, migrations = migrationsList, logger = createLogger({ namespace: 'migrations' }) }: { db: DatabaseClient; migrations?: Migration[]; logger?: Logger }) {
await setupMigrationTableIfNotExists({ db });
const { migrations: existingMigrations } = await getMigrations({ db });
@@ -75,7 +75,7 @@ export async function rollbackLastAppliedMigration({ db, migrations = migrations
logger.info({ migrationName: lastMigration.name }, 'Migration rolled back successfully');
}
async function downMigration({ db, migration }: { db: Database; migration: Migration }) {
async function downMigration({ db, migration }: { db: DatabaseClient; migration: Migration }) {
const { name, down } = migration;
await down?.({ db });

View File

@@ -1,4 +1,4 @@
import type { Database } from '../app/database/database.types';
import type { DatabaseClient } from '../app/database/database.types';
import type { Context } from '../app/server.types';
import { createMiddleware } from 'hono/factory';
import { createUnauthorizedError } from '../app/auth/auth.errors';
@@ -10,7 +10,7 @@ import { getApiKey } from './api-keys.usecases';
// The role of this middleware is to extract the api key from the authorization header if present
// and set it on the context, no auth enforcement is done here
export function createApiKeyMiddleware({ db }: { db: Database }) {
export function createApiKeyMiddleware({ db }: { db: DatabaseClient }) {
const apiKeyRepository = createApiKeysRepository({ db });
return createMiddleware(async (context: Context, next) => {

View File

@@ -1,6 +1,20 @@
import type {
ApiKey,
ApiKeyOrganization,
DbInsertableApiKey,
DbInsertableApiKeyOrganization,
DbSelectableApiKey,
DbSelectableApiKeyOrganization,
InsertableApiKey,
InsertableApiKeyOrganization,
} from './api-keys.new.tables';
import { sha256 } from '../shared/crypto/hash';
import { generateId } from '../shared/random/ids';
import { isNil } from '../shared/utils';
import { API_KEY_PREFIX, API_KEY_TOKEN_REGEX } from './api-keys.constants';
import { API_KEY_ID_PREFIX, API_KEY_PREFIX, API_KEY_TOKEN_REGEX } from './api-keys.constants';
import { apiPermissionsSchema } from './api-keys.schemas';
const generateApiKeyId = () => generateId({ prefix: API_KEY_ID_PREFIX });
export function getApiKeyUiPrefix({ token }: { token: string }) {
return {
@@ -22,3 +36,72 @@ export function looksLikeAnApiKey(token?: string | null | undefined): token is s
return API_KEY_TOKEN_REGEX.test(token);
}
// DB <-> Business model transformers
/**
 * Converts an `api_keys` row into the ApiKey business model.
 *
 * - The overload without `key_hash` (rows selected without the secret column,
 *   e.g. list endpoints) yields an ApiKey without `keyHash`.
 * - Epoch-ms timestamps become `Date`s; `all_organizations` (0/1) becomes a boolean.
 * - `permissions` is stored as a JSON string (see `apiKeyToDb`), so it is
 *   JSON-parsed before being validated against `apiPermissionsSchema`.
 *
 * Returns `undefined` when given no row.
 */
export function dbToApiKey(dbApiKey: Omit<DbSelectableApiKey, 'key_hash'>): Omit<ApiKey, 'keyHash'>;
export function dbToApiKey(dbApiKey: DbSelectableApiKey): ApiKey;
export function dbToApiKey(dbApiKey?: DbSelectableApiKey | Omit<DbSelectableApiKey, 'key_hash'>): ApiKey | undefined | Omit<ApiKey, 'keyHash'> {
  if (!dbApiKey) {
    return undefined;
  }

  return {
    id: dbApiKey.id,
    name: dbApiKey.name,
    // Only expose the hash when the caller actually selected it.
    ...('key_hash' in dbApiKey ? { keyHash: dbApiKey.key_hash } : {}),
    prefix: dbApiKey.prefix,
    userId: dbApiKey.user_id,
    // Fix: the column holds a JSON-serialized array (apiKeyToDb uses JSON.stringify),
    // so deserialize before schema validation — validating the raw string would make
    // the zod array schema throw for every row.
    permissions: apiPermissionsSchema.parse(JSON.parse(dbApiKey.permissions)),
    allOrganizations: dbApiKey.all_organizations === 1,
    createdAt: new Date(dbApiKey.created_at),
    updatedAt: new Date(dbApiKey.updated_at),
    lastUsedAt: isNil(dbApiKey.last_used_at) ? null : new Date(dbApiKey.last_used_at),
    expiresAt: isNil(dbApiKey.expires_at) ? null : new Date(dbApiKey.expires_at),
  };
}
/**
 * Converts an API key business model into an insertable `api_keys` row.
 *
 * Generates an id when absent, serializes `permissions` to a JSON string, and
 * encodes booleans/Dates into their SQLite representations (0/1 flags, epoch ms).
 * `now` and `generateId` are injectable for deterministic tests.
 */
export function apiKeyToDb(
  apiKey: InsertableApiKey,
  {
    now = new Date(),
    generateId = generateApiKeyId,
  }: {
    now?: Date;
    generateId?: () => string;
  } = {},
): DbInsertableApiKey {
  const fallbackMs = now.getTime();

  return {
    id: apiKey.id ?? generateId(),
    name: apiKey.name,
    key_hash: apiKey.keyHash,
    prefix: apiKey.prefix,
    user_id: apiKey.userId,
    // Stored as text; dbToApiKey is responsible for deserializing it back.
    permissions: JSON.stringify(apiKey.permissions ?? []),
    // SQLite has no boolean type: encode as 1/0.
    all_organizations: apiKey.allOrganizations === true ? 1 : 0,
    created_at: apiKey.createdAt?.getTime() ?? fallbackMs,
    updated_at: apiKey.updatedAt?.getTime() ?? fallbackMs,
    // Optional Dates stay undefined when absent (nullable columns).
    last_used_at: apiKey.lastUsedAt?.getTime(),
    expires_at: apiKey.expiresAt?.getTime(),
  };
}
// API Key Organizations junction table transformers
/**
 * Maps an `api_key_organizations` junction row to its camelCase business model.
 * Returns `undefined` when no row is given.
 */
export function dbToApiKeyOrganization(dbApiKeyOrg?: DbSelectableApiKeyOrganization): ApiKeyOrganization | undefined {
  if (dbApiKeyOrg == null) {
    return undefined;
  }

  const { api_key_id: apiKeyId, organization_member_id: organizationMemberId } = dbApiKeyOrg;

  return { apiKeyId, organizationMemberId };
}
/**
 * Maps the api-key/organization-member link business model to its snake_case DB row shape.
 */
export function apiKeyOrganizationToDb(apiKeyOrg: InsertableApiKeyOrganization): DbInsertableApiKeyOrganization {
  const { apiKeyId, organizationMemberId } = apiKeyOrg;

  return {
    api_key_id: apiKeyId,
    organization_member_id: organizationMemberId,
  };
}

View File

@@ -0,0 +1,51 @@
import type { Expand } from '@corentinth/chisels';
import type { Insertable, Selectable, Updateable } from 'kysely';
import type { BusinessInsertable, CamelCaseKeys, TableWithIdAndTimestamps } from '../app/database/database.columns.types';
import type { ApiKeyPermissions } from './api-keys.types';
// --- API Keys

// Kysely table definition for `api_keys`. Columns are snake_case with
// SQLite-friendly primitive types; the camelCase business models are derived below.
export type ApiKeysTable = TableWithIdAndTimestamps<{
  name: string;
  key_hash: string;
  prefix: string;
  user_id: string;
  last_used_at: number | null; // epoch ms; null when the key was never used
  expires_at: number | null; // epoch ms; null when the key never expires
  permissions: string; // JSON-serialized ApiKeyPermissions[] (see apiKeyToDb/dbToApiKey)
  all_organizations: number; // SQLite boolean flag: 1 = key applies to all organizations
}>;

// Raw row shapes as seen by Kysely for select/insert/update.
export type DbSelectableApiKey = Selectable<ApiKeysTable>;
export type DbInsertableApiKey = Insertable<ApiKeysTable>;
export type DbUpdateableApiKey = Updateable<ApiKeysTable>;

// Business-level insert payload: camelCase keys with rich types
// (parsed permissions array, real boolean, Date fields).
export type InsertableApiKey = BusinessInsertable<DbInsertableApiKey, {
  permissions?: ApiKeyPermissions[];
  allOrganizations?: boolean;
  lastUsedAt?: Date | null;
  expiresAt?: Date | null;
}>;

// Business-level selected API key: epoch-ms timestamps become Dates,
// permissions is a parsed array, and the 0/1 flag becomes a boolean.
export type ApiKey = Expand<CamelCaseKeys<Omit<DbSelectableApiKey, 'created_at' | 'updated_at' | 'permissions' | 'all_organizations' | 'last_used_at' | 'expires_at'> & {
  createdAt: Date;
  updatedAt: Date;
  permissions: ApiKeyPermissions[];
  allOrganizations: boolean;
  lastUsedAt: Date | null;
  expiresAt: Date | null;
}>>;

// --- API Key Organizations (Junction Table)

// Junction table linking an API key to an organization membership.
export type ApiKeyOrganizationsTable = {
  api_key_id: string;
  organization_member_id: string;
};

export type DbSelectableApiKeyOrganization = Selectable<ApiKeyOrganizationsTable>;
export type DbInsertableApiKeyOrganization = Insertable<ApiKeyOrganizationsTable>;
export type DbUpdateableApiKeyOrganization = Updateable<ApiKeyOrganizationsTable>;

// camelCase views of the junction row for the business layer.
export type InsertableApiKeyOrganization = Expand<CamelCaseKeys<DbInsertableApiKeyOrganization>>;
export type ApiKeyOrganization = Expand<CamelCaseKeys<DbSelectableApiKeyOrganization>>;

View File

@@ -1,17 +1,16 @@
import type { Database } from '../app/database/database.types';
import type { DatabaseClient } from '../app/database/database.types';
import type { Logger } from '../shared/logger/logger';
import type { ApiKeyPermissions } from './api-keys.types';
import { injectArguments } from '@corentinth/chisels';
import { and, eq, getTableColumns, inArray } from 'drizzle-orm';
import { omit, pick } from 'lodash-es';
import { organizationMembersTable, organizationsTable } from '../organizations/organizations.table';
import { pick } from 'lodash-es';
import { dbToOrganizationMember } from '../organizations/organizations.models';
import { createError } from '../shared/errors/errors';
import { createLogger } from '../shared/logger/logger';
import { apiKeyOrganizationsTable, apiKeysTable } from './api-keys.tables';
import { apiKeyOrganizationToDb, apiKeyToDb, dbToApiKey } from './api-keys.models';
export type ApiKeysRepository = ReturnType<typeof createApiKeysRepository>;
export function createApiKeysRepository({ db, logger = createLogger({ namespace: 'api-keys.repository' }) }: { db: Database; logger?: Logger }) {
export function createApiKeysRepository({ db, logger = createLogger({ namespace: 'api-keys.repository' }) }: { db: DatabaseClient; logger?: Logger }) {
return injectArguments(
{
saveApiKey,
@@ -35,7 +34,7 @@ async function saveApiKey({
organizationIds,
expiresAt,
}: {
db: Database;
db: DatabaseClient;
logger: Logger;
name: string;
keyHash: string;
@@ -46,9 +45,9 @@ async function saveApiKey({
expiresAt?: Date;
userId: string;
}) {
const [apiKey] = await db
.insert(apiKeysTable)
.values({
const dbApiKey = await db
.insertInto('api_keys')
.values(apiKeyToDb({
name,
keyHash,
prefix,
@@ -56,10 +55,11 @@ async function saveApiKey({
allOrganizations,
userId,
expiresAt,
})
.returning();
}))
.returningAll()
.executeTakeFirst();
if (!apiKey) {
if (!dbApiKey) {
// Very unlikely to happen as the insertion should throw an error instead; this check is for type safety
throw createError({
message: 'Error while creating api key',
@@ -69,18 +69,19 @@ async function saveApiKey({
});
}
const apiKey = dbToApiKey(dbApiKey);
if (organizationIds && organizationIds.length > 0) {
const apiKeyId = apiKey.id;
const organizationMembers = await db
.select()
.from(organizationMembersTable)
.where(
and(
inArray(organizationMembersTable.organizationId, organizationIds),
eq(organizationMembersTable.userId, userId),
),
);
const dbOrganizationMembers = await db
.selectFrom('organization_members')
.where('organization_id', 'in', organizationIds)
.where('user_id', '=', userId)
.selectAll()
.execute();
const organizationMembers = dbOrganizationMembers.map(dbOm => (dbToOrganizationMember(dbOm)));
if (!organizationIds.every(id => organizationMembers.some(om => om.organizationId === id))) {
logger.warn({
@@ -91,41 +92,44 @@ async function saveApiKey({
}
await db
.insert(apiKeyOrganizationsTable)
.insertInto('api_key_organizations')
.values(
organizationMembers.map(({ id: organizationMemberId }) => ({ apiKeyId, organizationMemberId })),
);
organizationMembers.map(({ id: organizationMemberId }) => apiKeyOrganizationToDb({ apiKeyId, organizationMemberId })),
)
.execute();
}
return { apiKey };
}
async function getUserApiKeys({ userId, db }: { userId: string; db: Database }) {
const apiKeys = await db
.select({
...omit(getTableColumns(apiKeysTable), 'keyHash'),
})
.from(apiKeysTable)
.where(
eq(apiKeysTable.userId, userId),
);
async function getUserApiKeys({ userId, db }: { userId: string; db: DatabaseClient }) {
const dbApiKeys = await db
.selectFrom('api_keys')
.where('user_id', '=', userId)
.select(['api_keys.id', 'api_keys.user_id', 'api_keys.name', 'api_keys.prefix', 'api_keys.last_used_at', 'api_keys.expires_at', 'api_keys.permissions', 'api_keys.all_organizations', 'api_keys.created_at', 'api_keys.updated_at'])
.execute()
.then(rows => rows.map(row => dbToApiKey(row)));
const relatedOrganizations = await db
.select({
...getTableColumns(organizationsTable),
apiKeyId: apiKeyOrganizationsTable.apiKeyId,
})
.from(apiKeyOrganizationsTable)
.leftJoin(organizationMembersTable, eq(apiKeyOrganizationsTable.organizationMemberId, organizationMembersTable.id))
.leftJoin(organizationsTable, eq(organizationMembersTable.organizationId, organizationsTable.id))
.where(
and(
inArray(apiKeyOrganizationsTable.apiKeyId, apiKeys.map(apiKey => apiKey.id)),
eq(organizationMembersTable.userId, userId),
),
);
.selectFrom('api_key_organizations')
.innerJoin('organization_members', 'api_key_organizations.organization_member_id', 'organization_members.id')
.innerJoin('organizations', 'organization_members.organization_id', 'organizations.id')
.where('api_key_organizations.api_key_id', 'in', dbApiKeys.map(apiKey => apiKey.id))
.where('organization_members.user_id', '=', userId)
.select([
'organizations.id',
'organizations.name',
'organizations.customer_id',
'organizations.deleted_at',
'organizations.deleted_by',
'organizations.scheduled_purge_at',
'organizations.created_at',
'organizations.updated_at',
'api_key_organizations.api_key_id as apiKeyId',
])
.execute();
const apiKeysWithOrganizations = apiKeys.map(apiKey => ({
const apiKeysWithOrganizations = dbApiKeys.map(apiKey => ({
...apiKey,
organizations: relatedOrganizations
.filter(organization => organization.apiKeyId === apiKey.id)
@@ -137,24 +141,22 @@ async function getUserApiKeys({ userId, db }: { userId: string; db: Database })
};
}
async function deleteUserApiKey({ apiKeyId, userId, db }: { apiKeyId: string; userId: string; db: Database }) {
async function deleteUserApiKey({ apiKeyId, userId, db }: { apiKeyId: string; userId: string; db: DatabaseClient }) {
await db
.delete(apiKeysTable)
.where(
and(
eq(apiKeysTable.id, apiKeyId),
eq(apiKeysTable.userId, userId),
),
);
.deleteFrom('api_keys')
.where('id', '=', apiKeyId)
.where('user_id', '=', userId)
.execute();
}
async function getApiKeyByHash({ keyHash, db }: { keyHash: string; db: Database }) {
const [apiKey] = await db
.select()
.from(apiKeysTable)
.where(
eq(apiKeysTable.keyHash, keyHash),
);
async function getApiKeyByHash({ keyHash, db }: { keyHash: string; db: DatabaseClient }) {
const dbApiKey = await db
.selectFrom('api_keys')
.where('key_hash', '=', keyHash)
.selectAll()
.executeTakeFirst();
const apiKey = dbApiKey ? dbToApiKey(dbApiKey) : undefined;
return { apiKey };
}

View File

@@ -1,4 +1,7 @@
import type { ApiKeyPermissions } from './api-keys.types';
import { z } from 'zod';
import { API_KEY_ID_REGEX } from './api-keys.constants';
import { API_KEY_ID_REGEX, API_KEY_PERMISSIONS_VALUES } from './api-keys.constants';
export const apiKeyIdSchema = z.string().regex(API_KEY_ID_REGEX);
export const apiPermissionsSchema = z.array(z.enum(API_KEY_PERMISSIONS_VALUES as [ApiKeyPermissions, ...ApiKeyPermissions[]]));

View File

@@ -1,43 +0,0 @@
import type { ApiKeyPermissions } from './api-keys.types';
import { index, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core';
import { organizationMembersTable } from '../organizations/organizations.table';
import { createPrimaryKeyField, createTimestampColumns } from '../shared/db/columns.helpers';
import { usersTable } from '../users/users.table';
import { API_KEY_ID_PREFIX } from './api-keys.constants';
export const apiKeysTable = sqliteTable(
'api_keys',
{
...createPrimaryKeyField({ prefix: API_KEY_ID_PREFIX }),
...createTimestampColumns(),
name: text('name').notNull(),
keyHash: text('key_hash').notNull().unique(),
// the prefix is used to identify the key, it is the
prefix: text('prefix').notNull(),
userId: text('user_id')
.notNull()
.references(() => usersTable.id, { onDelete: 'cascade', onUpdate: 'cascade' }),
lastUsedAt: integer('last_used_at', { mode: 'timestamp_ms' }),
expiresAt: integer('expires_at', { mode: 'timestamp_ms' }),
permissions: text('permissions', { mode: 'json' }).notNull().$type<ApiKeyPermissions[]>().default([]),
allOrganizations: integer('all_organizations', { mode: 'boolean' }).notNull().default(false),
},
table => [
// To get an API key by its token
index('key_hash_index').on(table.keyHash),
],
);
// We use an intermediate table (instead of a json array) to link API keys to organization members, so the relationship between
// the api key and the organization is deleted on cascade when the organization is deleted or the member is removed from the organization.
export const apiKeyOrganizationsTable = sqliteTable('api_key_organizations', {
apiKeyId: text('api_key_id')
.notNull()
.references(() => apiKeysTable.id, { onDelete: 'cascade', onUpdate: 'cascade' }),
organizationMemberId: text('organization_member_id')
.notNull()
.references(() => organizationMembersTable.id, { onDelete: 'cascade', onUpdate: 'cascade' }),
});

View File

@@ -1,6 +1,6 @@
import type { API_KEY_PERMISSIONS_VALUES } from './api-keys.constants';
import type { apiKeysTable } from './api-keys.tables';
export type ApiKeyPermissions = (typeof API_KEY_PERMISSIONS_VALUES)[number];
export type ApiKey = typeof apiKeysTable.$inferSelect;
// Re-export types from tables for backward compatibility
export type { ApiKey, ApiKeyOrganization, InsertableApiKey, InsertableApiKeyOrganization } from './api-keys.new.tables';

View File

@@ -1,12 +1,35 @@
import type { ApiKey, ApiKeyPermissions } from '../../api-keys/api-keys.types';
import type { Config } from '../../config/config.types';
import type { Context } from '../server.types';
import type {
AuthAccount,
AuthSession,
AuthVerification,
DbInsertableAuthAccount,
DbInsertableAuthSession,
DbInsertableAuthVerification,
DbSelectableAuthAccount,
DbSelectableAuthSession,
DbSelectableAuthVerification,
InsertableAuthAccount,
InsertableAuthSession,
InsertableAuthVerification,
} from './auth.new.tables';
import type { Session } from './auth.types';
import { uniq } from 'lodash-es';
import { getClientBaseUrl } from '../../config/config.models';
import { createError } from '../../shared/errors/errors';
import { generateId } from '../../shared/random/ids';
import { isNil } from '../../shared/utils';
const authSessionIdPrefix = 'auth_ses';
const authAccountIdPrefix = 'auth_acc';
const authVerificationIdPrefix = 'auth_ver';
const generateAuthSessionId = () => generateId({ prefix: authSessionIdPrefix });
const generateAuthAccountId = () => generateId({ prefix: authAccountIdPrefix });
const generateAuthVerificationId = () => generateId({ prefix: authVerificationIdPrefix });
export function getUser({ context }: { context: Context }) {
const userId = context.get('userId');
@@ -48,7 +71,7 @@ export function isAuthenticationValid({
authType,
}: {
session?: Session | null | undefined;
apiKey?: ApiKey | null | undefined;
apiKey?: Omit<ApiKey, 'keyHash'> | null | undefined;
requiredApiKeyPermissions?: ApiKeyPermissions[];
authType: 'api-key' | 'session' | null;
}): boolean {
@@ -84,3 +107,136 @@ export function isAuthenticationValid({
return false;
}
// DB <-> Business model transformers
// Auth Session transformers
/**
 * Converts an `auth_sessions` row (snake_case columns, epoch-ms timestamps)
 * into the AuthSession business model, or `undefined` when no row is given.
 */
export function dbToAuthSession(dbSession?: DbSelectableAuthSession): AuthSession | undefined {
  if (!dbSession) {
    return undefined;
  }

  const { id, token, user_id, ip_address, user_agent, active_organization_id, created_at, updated_at, expires_at } = dbSession;

  return {
    id,
    token,
    userId: user_id,
    ipAddress: ip_address,
    userAgent: user_agent,
    activeOrganizationId: active_organization_id,
    createdAt: new Date(created_at),
    updatedAt: new Date(updated_at),
    expiresAt: new Date(expires_at),
  };
}
/**
 * Converts an AuthSession business model into an insertable `auth_sessions` row.
 * Fills in a generated id and `now`-based timestamps when absent; both are
 * injectable for deterministic tests.
 */
export function authSessionToDb(
  session: InsertableAuthSession,
  {
    now = new Date(),
    generateId = generateAuthSessionId,
  }: {
    now?: Date;
    generateId?: () => string;
  } = {},
): DbInsertableAuthSession {
  const fallbackMs = now.getTime();
  const { id, token, userId, ipAddress, userAgent, activeOrganizationId, createdAt, updatedAt, expiresAt } = session;

  return {
    id: id ?? generateId(),
    token,
    user_id: userId,
    ip_address: ipAddress,
    user_agent: userAgent,
    active_organization_id: activeOrganizationId,
    created_at: createdAt?.getTime() ?? fallbackMs,
    updated_at: updatedAt?.getTime() ?? fallbackMs,
    // expiresAt is required on the business model, so no fallback is needed.
    expires_at: expiresAt.getTime(),
  };
}
// Auth Account transformers
/**
 * Converts an `auth_accounts` row into the AuthAccount business model,
 * or `undefined` when no row is given. Nullable epoch-ms expiry columns
 * become `Date | null`.
 */
export function dbToAuthAccount(dbAccount?: DbSelectableAuthAccount): AuthAccount | undefined {
  if (!dbAccount) {
    return undefined;
  }

  // Nullable epoch-ms column -> Date | null.
  const toDateOrNull = (value: number | null): Date | null => (isNil(value) ? null : new Date(value));

  return {
    id: dbAccount.id,
    userId: dbAccount.user_id,
    accountId: dbAccount.account_id,
    providerId: dbAccount.provider_id,
    accessToken: dbAccount.access_token,
    refreshToken: dbAccount.refresh_token,
    scope: dbAccount.scope,
    idToken: dbAccount.id_token,
    password: dbAccount.password,
    createdAt: new Date(dbAccount.created_at),
    updatedAt: new Date(dbAccount.updated_at),
    accessTokenExpiresAt: toDateOrNull(dbAccount.access_token_expires_at),
    refreshTokenExpiresAt: toDateOrNull(dbAccount.refresh_token_expires_at),
  };
}
/**
 * Converts an AuthAccount business model into an insertable `auth_accounts` row.
 * Fills in a generated id and `now`-based timestamps when absent; optional expiry
 * Dates become epoch ms (or stay undefined for the nullable columns).
 */
export function authAccountToDb(
  account: InsertableAuthAccount,
  {
    now = new Date(),
    generateId = generateAuthAccountId,
  }: {
    now?: Date;
    generateId?: () => string;
  } = {},
): DbInsertableAuthAccount {
  const fallbackMs = now.getTime();
  const { createdAt, updatedAt, accessTokenExpiresAt, refreshTokenExpiresAt } = account;

  return {
    id: account.id ?? generateId(),
    user_id: account.userId,
    account_id: account.accountId,
    provider_id: account.providerId,
    access_token: account.accessToken,
    refresh_token: account.refreshToken,
    scope: account.scope,
    id_token: account.idToken,
    password: account.password,
    created_at: createdAt?.getTime() ?? fallbackMs,
    updated_at: updatedAt?.getTime() ?? fallbackMs,
    access_token_expires_at: accessTokenExpiresAt?.getTime(),
    refresh_token_expires_at: refreshTokenExpiresAt?.getTime(),
  };
}
// Auth Verification transformers
/**
 * Converts an `auth_verifications` row into the AuthVerification business model,
 * or `undefined` when no row is given.
 */
export function dbToAuthVerification(dbVerification?: DbSelectableAuthVerification): AuthVerification | undefined {
  if (!dbVerification) {
    return undefined;
  }

  const { id, identifier, value, created_at, updated_at, expires_at } = dbVerification;

  return {
    id,
    identifier,
    value,
    createdAt: new Date(created_at),
    updatedAt: new Date(updated_at),
    expiresAt: new Date(expires_at),
  };
}
/**
 * Converts an AuthVerification business model into an insertable
 * `auth_verifications` row. Fills in a generated id and `now`-based
 * timestamps when absent; both are injectable for deterministic tests.
 */
export function authVerificationToDb(
  verification: InsertableAuthVerification,
  {
    now = new Date(),
    generateId = generateAuthVerificationId,
  }: {
    now?: Date;
    generateId?: () => string;
  } = {},
): DbInsertableAuthVerification {
  const fallbackMs = now.getTime();
  const { id, identifier, value, createdAt, updatedAt, expiresAt } = verification;

  return {
    id: id ?? generateId(),
    identifier,
    value,
    created_at: createdAt?.getTime() ?? fallbackMs,
    updated_at: updatedAt?.getTime() ?? fallbackMs,
    // expiresAt is required on the business model, so no fallback is needed.
    expires_at: expiresAt.getTime(),
  };
}

View File

@@ -0,0 +1,81 @@
import type { Expand } from '@corentinth/chisels';
import type { Insertable, Selectable, Updateable } from 'kysely';
import type { BusinessInsertable, CamelCaseKeys, TableWithIdAndTimestamps } from '../database/database.columns.types';
// --- Auth Sessions

// Kysely table definition for `auth_sessions` (better-auth session storage).
// Timestamps are epoch ms; nullable columns are modeled as `| null`.
export type AuthSessionsTable = TableWithIdAndTimestamps<{
  token: string;
  user_id: string | null;
  expires_at: number; // epoch ms
  ip_address: string | null;
  user_agent: string | null;
  active_organization_id: string | null;
}>;

// Raw row shapes as seen by Kysely for select/insert/update.
export type DbSelectableAuthSession = Selectable<AuthSessionsTable>;
export type DbInsertableAuthSession = Insertable<AuthSessionsTable>;
export type DbUpdateableAuthSession = Updateable<AuthSessionsTable>;

// Business-level insert payload: camelCase keys, `expiresAt` as a required Date.
export type InsertableAuthSession = BusinessInsertable<DbInsertableAuthSession, {
  expiresAt: Date;
}>;

// Business-level selected session: epoch-ms timestamps become Dates.
export type AuthSession = Expand<CamelCaseKeys<Omit<DbSelectableAuthSession, 'created_at' | 'updated_at' | 'expires_at'> & {
  createdAt: Date;
  updatedAt: Date;
  expiresAt: Date;
}>>;

// --- Auth Accounts

// Kysely table definition for `auth_accounts` (better-auth provider accounts,
// including credential `password` and OAuth token columns).
export type AuthAccountsTable = TableWithIdAndTimestamps<{
  user_id: string | null;
  account_id: string;
  provider_id: string;
  access_token: string | null;
  refresh_token: string | null;
  access_token_expires_at: number | null; // epoch ms, null when not applicable
  refresh_token_expires_at: number | null; // epoch ms, null when not applicable
  scope: string | null;
  id_token: string | null;
  password: string | null;
}>;

export type DbSelectableAuthAccount = Selectable<AuthAccountsTable>;
export type DbInsertableAuthAccount = Insertable<AuthAccountsTable>;
export type DbUpdateableAuthAccount = Updateable<AuthAccountsTable>;

// Business-level insert payload: optional expiry Dates map to the nullable columns.
export type InsertableAuthAccount = BusinessInsertable<DbInsertableAuthAccount, {
  accessTokenExpiresAt?: Date | null;
  refreshTokenExpiresAt?: Date | null;
}>;

// Business-level selected account: epoch-ms timestamps become Dates (nullable ones stay nullable).
export type AuthAccount = Expand<CamelCaseKeys<Omit<DbSelectableAuthAccount, 'created_at' | 'updated_at' | 'access_token_expires_at' | 'refresh_token_expires_at'> & {
  createdAt: Date;
  updatedAt: Date;
  accessTokenExpiresAt: Date | null;
  refreshTokenExpiresAt: Date | null;
}>>;

// --- Auth Verifications

// Kysely table definition for `auth_verifications` (better-auth verification values,
// e.g. email verification codes keyed by `identifier`).
export type AuthVerificationsTable = TableWithIdAndTimestamps<{
  identifier: string;
  value: string;
  expires_at: number; // epoch ms
}>;

export type DbSelectableAuthVerification = Selectable<AuthVerificationsTable>;
export type DbInsertableAuthVerification = Insertable<AuthVerificationsTable>;
export type DbUpdateableAuthVerification = Updateable<AuthVerificationsTable>;

// Business-level insert payload: `expiresAt` as a required Date.
export type InsertableAuthVerification = BusinessInsertable<DbInsertableAuthVerification, {
  expiresAt: Date;
}>;

// Business-level selected verification: epoch-ms timestamps become Dates.
export type AuthVerification = Expand<CamelCaseKeys<Omit<DbSelectableAuthVerification, 'created_at' | 'updated_at' | 'expires_at'> & {
  createdAt: Date;
  updatedAt: Date;
  expiresAt: Date;
}>>;

View File

@@ -1,15 +1,12 @@
import type { Config } from '../../config/config.types';
import type { TrackingServices } from '../../tracking/tracking.services';
import type { Database } from '../database/database.types';
import type { DatabaseClient } from '../database/database.types';
import type { AuthEmailsServices } from './auth.emails.services';
import { betterAuth } from 'better-auth';
import { drizzleAdapter } from 'better-auth/adapters/drizzle';
import { genericOAuth } from 'better-auth/plugins';
import { getServerBaseUrl } from '../../config/config.models';
import { createLogger } from '../../shared/logger/logger';
import { usersTable } from '../../users/users.table';
import { getTrustedOrigins } from './auth.models';
import { accountsTable, sessionsTable, verificationsTable } from './auth.tables';
export type Auth = ReturnType<typeof getAuth>['auth'];
@@ -21,7 +18,7 @@ export function getAuth({
authEmailsServices,
trackingServices,
}: {
db: Database;
db: DatabaseClient;
config: Config;
authEmailsServices: AuthEmailsServices;
trackingServices: TrackingServices;
@@ -63,18 +60,11 @@ export function getAuth({
}
: undefined,
database: drizzleAdapter(
database: {
db,
{
provider: 'sqlite',
schema: {
user: usersTable,
account: accountsTable,
session: sessionsTable,
verification: verificationsTable,
},
},
),
casing: 'snake', // Table names are in snake_case
type: 'sqlite',
},
databaseHooks: {
user: {

View File

@@ -1,7 +1,9 @@
import { index, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core';
import { organizationsTable } from '../../organizations/organizations.table';
import { createPrimaryKeyField, createTimestampColumns } from '../../shared/db/columns.helpers';
import { usersTable } from '../../users/users.table';
// Legacy stub for Drizzle schema - this file is no longer used in production
const usersTable = { id: '' } as any;
export const sessionsTable = sqliteTable(
'auth_sessions',

View File

@@ -0,0 +1,39 @@
import type { Expand } from '@corentinth/chisels';
import type { ColumnType } from 'kysely';
// Id column: selectable and insertable as string, never updateable.
export type IdColumn = ColumnType<string, string, never>;

export type CreatedAtColumn = {
  created_at: number; // epoch ms
};

export type UpdatedAtColumn = {
  updated_at: number; // epoch ms
};

// Adds the standard timestamp columns to a table shape.
export type WithTimestamps<T> = T & CreatedAtColumn & UpdatedAtColumn;

// Standard table shape: string id plus created_at/updated_at timestamps.
export type TableWithIdAndTimestamps<T> = Expand<{
  id: IdColumn;
} & WithTimestamps<T>>;

// Replaces the raw epoch-ms timestamp columns with Date-typed camelCase fields.
export type TimestampsToDate<T> = Omit<T, 'created_at' | 'updated_at'> & {
  createdAt: Date;
  updatedAt: Date;
};

// Utility type to convert a snake_case string literal to camelCase
// (the recursion is over the string segments, not nested objects)
type SnakeToCamelCase<S extends string> = S extends `${infer First}_${infer Rest}`
  ? `${First}${Capitalize<SnakeToCamelCase<Rest>>}`
  : S;

// Utility type to convert snake_case keys to camelCase of root level only
export type CamelCaseKeys<T> = {
  [K in keyof T as K extends string ? SnakeToCamelCase<K> : K]: T[K];
};

// Business-facing insert shape derived from a DB insertable: camelCase keys,
// optional id/createdAt/updatedAt, with `Extras` overriding or adding
// rich-typed fields (Date, boolean, parsed arrays, ...).
export type BusinessInsertable<T, Extras extends Record<string, unknown> = Record<string, never>> = Expand<Omit<CamelCaseKeys<T>, 'id' | 'createdAt' | 'updatedAt' | keyof Extras> & {
  id?: string;
  createdAt?: Date;
  updatedAt?: Date;
} & Extras>;

View File

@@ -1,4 +1,4 @@
import { sql } from 'drizzle-orm';
import { sql } from 'kysely';
import { describe, expect, test } from 'vitest';
import { setupDatabase } from './database';
import { serializeSchema } from './database.test-utils';
@@ -7,10 +7,10 @@ describe('database-utils test', () => {
describe('serializeSchema', () => {
test('given a database with some tables, it should return the schema as a string, used for db state snapshot', async () => {
const { db } = setupDatabase({ url: ':memory:' });
await db.run(sql`CREATE TABLE test (id INTEGER PRIMARY KEY, name TEXT)`);
await db.run(sql`CREATE INDEX idx_test_name ON test (name)`);
await db.run(sql`CREATE VIEW test_view AS SELECT * FROM test`);
await db.run(sql`CREATE TRIGGER test_trigger AFTER INSERT ON test BEGIN SELECT 1; END`);
await db.executeQuery(sql`CREATE TABLE test (id INTEGER PRIMARY KEY, name TEXT)`.compile(db));
await db.executeQuery(sql`CREATE INDEX idx_test_name ON test (name)`.compile(db));
await db.executeQuery(sql`CREATE VIEW test_view AS SELECT * FROM test`.compile(db));
await db.executeQuery(sql`CREATE TRIGGER test_trigger AFTER INSERT ON test BEGIN SELECT 1; END`.compile(db));
const schema = await serializeSchema({ db });
expect(schema).toMatchInlineSnapshot(`

View File

@@ -1,21 +1,21 @@
import type { Database } from './database.types';
import type { Database, DatabaseClient } from './database.types';
import { createNoopLogger } from '@crowlog/logger';
import { sql } from 'drizzle-orm';
import { CompiledQuery } from 'kysely';
import { runMigrations } from '../../../migrations/migrations.usecases';
import { apiKeyOrganizationsTable, apiKeysTable } from '../../api-keys/api-keys.tables';
import { documentsTable } from '../../documents/documents.table';
import { intakeEmailsTable } from '../../intake-emails/intake-emails.tables';
import { organizationInvitationsTable, organizationMembersTable, organizationsTable } from '../../organizations/organizations.table';
import { organizationSubscriptionsTable } from '../../subscriptions/subscriptions.tables';
import { taggingRuleActionsTable, taggingRuleConditionsTable, taggingRulesTable } from '../../tagging-rules/tagging-rules.tables';
import { documentsTagsTable, tagsTable } from '../../tags/tags.table';
import { usersTable } from '../../users/users.table';
import { webhookDeliveriesTable, webhookEventsTable, webhooksTable } from '../../webhooks/webhooks.tables';
import { apiKeyOrganizationToDb, apiKeyToDb } from '../../api-keys/api-keys.models';
import { documentToDb } from '../../documents/documents.models';
import { intakeEmailToDb } from '../../intake-emails/intake-emails.models';
import { organizationInvitationToDb, organizationMemberToDb, organizationToDb } from '../../organizations/organizations.models';
import { organizationSubscriptionToDb } from '../../subscriptions/subscriptions.models';
import { taggingRuleActionToDb, taggingRuleConditionToDb, taggingRuleToDb } from '../../tagging-rules/tagging-rules.models';
import { documentTagToDb, tagToDb } from '../../tags/tags.models';
import { userToDb } from '../../users/users.models';
import { webhookDeliveryToDb, webhookEventToDb, webhookToDb } from '../../webhooks/webhooks.models';
import { setupDatabase } from './database';
export { createInMemoryDatabase, seedDatabase };
async function createInMemoryDatabase(seedOptions: Omit<Parameters<typeof seedDatabase>[0], 'db'> | undefined = {}) {
async function createInMemoryDatabase(seedingRows: SeedingRows | undefined = {}): Promise<{ db: DatabaseClient }> {
const { db } = setupDatabase({ url: ':memory:' });
await runMigrations({
@@ -24,73 +24,85 @@ async function createInMemoryDatabase(seedOptions: Omit<Parameters<typeof seedDa
logger: createNoopLogger(),
});
await seedDatabase({ db, ...seedOptions });
await seedDatabase({ db, seedingRows });
return {
db,
};
}
const seedTables = {
users: usersTable,
organizations: organizationsTable,
organizationMembers: organizationMembersTable,
documents: documentsTable,
tags: tagsTable,
documentsTags: documentsTagsTable,
intakeEmails: intakeEmailsTable,
organizationSubscriptions: organizationSubscriptionsTable,
taggingRules: taggingRulesTable,
taggingRuleConditions: taggingRuleConditionsTable,
taggingRuleActions: taggingRuleActionsTable,
apiKeys: apiKeysTable,
apiKeyOrganizations: apiKeyOrganizationsTable,
webhooks: webhooksTable,
webhookEvents: webhookEventsTable,
webhookDeliveries: webhookDeliveriesTable,
organizationInvitations: organizationInvitationsTable,
} as const;
type SeedTablesRows = {
[K in keyof typeof seedTables]?: typeof seedTables[K] extends { $inferInsert: infer T } ? T[] : never;
// Maps each seedable entity to its database table name and its business-model → DB-row mapper
const tableSeedingMappers = {
users: { table: 'users', mapper: userToDb },
apiKeys: { table: 'api_keys', mapper: apiKeyToDb },
apiKeyOrganizations: { table: 'api_key_organizations', mapper: apiKeyOrganizationToDb },
organizations: { table: 'organizations', mapper: organizationToDb },
organizationMembers: { table: 'organization_members', mapper: organizationMemberToDb },
organizationInvitations: { table: 'organization_invitations', mapper: organizationInvitationToDb },
organizationSubscriptions: { table: 'organization_subscriptions', mapper: organizationSubscriptionToDb },
documents: { table: 'documents', mapper: documentToDb },
tags: { table: 'tags', mapper: tagToDb },
documentsTags: { table: 'documents_tags', mapper: documentTagToDb },
taggingRules: { table: 'tagging_rules', mapper: taggingRuleToDb },
taggingRuleActions: { table: 'tagging_rule_actions', mapper: taggingRuleActionToDb },
taggingRuleConditions: { table: 'tagging_rule_conditions', mapper: taggingRuleConditionToDb },
intakeEmails: { table: 'intake_emails', mapper: intakeEmailToDb },
webhooks: { table: 'webhooks', mapper: webhookToDb },
webhookEvents: { table: 'webhook_events', mapper: webhookEventToDb },
webhookDeliveries: { table: 'webhook_deliveries', mapper: webhookDeliveryToDb },
};
async function seedDatabase({ db, ...seedRows }: { db: Database } & SeedTablesRows) {
await Promise.all(
Object
.entries(seedRows)
.map(async ([table, rows]) => db
.insert(seedTables[table as keyof typeof seedTables])
.values(rows)
.execute(),
),
);
type SeedingRows = {
[Table in keyof typeof tableSeedingMappers]?: Parameters<typeof tableSeedingMappers[Table]['mapper']>['0'][];
};
async function seedDatabase({ db, seedingRows }: { db: DatabaseClient; seedingRows?: SeedingRows }) {
if (!seedingRows) {
return;
}
// Insert tables in order to respect foreign key constraints
const orderedKeys: (keyof typeof tableSeedingMappers)[] = [
'users',
'organizations',
'organizationMembers',
'organizationInvitations',
'organizationSubscriptions',
'apiKeys',
'apiKeyOrganizations',
'documents',
'tags',
'documentsTags',
'taggingRules',
'taggingRuleActions',
'taggingRuleConditions',
'intakeEmails',
'webhooks',
'webhookEvents',
'webhookDeliveries',
];
for (const mapperKey of orderedKeys) {
const rawRows = seedingRows[mapperKey];
if (!rawRows) {
continue;
}
const { table, mapper } = tableSeedingMappers[mapperKey];
// @ts-expect-error - We know that the mapper exists for the table
const rows = rawRows.map(rawRow => mapper(rawRow));
await db
.insertInto(table as keyof Database)
.values(rows)
.execute();
}
}
/*
PRAGMA encoding;
PRAGMA page_size;
PRAGMA auto_vacuum;
PRAGMA journal_mode; -- WAL is persistent
PRAGMA user_version;
PRAGMA application_id;
export async function serializeSchema({ db }: { db: DatabaseClient }) {
const { rows } = await db.executeQuery<{ sql: unknown }>(CompiledQuery.raw(`SELECT sql FROM sqlite_schema WHERE sql IS NOT NULL AND type IN ('table','index','view','trigger') ORDER BY type, name`));
*/
export async function serializeSchema({ db }: { db: Database }) {
const result = await db.batch([
// db.run(sql`PRAGMA encoding`),
// db.run(sql`PRAGMA page_size`),
// db.run(sql`PRAGMA auto_vacuum`),
// db.run(sql`PRAGMA journal_mode`),
// db.run(sql`PRAGMA user_version`),
// db.run(sql`PRAGMA application_id`),
db.run(sql`SELECT sql FROM sqlite_schema WHERE sql IS NOT NULL AND type IN ('table','index','view','trigger') ORDER BY type, name`),
]);
return Array
.from(result.values())
.flatMap(({ rows }) => rows.map(({ sql }) => minifyQuery(String(sql))))
return rows
.map(({ sql }) => minifyQuery(String(sql)))
.join('\n');
}

View File

@@ -1,23 +1,44 @@
import type { Logger } from '@crowlog/logger';
import type { ShutdownHandlerRegistration } from '../graceful-shutdown/graceful-shutdown.services';
import type { Database } from './database.types';
import { createClient } from '@libsql/client';
import { drizzle } from 'drizzle-orm/libsql';
import { LibsqlDialect } from '@libsql/kysely-libsql';
import { Kysely } from 'kysely';
import { createLogger } from '../../shared/logger/logger';
export { setupDatabase };
function setupDatabase({
export function setupDatabase({
url,
authToken,
encryptionKey,
registerShutdownHandler,
logger = createLogger({ namespace: 'database' }),
}: {
url: string;
authToken?: string;
encryptionKey?: string;
registerShutdownHandler?: ShutdownHandlerRegistration;
logger?: Logger;
}) {
const client = createClient({ url, authToken, encryptionKey });
const db = drizzle(client);
const db = new Kysely<Database>({
// @ts-expect-error https://github.com/tursodatabase/kysely-libsql/issues/12
dialect: new LibsqlDialect({ client }),
log: (event) => {
const meta = {
sql: event.query.sql,
durationMs: event.queryDurationMillis,
};
if (event.level === 'error') {
logger.error({ error: event.error, ...meta }, 'Database query error');
return;
}
logger.debug({ ...meta }, 'Database query executed');
},
});
registerShutdownHandler?.({
id: 'database-client-close',

View File

@@ -1,3 +1,58 @@
import type { LibSQLDatabase } from 'drizzle-orm/libsql';
import type { Kysely } from 'kysely';
import type { MigrationTable } from '../../../migrations/migrations.tables';
import type { ApiKeyOrganizationsTable, ApiKeysTable } from '../../api-keys/api-keys.new.tables';
import type { DocumentActivityLogTable } from '../../documents/document-activity/document-activity.tables';
import type { DocumentsTable } from '../../documents/documents.tables';
import type { IntakeEmailsTable } from '../../intake-emails/intake-emails.new.tables';
import type { OrganizationInvitationsTable, OrganizationMembersTable, OrganizationsTable } from '../../organizations/organizations.tables';
import type { UserRolesTable } from '../../roles/roles.tables';
import type { OrganizationSubscriptionsTable } from '../../subscriptions/subscriptions.new.tables';
import type { TaggingRuleActionsTable, TaggingRuleConditionsTable, TaggingRulesTable } from '../../tagging-rules/tagging-rules.new.tables';
import type { DocumentsTagsTable, TagsTable } from '../../tags/tags.tables';
import type { UsersTable } from '../../users/users.tables';
import type { WebhookDeliveriesTable, WebhookEventsTable, WebhooksTable } from '../../webhooks/webhooks.tables';
import type { AuthAccountsTable, AuthSessionsTable, AuthVerificationsTable } from '../auth/auth.new.tables';
export type Database = LibSQLDatabase<Record<string, never>>;
export type Database = {
migrations: MigrationTable;
// Users & Auth
users: UsersTable;
user_roles: UserRolesTable;
auth_sessions: AuthSessionsTable;
auth_accounts: AuthAccountsTable;
auth_verifications: AuthVerificationsTable;
// Organizations
organizations: OrganizationsTable;
organization_members: OrganizationMembersTable;
organization_invitations: OrganizationInvitationsTable;
organization_subscriptions: OrganizationSubscriptionsTable;
// Documents
documents: DocumentsTable;
document_activity_log: DocumentActivityLogTable;
// Tags
tags: TagsTable;
documents_tags: DocumentsTagsTable;
// Tagging Rules
tagging_rules: TaggingRulesTable;
tagging_rule_conditions: TaggingRuleConditionsTable;
tagging_rule_actions: TaggingRuleActionsTable;
// Webhooks
webhooks: WebhooksTable;
webhook_events: WebhookEventsTable;
webhook_deliveries: WebhookDeliveriesTable;
// API Keys
api_keys: ApiKeysTable;
api_key_organizations: ApiKeyOrganizationsTable;
// Intake Emails
intake_emails: IntakeEmailsTable;
};
export type DatabaseClient = Kysely<Database>;

View File

@@ -1,5 +1,4 @@
import { describe, expect, test } from 'vitest';
import { usersTable } from '../../users/users.table';
import { createInMemoryDatabase } from './database.test-utils';
import { createIterator } from './database.usecases';
@@ -12,7 +11,7 @@ describe('database usecases', () => {
users: createUsers({ count: 10 }),
});
const query = db.select().from(usersTable).orderBy(usersTable.id).$dynamic();
const query = db.selectFrom('users').selectAll().orderBy('id');
const iterator = createIterator({ query });
const results: string[] = [];
@@ -29,7 +28,7 @@ describe('database usecases', () => {
users: createUsers({ count: 10 }),
});
const query = db.select().from(usersTable).orderBy(usersTable.id).$dynamic();
const query = db.selectFrom('users').selectAll().orderBy('id');
const iterator = createIterator({ query });
const results = await Array.fromAsync(iterator);
@@ -42,7 +41,7 @@ describe('database usecases', () => {
users: createUsers({ count: 10 }),
});
const query = db.select().from(usersTable).orderBy(usersTable.id).$dynamic();
const query = db.selectFrom('users').selectAll().orderBy('id');
const iterator = createIterator({ query, batchSize: 3 });
const results: string[] = [];

View File

@@ -1,20 +1,20 @@
import type { SQLiteSelect } from 'drizzle-orm/sqlite-core';
import type { ArrayElement } from '../../shared/types';
import type { SelectQueryBuilder } from 'kysely';
import type { Database } from './database.types';
export async function* createIterator<T extends SQLiteSelect>({
export async function* createIterator<O>({
query,
batchSize = 100,
}: { query: T; batchSize?: number }): AsyncGenerator<ArrayElement<T['_']['result']>> {
}: { query: SelectQueryBuilder<Database, any, O>; batchSize?: number }): AsyncGenerator<O> {
let offset = 0;
while (true) {
const results = await query.limit(batchSize).offset(offset);
const results = await query.limit(batchSize).offset(offset).execute();
if (results.length === 0) {
break;
}
for (const result of results) {
yield result as ArrayElement<T['_']['result']>;
yield result as Awaited<O>;
}
if (results.length < batchSize) {

View File

@@ -1,4 +1,4 @@
import type { Database } from '../../database/database.types';
import type { DatabaseClient } from '../../database/database.types';
import { describe, expect, test } from 'vitest';
import { overrideConfig } from '../../../config/config.test-utils';
import { createInMemoryDatabase } from '../../database/database.test-utils';
@@ -23,10 +23,10 @@ describe('health check routes e2e', () => {
test('when their is an issue with the database, the /api/health returns a 500', async () => {
const db = {
run: async () => {
executeTakeFirst: async () => {
throw new Error('Alerte générale !');
},
} as unknown as Database;
} as unknown as DatabaseClient;
const { app } = await createServer({ db, config: overrideConfig() });

View File

@@ -1,13 +1,13 @@
import type { Database } from '../database/database.types';
import type { DatabaseClient } from '../database/database.types';
import { describe, expect, test } from 'vitest';
import { createInMemoryDatabase } from '../database/database.test-utils';
import { isDatabaseHealthy } from './health-check.repository';
const faultyDatabase = {
run: async () => {
executeTakeFirst: () => {
throw new Error('Alerte générale !');
},
} as unknown as Database;
} as unknown as DatabaseClient;
describe('health-check repository', () => {
describe('isDatabaseHealthy', () => {

View File

@@ -1,9 +1,10 @@
import type { Database } from '../database/database.types';
import type { DatabaseClient } from '../database/database.types';
import { safely } from '@corentinth/chisels';
import { sql } from 'drizzle-orm';
import { sql } from 'kysely';
export async function isDatabaseHealthy({ db }: { db: Database }) {
const [result, error] = await safely(db.run(sql`SELECT 1;`));
export async function isDatabaseHealthy({ db }: { db: DatabaseClient }) {
// Compiles to `SELECT '1' as one` — sql.lit('1') emits a *string* literal, hence the string comparison below
const [result, error] = await safely(db.selectNoFrom(sql.lit('1').as('one')).executeTakeFirst());
return error === null && result.rows.length > 0 && result.rows[0]?.['1'] === 1;
return error === null && result?.one === '1';
}

View File

@@ -8,13 +8,13 @@ import type { TaskServices } from '../tasks/tasks.services';
import type { TrackingServices } from '../tracking/tracking.services';
import type { Auth } from './auth/auth.services';
import type { Session } from './auth/auth.types';
import type { Database } from './database/database.types';
import type { DatabaseClient } from './database/database.types';
export type ServerInstanceGenerics = {
Variables: {
userId: string | null;
session: Session | null;
apiKey: ApiKey | null;
apiKey: Omit<ApiKey, 'keyHash'> | null;
authType: 'session' | 'api-key' | null;
};
};
@@ -25,7 +25,7 @@ export type ServerInstance = Hono<ServerInstanceGenerics>;
export type GlobalDependencies = {
config: Config;
db: Database;
db: DatabaseClient;
auth: Auth;
emailsServices: EmailsServices;
subscriptionsServices: SubscriptionsServices;

View File

@@ -0,0 +1,43 @@
import type { DbInsertableDocumentActivity, DbSelectableDocumentActivity, DocumentActivity, InsertableDocumentActivity } from './document-activity.tables';
import { generateId } from '../../shared/random/ids';
import { isNil } from '../../shared/utils';
const documentActivityIdPrefix = 'doc_act';
const generateDocumentActivityId = () => generateId({ prefix: documentActivityIdPrefix });
/**
 * Converts a raw `document_activity_log` row (snake_case columns, epoch-millis
 * timestamp, JSON-encoded event data) into the camelCase business model.
 * Returns `undefined` when no row is given, so `executeTakeFirst()` results
 * can be forwarded directly.
 */
export function dbToDocumentActivity(dbActivity?: DbSelectableDocumentActivity): DocumentActivity | undefined {
  if (!dbActivity) {
    return undefined;
  }

  const { id, document_id, event, event_data, user_id, tag_id, created_at } = dbActivity;

  return {
    id,
    event,
    documentId: document_id,
    userId: user_id,
    tagId: tag_id,
    // event_data is persisted as a JSON string; absent data maps to null
    eventData: isNil(event_data) ? null : JSON.parse(event_data) as Record<string, unknown>,
    createdAt: new Date(created_at),
  };
}
/**
 * Maps a business-level activity payload onto a `document_activity_log`
 * insert row: camelCase → snake_case, Date → epoch millis, structured
 * event data → JSON string.
 *
 * @param activity - activity to persist; `id` and `createdAt` are optional
 * @param options.now - clock used when `createdAt` is absent (defaults to the current time)
 * @param options.generateId - id factory used when `id` is absent
 */
export function documentActivityToDb(
  activity: InsertableDocumentActivity,
  {
    now = new Date(),
    generateId: makeId = generateDocumentActivityId,
  }: {
    now?: Date;
    generateId?: () => string;
  } = {},
): DbInsertableDocumentActivity {
  const { id, documentId, event, eventData, userId, tagId, createdAt } = activity;

  return {
    id: id ?? makeId(),
    document_id: documentId,
    event,
    // falsy event data (undefined/null) is stored as NULL
    event_data: eventData ? JSON.stringify(eventData) : null,
    user_id: userId,
    tag_id: tagId,
    created_at: (createdAt ?? now).getTime(),
  };
}

View File

@@ -1,16 +1,11 @@
import type { Database } from '../../app/database/database.types';
import type { DatabaseClient } from '../../app/database/database.types';
import type { DocumentActivityEvent } from './document-activity.types';
import { injectArguments } from '@corentinth/chisels';
import { and, desc, eq, getTableColumns } from 'drizzle-orm';
import { withPagination } from '../../shared/db/pagination';
import { tagsTable } from '../../tags/tags.table';
import { usersTable } from '../../users/users.table';
import { documentsTable } from '../documents.table';
import { documentActivityLogTable } from './document-activity.table';
import { dbToDocumentActivity, documentActivityToDb } from './document-activity.models';
export type DocumentActivityRepository = ReturnType<typeof createDocumentActivityRepository>;
export function createDocumentActivityRepository({ db }: { db: Database }) {
export function createDocumentActivityRepository({ db }: { db: DatabaseClient }) {
return injectArguments(
{
saveDocumentActivity,
@@ -33,20 +28,21 @@ async function saveDocumentActivity({
eventData?: Record<string, unknown>;
userId?: string;
tagId?: string;
db: Database;
db: DatabaseClient;
}) {
const [activity] = await db
.insert(documentActivityLogTable)
.values({
const dbActivity = await db
.insertInto('document_activity_log')
.values(documentActivityToDb({
documentId,
event,
eventData,
userId,
tagId,
})
.returning();
}))
.returningAll()
.executeTakeFirst();
return { activity };
return { activity: dbToDocumentActivity(dbActivity) };
}
async function getOrganizationDocumentActivities({
@@ -60,42 +56,35 @@ async function getOrganizationDocumentActivities({
documentId: string;
pageIndex: number;
pageSize: number;
db: Database;
db: DatabaseClient;
}) {
const query = db
.select({
...getTableColumns(documentActivityLogTable),
user: {
id: usersTable.id,
name: usersTable.name,
},
tag: {
id: tagsTable.id,
name: tagsTable.name,
color: tagsTable.color,
description: tagsTable.description,
},
})
.from(documentActivityLogTable)
const activities = await db
.selectFrom('document_activity_log')
// Join with documents table to ensure the document exists in the organization
.innerJoin(documentsTable, eq(documentActivityLogTable.documentId, documentsTable.id))
.leftJoin(usersTable, eq(documentActivityLogTable.userId, usersTable.id))
.leftJoin(tagsTable, eq(documentActivityLogTable.tagId, tagsTable.id))
.where(
and(
eq(documentsTable.organizationId, organizationId),
eq(documentActivityLogTable.documentId, documentId),
),
);
const activities = await withPagination(
query.$dynamic(),
{
orderByColumn: desc(documentActivityLogTable.createdAt),
pageIndex,
pageSize,
},
);
.innerJoin('documents', 'document_activity_log.document_id', 'documents.id')
.leftJoin('users', 'document_activity_log.user_id', 'users.id')
.leftJoin('tags', 'document_activity_log.tag_id', 'tags.id')
.where('documents.organization_id', '=', organizationId)
.where('document_activity_log.document_id', '=', documentId)
.select([
'document_activity_log.id',
'document_activity_log.document_id',
'document_activity_log.event',
'document_activity_log.event_data',
'document_activity_log.user_id',
'document_activity_log.tag_id',
'document_activity_log.created_at',
'users.id as user_id_ref',
'users.name as user_name',
'tags.id as tag_id_ref',
'tags.name as tag_name',
'tags.color as tag_color',
'tags.description as tag_description',
])
.orderBy('document_activity_log.created_at', 'desc')
.limit(pageSize)
.offset(pageIndex * pageSize)
.execute();
return { activities };
}

View File

@@ -3,10 +3,12 @@ import type { DocumentActivityEvent } from './document-activity.types';
import { sqliteTable, text } from 'drizzle-orm/sqlite-core';
import { createCreatedAtField, createPrimaryKeyField } from '../../shared/db/columns.helpers';
import { tagsTable } from '../../tags/tags.table';
import { usersTable } from '../../users/users.table';
import { documentsTable } from '../documents.table';
import { DOCUMENT_ACTIVITY_EVENT_LIST } from './document-activity.constants';
// Legacy stub for Drizzle schema - this file is no longer used in production
const usersTable = { id: '' } as any;
export const documentActivityLogTable = sqliteTable('document_activity_log', {
...createPrimaryKeyField({ prefix: 'doc_act' }),
...createCreatedAtField(),

View File

@@ -0,0 +1,28 @@
import type { Expand } from '@corentinth/chisels';
import type { ColumnType, Insertable, Selectable, Updateable } from 'kysely';
import type { CamelCaseKeys } from '../../app/database/database.columns.types';
/**
 * Kysely table definition for `document_activity_log`.
 * Timestamps are epoch milliseconds; `event_data` holds a JSON-encoded
 * payload (or NULL); `id` is write-once (insertable, never updateable).
 */
export type DocumentActivityLogTable = {
  // write-once primary key: required on insert, never updateable
  id: ColumnType<string, string, never>;
  // epoch milliseconds
  created_at: number;
  document_id: string;
  event: string;
  // JSON-serialized event payload, or NULL when the event carries no data
  event_data: string | null;
  // acting user, when the activity was triggered by a user
  user_id: string | null;
  // related tag — presumably for tag-related events; TODO confirm against writers
  tag_id: string | null;
};

// Row shapes derived from the table definition, per operation kind.
export type DbSelectableDocumentActivity = Selectable<DocumentActivityLogTable>;
export type DbInsertableDocumentActivity = Insertable<DocumentActivityLogTable>;
export type DbUpdateableDocumentActivity = Updateable<DocumentActivityLogTable>;

// Business-level insert payload: camelCase keys, real Date/object values,
// with id/createdAt/eventData optional (defaults applied by the mapper).
export type InsertableDocumentActivity = Expand<CamelCaseKeys<Omit<DbInsertableDocumentActivity, 'id' | 'created_at' | 'event_data'>> & {
  id?: string;
  createdAt?: Date;
  eventData?: Record<string, unknown> | null;
}>;

// Business-level read model: camelCase keys with hydrated Date / parsed JSON.
// NOTE(review): here the intersection sits *inside* CamelCaseKeys, unlike
// InsertableDocumentActivity above — harmless since the added keys are already
// camelCase, but consider aligning the two forms for consistency.
export type DocumentActivity = Expand<CamelCaseKeys<Omit<DbSelectableDocumentActivity, 'created_at' | 'event_data'> & {
  createdAt: Date;
  eventData: Record<string, unknown> | null;
}>>;

View File

@@ -1,9 +1,9 @@
import type { PartialBy } from '@corentinth/chisels';
import type { DbSelectableDocument } from './documents.types';
import type { DbInsertableDocument, DbSelectableDocument, Document, InsertableDocument } from './documents.tables';
import { omit } from 'lodash-es';
import { getExtension } from '../shared/files/file-names';
import { generateId } from '../shared/random/ids';
import { isDefined } from '../shared/utils';
import { isDefined, isNil } from '../shared/utils';
import { ORIGINAL_DOCUMENTS_STORAGE_KEY } from './documents.constants';
export function joinStorageKeyParts(...parts: string[]) {
@@ -28,7 +28,7 @@ export function isDocumentSizeLimitEnabled({ maxUploadSize }: { maxUploadSize: n
return maxUploadSize > 0;
}
export function formatDocumentForApi<T extends PartialBy<DbSelectableDocument, 'content'>>({ document }: { document: T }) {
export function formatDocumentForApi<T extends PartialBy<Document, 'content'>>({ document }: { document: T }) {
return {
...omit(
document,
@@ -42,6 +42,67 @@ export function formatDocumentForApi<T extends PartialBy<DbSelectableDocument, '
};
}
export function formatDocumentsForApi<T extends PartialBy<DbSelectableDocument, 'content'>>({ documents }: { documents: T[] }) {
export function formatDocumentsForApi<T extends PartialBy<Document, 'content'>>({ documents }: { documents: T[] }) {
return documents.map(document => formatDocumentForApi({ document }));
}
// DB <-> Business model transformers
/**
 * Converts a raw `documents` row (snake_case columns, epoch-millis timestamps,
 * 0/1 boolean flags) into the camelCase `Document` business model.
 * Returns `undefined` when given no row, so `executeTakeFirst()` results can
 * be forwarded directly.
 */
export function dbToDocument(dbDocument?: DbSelectableDocument): Document | undefined {
  if (!dbDocument) {
    return undefined;
  }

  const {
    id,
    organization_id,
    created_by,
    original_name,
    original_size,
    original_storage_key,
    original_sha256_hash,
    name,
    mime_type,
    content,
    file_encryption_key_wrapped,
    file_encryption_kek_version,
    file_encryption_algorithm,
    deleted_by,
    is_deleted,
    created_at,
    updated_at,
    deleted_at,
  } = dbDocument;

  return {
    id,
    name,
    content,
    organizationId: organization_id,
    createdBy: created_by,
    originalName: original_name,
    originalSize: original_size,
    originalStorageKey: original_storage_key,
    originalSha256Hash: original_sha256_hash,
    mimeType: mime_type,
    fileEncryptionKeyWrapped: file_encryption_key_wrapped,
    fileEncryptionKekVersion: file_encryption_kek_version,
    fileEncryptionAlgorithm: file_encryption_algorithm,
    deletedBy: deleted_by,
    // SQLite stores the flag as 0/1
    isDeleted: is_deleted === 1,
    createdAt: new Date(created_at),
    updatedAt: new Date(updated_at),
    // deleted_at is nullable: map absent values to null, not an Invalid Date
    deletedAt: isNil(deleted_at) ? null : new Date(deleted_at),
  };
}
/**
 * Maps a business-level document payload onto a `documents` insert row:
 * camelCase → snake_case, Date → epoch millis, boolean flag → 0/1.
 *
 * @param document - document to persist; `id`, `createdAt` and `updatedAt` are optional
 * @param options.now - clock used for missing created/updated timestamps (defaults to the current time)
 * @param options.generateId - id factory used when `id` is absent
 */
export function documentToDb(
  document: InsertableDocument,
  {
    now = new Date(),
    generateId: makeId = generateDocumentId,
  }: {
    now?: Date;
    generateId?: () => string;
  } = {},
): DbInsertableDocument {
  const fallbackTimestamp = now.getTime();

  return {
    id: document.id ?? makeId(),
    organization_id: document.organizationId,
    created_by: document.createdBy,
    original_name: document.originalName,
    original_size: document.originalSize,
    original_storage_key: document.originalStorageKey,
    original_sha256_hash: document.originalSha256Hash,
    name: document.name,
    mime_type: document.mimeType,
    content: document.content,
    file_encryption_key_wrapped: document.fileEncryptionKeyWrapped,
    file_encryption_kek_version: document.fileEncryptionKekVersion,
    file_encryption_algorithm: document.fileEncryptionAlgorithm,
    deleted_by: document.deletedBy,
    // stays undefined (column default) when the document was never deleted
    deleted_at: document.deletedAt?.getTime(),
    // strict `=== true` comparison preserved: only an explicit true maps to 1
    is_deleted: document.isDeleted === true ? 1 : 0,
    created_at: document.createdAt?.getTime() ?? fallbackTimestamp,
    updated_at: document.updatedAt?.getTime() ?? fallbackTimestamp,
  };
}

View File

@@ -1,8 +1,10 @@
import { map } from 'lodash-es';
import { sql } from 'kysely';
import { describe, expect, test } from 'vitest';
import { createInMemoryDatabase } from '../app/database/database.test-utils';
import { ORGANIZATION_ROLES } from '../organizations/organizations.constants';
import { createDocumentAlreadyExistsError } from './documents.errors';
import { documentToDb } from './documents.models';
import { createDocumentsRepository } from './documents.repository';
describe('documents repository', () => {
@@ -16,15 +18,17 @@ describe('documents repository', () => {
const documentsRepository = createDocumentsRepository({ db });
const { document } = await documentsRepository.saveOrganizationDocument({
const { document } = await documentsRepository.saveOrganizationDocument(documentToDb({
organizationId: 'organization-1',
createdBy: 'user-1',
mimeType: 'application/pdf',
name: 'Document 1',
originalName: 'document-1.pdf',
originalSize: 100,
originalStorageKey: 'document-1.pdf',
originalSha256Hash: 'hash1',
});
content: '',
}));
expect(document).to.include({
organizationId: 'organization-1',
@@ -79,28 +83,30 @@ describe('documents repository', () => {
const documentsRepository = createDocumentsRepository({ db });
await documentsRepository.saveOrganizationDocument({
await documentsRepository.saveOrganizationDocument(documentToDb({
organizationId: 'organization-1',
createdBy: 'user-1',
name: 'Document 1',
originalName: 'document-1.pdf',
content: 'lorem ipsum',
originalSize: 100,
originalStorageKey: '',
mimeType: 'application/pdf',
originalSha256Hash: 'hash1',
});
}));
await expect(
documentsRepository.saveOrganizationDocument({
documentsRepository.saveOrganizationDocument(documentToDb({
organizationId: 'organization-1',
createdBy: 'user-1',
name: 'Document 1',
originalName: 'document-1.pdf',
content: 'lorem ipsum',
originalSize: 100,
originalStorageKey: '',
mimeType: 'application/pdf',
originalSha256Hash: 'hash1',
}),
})),
).rejects.toThrow(createDocumentAlreadyExistsError());
});
});
@@ -112,14 +118,14 @@ describe('documents repository', () => {
organizations: [{ id: 'organization-1', name: 'Organization 1' }],
organizationMembers: [{ organizationId: 'organization-1', userId: 'user-1', role: ORGANIZATION_ROLES.OWNER }],
documents: [
{ id: 'doc-1', organizationId: 'organization-1', createdBy: 'user-1', name: 'Document 1', originalName: 'document-1.pdf', content: 'lorem ipsum', originalStorageKey: '', mimeType: 'application/pdf', originalSha256Hash: 'hash1' },
{ id: 'doc-2', organizationId: 'organization-1', createdBy: 'user-1', name: 'File 2', originalName: 'document-2.pdf', content: 'lorem', originalStorageKey: '', mimeType: 'application/pdf', originalSha256Hash: 'hash2' },
{ id: 'doc-3', organizationId: 'organization-1', createdBy: 'user-1', name: 'File 3', originalName: 'document-3.pdf', content: 'ipsum', originalStorageKey: '', mimeType: 'application/pdf', originalSha256Hash: 'hash3' },
{ id: 'doc-1', organizationId: 'organization-1', createdBy: 'user-1', name: 'Document 1', originalName: 'document-1.pdf', content: 'lorem ipsum', originalStorageKey: '', mimeType: 'application/pdf', originalSize: 100, originalSha256Hash: 'hash1' },
{ id: 'doc-2', organizationId: 'organization-1', createdBy: 'user-1', name: 'File 2', originalName: 'document-2.pdf', content: 'lorem', originalStorageKey: '', mimeType: 'application/pdf', originalSize: 100, originalSha256Hash: 'hash2' },
{ id: 'doc-3', organizationId: 'organization-1', createdBy: 'user-1', name: 'File 3', originalName: 'document-3.pdf', content: 'ipsum', originalStorageKey: '', mimeType: 'application/pdf', originalSize: 100, originalSha256Hash: 'hash3' },
],
});
// Rebuild the FTS index since we are using an in-memory database
await db.$client.execute(`INSERT INTO documents_fts(documents_fts) VALUES('rebuild');`);
await sql`INSERT INTO documents_fts(documents_fts) VALUES('rebuild')`.execute(db);
const documentsRepository = createDocumentsRepository({ db });

View File

@@ -1,22 +1,18 @@
import type { Database } from '../app/database/database.types';
import type { DbInsertableDocument } from './documents.types';
import type { DatabaseClient } from '../app/database/database.types';
import type { DbInsertableDocument, Document } from './documents.tables';
import { injectArguments, safely } from '@corentinth/chisels';
import { subDays } from 'date-fns';
import { and, count, desc, eq, getTableColumns, lt, sql } from 'drizzle-orm';
import { omit } from 'lodash-es';
import { createIterator } from '../app/database/database.usecases';
import { sql } from 'kysely';
import { createOrganizationNotFoundError } from '../organizations/organizations.errors';
import { isUniqueConstraintError } from '../shared/db/constraints.models';
import { withPagination } from '../shared/db/pagination';
import { createError } from '../shared/errors/errors';
import { isDefined, isNil, omitUndefined } from '../shared/utils';
import { documentsTagsTable, tagsTable } from '../tags/tags.table';
import { isNil, omitUndefined } from '../shared/utils';
import { createDocumentAlreadyExistsError, createDocumentNotFoundError } from './documents.errors';
import { documentsTable } from './documents.table';
import { dbToDocument } from './documents.models';
export type DocumentsRepository = ReturnType<typeof createDocumentsRepository>;
export function createDocumentsRepository({ db }: { db: Database }) {
export function createDocumentsRepository({ db }: { db: DatabaseClient }) {
return injectArguments(
{
saveOrganizationDocument,
@@ -42,22 +38,25 @@ export function createDocumentsRepository({ db }: { db: Database }) {
);
}
async function getOrganizationDocumentBySha256Hash({ sha256Hash, organizationId, db }: { sha256Hash: string; organizationId: string; db: Database }) {
const [document] = await db
.select()
.from(documentsTable)
.where(
and(
eq(documentsTable.originalSha256Hash, sha256Hash),
eq(documentsTable.organizationId, organizationId),
),
);
async function getOrganizationDocumentBySha256Hash({ sha256Hash, organizationId, db }: { sha256Hash: string; organizationId: string; db: DatabaseClient }) {
const dbDocument = await db
.selectFrom('documents')
.where('original_sha256_hash', '=', sha256Hash)
.where('organization_id', '=', organizationId)
.selectAll()
.executeTakeFirst();
return { document };
return { document: dbToDocument(dbDocument) };
}
async function saveOrganizationDocument({ db, ...documentToInsert }: { db: Database } & DbInsertableDocument) {
const [documents, error] = await safely(db.insert(documentsTable).values(documentToInsert).returning());
async function saveOrganizationDocument({ db, ...documentToInsert }: { db: DatabaseClient } & DbInsertableDocument) {
const [dbDocument, error] = await safely(
db
.insertInto('documents')
.values(documentToInsert)
.returningAll()
.executeTakeFirst(),
);
if (isUniqueConstraintError({ error })) {
throw createDocumentAlreadyExistsError();
@@ -67,7 +66,7 @@ async function saveOrganizationDocument({ db, ...documentToInsert }: { db: Datab
throw error;
}
const [document] = documents ?? [];
const document = dbToDocument(dbDocument);
if (isNil(document)) {
// Very unlikely to happen as the insertion throws an issue, it's for type safety
@@ -82,20 +81,20 @@ async function saveOrganizationDocument({ db, ...documentToInsert }: { db: Datab
return { document };
}
async function getOrganizationDocumentsCount({ organizationId, filters, db }: { organizationId: string; filters?: { tags?: string[] }; db: Database }) {
const [record] = await db
.select({
documentsCount: count(documentsTable.id),
})
.from(documentsTable)
.leftJoin(documentsTagsTable, eq(documentsTable.id, documentsTagsTable.documentId))
.where(
and(
eq(documentsTable.organizationId, organizationId),
eq(documentsTable.isDeleted, false),
...(filters?.tags ? filters.tags.map(tag => eq(documentsTagsTable.tagId, tag)) : []),
),
);
async function getOrganizationDocumentsCount({ organizationId, filters, db }: { organizationId: string; filters?: { tags?: string[] }; db: DatabaseClient }) {
let query = db
.selectFrom('documents')
.select(sql<number>`count(distinct documents.id)`.as('documentsCount'))
.where('documents.organization_id', '=', organizationId)
.where('documents.is_deleted', '=', 0);
if (filters?.tags && filters.tags.length > 0) {
query = query
.leftJoin('documents_tags', 'documents.id', 'documents_tags.document_id')
.where('documents_tags.tag_id', 'in', filters.tags);
}
const record = await query.executeTakeFirst();
if (isNil(record)) {
throw createOrganizationNotFoundError();
@@ -106,18 +105,13 @@ async function getOrganizationDocumentsCount({ organizationId, filters, db }: {
return { documentsCount };
}
async function getOrganizationDeletedDocumentsCount({ organizationId, db }: { organizationId: string; db: Database }) {
const [record] = await db
.select({
documentsCount: count(documentsTable.id),
})
.from(documentsTable)
.where(
and(
eq(documentsTable.organizationId, organizationId),
eq(documentsTable.isDeleted, true),
),
);
async function getOrganizationDeletedDocumentsCount({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
const record = await db
.selectFrom('documents')
.select(sql<number>`count(*)`.as('documentsCount'))
.where('organization_id', '=', organizationId)
.where('is_deleted', '=', 1)
.executeTakeFirst();
if (isNil(record)) {
throw createOrganizationNotFoundError();
@@ -139,102 +133,147 @@ async function getOrganizationDocuments({
pageIndex: number;
pageSize: number;
filters?: { tags?: string[] };
db: Database;
db: DatabaseClient;
}) {
const query = db
.select({
document: omit(getTableColumns(documentsTable), ['content']),
tag: getTableColumns(tagsTable),
})
.from(documentsTable)
.leftJoin(documentsTagsTable, eq(documentsTable.id, documentsTagsTable.documentId))
.leftJoin(tagsTable, eq(tagsTable.id, documentsTagsTable.tagId))
.where(
and(
eq(documentsTable.organizationId, organizationId),
eq(documentsTable.isDeleted, false),
...(filters?.tags ? filters.tags.map(tag => eq(documentsTagsTable.tagId, tag)) : []),
),
);
let query = db
.selectFrom('documents')
.leftJoin('documents_tags', 'documents.id', 'documents_tags.document_id')
.leftJoin('tags', 'tags.id', 'documents_tags.tag_id')
.where('documents.organization_id', '=', organizationId)
.where('documents.is_deleted', '=', 0);
const documentsTagsQuery = withPagination(
query.$dynamic(),
{
orderByColumn: desc(documentsTable.createdAt),
pageIndex,
pageSize,
},
);
if (filters?.tags && filters.tags.length > 0) {
query = query.where('documents_tags.tag_id', 'in', filters.tags);
}
const documentsTags = await documentsTagsQuery;
const documentsTags = await query
.select([
'documents.id',
'documents.organization_id',
'documents.created_by',
'documents.original_name',
'documents.original_size',
'documents.original_storage_key',
'documents.original_sha256_hash',
'documents.name',
'documents.mime_type',
'documents.file_encryption_key_wrapped',
'documents.file_encryption_kek_version',
'documents.file_encryption_algorithm',
'documents.deleted_at',
'documents.deleted_by',
'documents.is_deleted',
'documents.created_at',
'documents.updated_at',
'tags.id as tag_id',
'tags.organization_id as tag_organization_id',
'tags.name as tag_name',
'tags.color as tag_color',
'tags.description as tag_description',
'tags.created_at as tag_created_at',
'tags.updated_at as tag_updated_at',
])
.orderBy('documents.created_at', 'desc')
.limit(pageSize)
.offset(pageIndex * pageSize)
.execute();
const groupedDocuments = documentsTags.reduce((acc, { document, tag }) => {
if (!acc[document.id]) {
acc[document.id] = {
...document,
tags: [],
const groupedDocuments = documentsTags.reduce((acc, row) => {
if (!acc[row.id]) {
const dbDoc = {
id: row.id,
organization_id: row.organization_id,
created_by: row.created_by,
original_name: row.original_name,
original_size: row.original_size,
original_storage_key: row.original_storage_key,
original_sha256_hash: row.original_sha256_hash,
name: row.name,
mime_type: row.mime_type,
content: '', // Not selected in this query
file_encryption_key_wrapped: row.file_encryption_key_wrapped,
file_encryption_kek_version: row.file_encryption_kek_version,
file_encryption_algorithm: row.file_encryption_algorithm,
deleted_at: row.deleted_at,
deleted_by: row.deleted_by,
is_deleted: row.is_deleted,
created_at: row.created_at,
updated_at: row.updated_at,
};
const document = dbToDocument(dbDoc);
if (document) {
acc[row.id] = {
...document,
tags: [],
};
}
}
if (tag) {
acc[document.id]!.tags.push(tag);
if (row.tag_id && acc[row.id]) {
acc[row.id]!.tags.push({
id: row.tag_id,
organization_id: row.tag_organization_id!,
name: row.tag_name!,
color: row.tag_color!,
description: row.tag_description!,
created_at: row.tag_created_at!,
updated_at: row.tag_updated_at!,
});
}
return acc;
}, {} as Record<string, Omit<typeof documentsTable.$inferSelect, 'content'> & { tags: typeof tagsTable.$inferSelect[] }>);
}, {} as Record<string, Document & { tags: any[] }>);
return {
documents: Object.values(groupedDocuments),
};
}
async function getOrganizationDeletedDocuments({ organizationId, pageIndex, pageSize, db }: { organizationId: string; pageIndex: number; pageSize: number; db: Database }) {
const query = db
.select()
.from(documentsTable)
.where(
and(
eq(documentsTable.organizationId, organizationId),
eq(documentsTable.isDeleted, true),
),
);
const documents = await withPagination(
query.$dynamic(),
{
orderByColumn: desc(documentsTable.deletedAt),
pageIndex,
pageSize,
},
);
async function getOrganizationDeletedDocuments({ organizationId, pageIndex, pageSize, db }: { organizationId: string; pageIndex: number; pageSize: number; db: DatabaseClient }) {
const dbDocuments = await db
.selectFrom('documents')
.where('organization_id', '=', organizationId)
.where('is_deleted', '=', 1)
.selectAll()
.orderBy('deleted_at', 'desc')
.limit(pageSize)
.offset(pageIndex * pageSize)
.execute();
return {
documents,
documents: dbDocuments.map(dbDoc => dbToDocument(dbDoc)).filter((doc): doc is NonNullable<typeof doc> => doc !== undefined),
};
}
async function getDocumentById({ documentId, organizationId, db }: { documentId: string; organizationId: string; db: Database }) {
const [document] = await db
.select()
.from(documentsTable)
.where(
and(
eq(documentsTable.id, documentId),
eq(documentsTable.organizationId, organizationId),
),
);
async function getDocumentById({ documentId, organizationId, db }: { documentId: string; organizationId: string; db: DatabaseClient }) {
const dbDocument = await db
.selectFrom('documents')
.where('id', '=', documentId)
.where('organization_id', '=', organizationId)
.selectAll()
.executeTakeFirst();
const document = dbToDocument(dbDocument);
if (!document) {
return { document: undefined };
}
const tags = await db
.select({
...getTableColumns(tagsTable),
})
.from(documentsTagsTable)
.leftJoin(tagsTable, eq(tagsTable.id, documentsTagsTable.tagId))
.where(eq(documentsTagsTable.documentId, documentId));
.selectFrom('documents_tags')
.leftJoin('tags', 'tags.id', 'documents_tags.tag_id')
.where('documents_tags.document_id', '=', documentId)
.select([
'tags.id',
'tags.organization_id',
'tags.name',
'tags.color',
'tags.description',
'tags.created_at',
'tags.updated_at',
])
.execute();
return {
document: {
@@ -244,39 +283,36 @@ async function getDocumentById({ documentId, organizationId, db }: { documentId:
};
}
async function softDeleteDocument({ documentId, organizationId, userId, db, now = new Date() }: { documentId: string; organizationId: string; userId: string; db: Database; now?: Date }) {
async function softDeleteDocument({ documentId, organizationId, userId, db, now = new Date() }: { documentId: string; organizationId: string; userId: string; db: DatabaseClient; now?: Date }) {
await db
.update(documentsTable)
.updateTable('documents')
.set({
isDeleted: true,
deletedBy: userId,
deletedAt: now,
is_deleted: 1,
deleted_by: userId,
deleted_at: now.getTime(),
})
.where(
and(
eq(documentsTable.id, documentId),
eq(documentsTable.organizationId, organizationId),
),
);
.where('id', '=', documentId)
.where('organization_id', '=', organizationId)
.execute();
}
async function restoreDocument({ documentId, organizationId, name, userId, db }: { documentId: string; organizationId: string; name?: string; userId?: string; db: Database }) {
const [document] = await db
.update(documentsTable)
.set({
isDeleted: false,
deletedBy: null,
deletedAt: null,
...(isDefined(name) ? { name, originalName: name } : {}),
...(isDefined(userId) ? { createdBy: userId } : {}),
})
.where(
and(
eq(documentsTable.id, documentId),
eq(documentsTable.organizationId, organizationId),
),
)
.returning();
async function restoreDocument({ documentId, organizationId, name, userId, db }: { documentId: string; organizationId: string; name?: string; userId?: string; db: DatabaseClient }) {
const dbDocument = await db
.updateTable('documents')
.set(omitUndefined({
is_deleted: 0,
deleted_by: null,
deleted_at: null,
name,
original_name: name,
created_by: userId,
}))
.where('id', '=', documentId)
.where('organization_id', '=', organizationId)
.returningAll()
.executeTakeFirst();
const document = dbToDocument(dbDocument);
if (isNil(document)) {
throw createDocumentNotFoundError();
@@ -285,66 +321,63 @@ async function restoreDocument({ documentId, organizationId, name, userId, db }:
return { document };
}
async function hardDeleteDocument({ documentId, db }: { documentId: string; db: Database }) {
await db.delete(documentsTable).where(eq(documentsTable.id, documentId));
async function hardDeleteDocument({ documentId, db }: { documentId: string; db: DatabaseClient }) {
await db
.deleteFrom('documents')
.where('id', '=', documentId)
.execute();
}
async function getExpiredDeletedDocuments({ db, expirationDelayInDays, now = new Date() }: { db: Database; expirationDelayInDays: number; now?: Date }) {
async function getExpiredDeletedDocuments({ db, expirationDelayInDays, now = new Date() }: { db: DatabaseClient; expirationDelayInDays: number; now?: Date }) {
const expirationDate = subDays(now, expirationDelayInDays);
const documents = await db.select({
id: documentsTable.id,
originalStorageKey: documentsTable.originalStorageKey,
}).from(documentsTable).where(
and(
eq(documentsTable.isDeleted, true),
lt(documentsTable.deletedAt, expirationDate),
),
);
const documents = await db
.selectFrom('documents')
.select(['id', 'original_storage_key as originalStorageKey'])
.where('is_deleted', '=', 1)
.where('deleted_at', '<', expirationDate.getTime())
.execute();
return {
documents,
};
}
async function searchOrganizationDocuments({ organizationId, searchQuery, pageIndex, pageSize, db }: { organizationId: string; searchQuery: string; pageIndex: number; pageSize: number; db: Database }) {
async function searchOrganizationDocuments({ organizationId, searchQuery, pageIndex, pageSize, db }: { organizationId: string; searchQuery: string; pageIndex: number; pageSize: number; db: DatabaseClient }) {
// TODO: extract this logic to a tested function
// when searchquery is a single word, we append a wildcard to it to make it a prefix search
const cleanedSearchQuery = searchQuery.replace(/"/g, '').replace(/\*/g, '').trim();
const formattedSearchQuery = cleanedSearchQuery.includes(' ') ? cleanedSearchQuery : `${cleanedSearchQuery}*`;
const result = await db.run(sql`
SELECT * FROM ${documentsTable}
JOIN documents_fts ON documents_fts.id = ${documentsTable.id}
WHERE ${documentsTable.organizationId} = ${organizationId}
AND ${documentsTable.isDeleted} = 0
const result = await sql`
SELECT * FROM documents
JOIN documents_fts ON documents_fts.id = documents.id
WHERE documents.organization_id = ${organizationId}
AND documents.is_deleted = 0
AND documents_fts MATCH ${formattedSearchQuery}
ORDER BY rank
LIMIT ${pageSize}
OFFSET ${pageIndex * pageSize}
`);
`.execute(db);
return {
documents: result.rows as unknown as (typeof documentsTable.$inferSelect)[],
documents: (result.rows as any[]).map(row => dbToDocument(row)).filter((doc): doc is NonNullable<typeof doc> => doc !== undefined),
};
}
async function getOrganizationStats({ organizationId, db }: { organizationId: string; db: Database }) {
const [record] = await db
.select({
totalDocumentsCount: count(documentsTable.id),
totalDocumentsSize: sql<number>`COALESCE(SUM(${documentsTable.originalSize}), 0)`.as('totalDocumentsSize'),
deletedDocumentsCount: sql<number>`COUNT(${documentsTable.id}) FILTER (WHERE ${documentsTable.isDeleted} = true)`.as('deletedDocumentsCount'),
documentsCount: sql<number>`COUNT(${documentsTable.id}) FILTER (WHERE ${documentsTable.isDeleted} = false)`.as('documentsCount'),
documentsSize: sql<number>`COALESCE(SUM(${documentsTable.originalSize}) FILTER (WHERE ${documentsTable.isDeleted} = false), 0)`.as('documentsSize'),
deletedDocumentsSize: sql<number>`COALESCE(SUM(${documentsTable.originalSize}) FILTER (WHERE ${documentsTable.isDeleted} = true), 0)`.as('deletedDocumentsSize'),
})
.from(documentsTable)
.where(
and(
eq(documentsTable.organizationId, organizationId),
),
);
async function getOrganizationStats({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
const record = await db
.selectFrom('documents')
.select([
sql<number>`COUNT(id)`.as('totalDocumentsCount'),
sql<number>`COALESCE(SUM(original_size), 0)`.as('totalDocumentsSize'),
sql<number>`COUNT(id) FILTER (WHERE is_deleted = 1)`.as('deletedDocumentsCount'),
sql<number>`COUNT(id) FILTER (WHERE is_deleted = 0)`.as('documentsCount'),
sql<number>`COALESCE(SUM(original_size) FILTER (WHERE is_deleted = 0), 0)`.as('documentsSize'),
sql<number>`COALESCE(SUM(original_size) FILTER (WHERE is_deleted = 1), 0)`.as('deletedDocumentsSize'),
])
.where('organization_id', '=', organizationId)
.executeTakeFirst();
if (isNil(record)) {
throw createOrganizationNotFoundError();
@@ -356,84 +389,109 @@ async function getOrganizationStats({ organizationId, db }: { organizationId: st
documentsCount,
documentsSize: Number(documentsSize ?? 0),
deletedDocumentsCount,
deletedDocumentsSize,
deletedDocumentsSize: Number(deletedDocumentsSize ?? 0),
totalDocumentsCount,
totalDocumentsSize,
totalDocumentsSize: Number(totalDocumentsSize ?? 0),
};
}
async function getAllOrganizationTrashDocuments({ organizationId, db }: { organizationId: string; db: Database }) {
const documents = await db.select({
id: documentsTable.id,
originalStorageKey: documentsTable.originalStorageKey,
}).from(documentsTable).where(
and(
eq(documentsTable.organizationId, organizationId),
eq(documentsTable.isDeleted, true),
),
);
async function getAllOrganizationTrashDocuments({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
const documents = await db
.selectFrom('documents')
.select(['id', 'original_storage_key as originalStorageKey'])
.where('organization_id', '=', organizationId)
.where('is_deleted', '=', 1)
.execute();
return {
documents,
};
}
async function getAllOrganizationDocuments({ organizationId, db }: { organizationId: string; db: Database }) {
const documents = await db.select({
id: documentsTable.id,
originalStorageKey: documentsTable.originalStorageKey,
}).from(documentsTable).where(
eq(documentsTable.organizationId, organizationId),
);
async function getAllOrganizationDocuments({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
const documents = await db
.selectFrom('documents')
.select(['id', 'original_storage_key as originalStorageKey'])
.where('organization_id', '=', organizationId)
.execute();
return {
documents,
};
}
function getAllOrganizationDocumentsIterator({ organizationId, batchSize = 100, db }: { organizationId: string; batchSize?: number; db: Database }) {
const query = db
.select({
id: documentsTable.id,
originalStorageKey: documentsTable.originalStorageKey,
})
.from(documentsTable)
.where(
eq(documentsTable.organizationId, organizationId),
)
.orderBy(documentsTable.createdAt)
.$dynamic();
async function* getAllOrganizationDocumentsIterator({ organizationId, batchSize = 100, db }: { organizationId: string; batchSize?: number; db: DatabaseClient }): AsyncGenerator<{ id: string; originalStorageKey: string }> {
let offset = 0;
return createIterator({ query, batchSize }) as AsyncGenerator<{ id: string; originalStorageKey: string }>;
while (true) {
const results = await db
.selectFrom('documents')
.select(['id', 'original_storage_key as originalStorageKey'])
.where('organization_id', '=', organizationId)
.orderBy('created_at', 'asc')
.limit(batchSize)
.offset(offset)
.execute();
if (results.length === 0) {
break;
}
for (const result of results) {
yield result;
}
if (results.length < batchSize) {
break;
}
offset += batchSize;
}
}
function getAllOrganizationUndeletedDocumentsIterator({ organizationId, batchSize = 100, db }: { organizationId: string; batchSize?: number; db: Database }) {
const query = db
.select()
.from(documentsTable)
.where(
and(
eq(documentsTable.organizationId, organizationId),
eq(documentsTable.isDeleted, false),
),
)
.orderBy(documentsTable.createdAt)
.$dynamic();
async function* getAllOrganizationUndeletedDocumentsIterator({ organizationId, batchSize = 100, db }: { organizationId: string; batchSize?: number; db: DatabaseClient }) {
let offset = 0;
return createIterator({ query, batchSize });
while (true) {
const dbResults = await db
.selectFrom('documents')
.selectAll()
.where('organization_id', '=', organizationId)
.where('is_deleted', '=', 0)
.orderBy('created_at', 'asc')
.limit(batchSize)
.offset(offset)
.execute();
if (dbResults.length === 0) {
break;
}
for (const dbResult of dbResults) {
const document = dbToDocument(dbResult);
if (document) {
yield document;
}
}
if (dbResults.length < batchSize) {
break;
}
offset += batchSize;
}
}
async function updateDocument({ documentId, organizationId, name, content, db }: { documentId: string; organizationId: string; name?: string; content?: string; db: Database }) {
const [document] = await db
.update(documentsTable)
async function updateDocument({ documentId, organizationId, name, content, db }: { documentId: string; organizationId: string; name?: string; content?: string; db: DatabaseClient }) {
const dbDocument = await db
.updateTable('documents')
.set(omitUndefined({ name, content }))
.where(
and(
eq(documentsTable.id, documentId),
eq(documentsTable.organizationId, organizationId),
),
)
.returning();
.where('id', '=', documentId)
.where('organization_id', '=', organizationId)
.returningAll()
.executeTakeFirst();
const document = dbToDocument(dbDocument);
if (isNil(document)) {
// This should never happen, but for type safety

View File

@@ -1,8 +1,7 @@
import { eq, sql } from 'drizzle-orm';
import { sql } from 'kysely';
import { describe, expect, test } from 'vitest';
import { createInMemoryDatabase } from '../app/database/database.test-utils';
import { ORGANIZATION_ROLES } from '../organizations/organizations.constants';
import { documentsTable } from './documents.table';
describe('documents table', () => {
describe('table documents_fts', () => {
@@ -14,32 +13,41 @@ describe('documents table', () => {
organizationMembers: [{ organizationId: 'organization-1', userId: 'user-1', role: ORGANIZATION_ROLES.OWNER }],
});
await db.insert(documentsTable).values([
const now = Date.now();
await db.insertInto('documents').values([
{
id: 'document-1',
organizationId: 'organization-1',
createdBy: 'user-1',
mimeType: 'application/pdf',
organization_id: 'organization-1',
created_by: 'user-1',
mime_type: 'application/pdf',
name: 'Document 1',
originalName: 'document-1.pdf',
originalStorageKey: 'document-1.pdf',
original_name: 'document-1.pdf',
original_storage_key: 'document-1.pdf',
original_size: 0,
content: 'lorem ipsum',
originalSha256Hash: 'hash1',
original_sha256_hash: 'hash1',
created_at: now,
updated_at: now,
is_deleted: 0,
},
{
id: 'document-2',
organizationId: 'organization-1',
createdBy: 'user-1',
mimeType: 'application/pdf',
organization_id: 'organization-1',
created_by: 'user-1',
mime_type: 'application/pdf',
name: 'Photo 1',
originalName: 'photo-1.jpg',
originalStorageKey: 'photo-1.jpg',
original_name: 'photo-1.jpg',
original_storage_key: 'photo-1.jpg',
original_size: 0,
content: 'dolor sit amet',
originalSha256Hash: 'hash2',
original_sha256_hash: 'hash2',
created_at: now,
updated_at: now,
is_deleted: 0,
},
]);
]).execute();
const { rows } = await db.run(sql`SELECT * FROM documents_fts;`);
const { rows } = await db.executeQuery(sql`SELECT * FROM documents_fts;`.compile(db));
expect(rows).to.eql([
{
@@ -56,7 +64,7 @@ describe('documents table', () => {
},
]);
const { rows: searchResults } = await db.run(sql`SELECT * FROM documents_fts WHERE documents_fts MATCH 'lorem';`);
const { rows: searchResults } = await db.executeQuery(sql`SELECT * FROM documents_fts WHERE documents_fts MATCH 'lorem';`.compile(db));
expect(searchResults).to.eql([
{
@@ -75,34 +83,43 @@ describe('documents table', () => {
organizationMembers: [{ organizationId: 'organization-1', userId: 'user-1', role: ORGANIZATION_ROLES.OWNER }],
});
await db.insert(documentsTable).values([
const now = Date.now();
await db.insertInto('documents').values([
{
id: 'document-1',
organizationId: 'organization-1',
createdBy: 'user-1',
mimeType: 'application/pdf',
organization_id: 'organization-1',
created_by: 'user-1',
mime_type: 'application/pdf',
name: 'Document 1',
originalName: 'document-1.pdf',
originalStorageKey: 'document-1.pdf',
original_name: 'document-1.pdf',
original_storage_key: 'document-1.pdf',
original_size: 0,
content: 'lorem ipsum',
originalSha256Hash: 'hash1',
original_sha256_hash: 'hash1',
created_at: now,
updated_at: now,
is_deleted: 0,
},
{
id: 'document-2',
organizationId: 'organization-1',
createdBy: 'user-1',
mimeType: 'application/pdf',
organization_id: 'organization-1',
created_by: 'user-1',
mime_type: 'application/pdf',
name: 'Photo 1',
originalName: 'photo-1.jpg',
originalStorageKey: 'photo-1.jpg',
original_name: 'photo-1.jpg',
original_storage_key: 'photo-1.jpg',
original_size: 0,
content: 'dolor sit amet',
originalSha256Hash: 'hash2',
original_sha256_hash: 'hash2',
created_at: now,
updated_at: now,
is_deleted: 0,
},
]);
]).execute();
await db.update(documentsTable).set({ content: 'foo bar baz' }).where(eq(documentsTable.id, 'document-1'));
await db.updateTable('documents').set({ content: 'foo bar baz' }).where('id', '=', 'document-1').execute();
const { rows } = await db.run(sql`SELECT * FROM documents_fts;`);
const { rows } = await db.executeQuery(sql`SELECT * FROM documents_fts;`.compile(db));
expect(rows).to.eql([
{
@@ -119,7 +136,7 @@ describe('documents table', () => {
},
]);
const { rows: searchResults } = await db.run(sql`SELECT * FROM documents_fts WHERE documents_fts MATCH 'foo';`);
const { rows: searchResults } = await db.executeQuery(sql`SELECT * FROM documents_fts WHERE documents_fts MATCH 'foo';`.compile(db));
expect(searchResults).to.eql([
{
@@ -138,34 +155,43 @@ describe('documents table', () => {
organizationMembers: [{ organizationId: 'organization-1', userId: 'user-1', role: ORGANIZATION_ROLES.OWNER }],
});
await db.insert(documentsTable).values([
const now = Date.now();
await db.insertInto('documents').values([
{
id: 'document-1',
organizationId: 'organization-1',
createdBy: 'user-1',
mimeType: 'application/pdf',
organization_id: 'organization-1',
created_by: 'user-1',
mime_type: 'application/pdf',
name: 'Document 1',
originalName: 'document-1.pdf',
originalStorageKey: 'document-1.pdf',
original_name: 'document-1.pdf',
original_storage_key: 'document-1.pdf',
original_size: 0,
content: 'lorem ipsum',
originalSha256Hash: 'hash1',
original_sha256_hash: 'hash1',
created_at: now,
updated_at: now,
is_deleted: 0,
},
{
id: 'document-2',
organizationId: 'organization-1',
createdBy: 'user-1',
mimeType: 'application/pdf',
organization_id: 'organization-1',
created_by: 'user-1',
mime_type: 'application/pdf',
name: 'Photo 1',
originalName: 'photo-1.jpg',
originalStorageKey: 'photo-1.jpg',
original_name: 'photo-1.jpg',
original_storage_key: 'photo-1.jpg',
original_size: 0,
content: 'dolor sit amet',
originalSha256Hash: 'hash2',
original_sha256_hash: 'hash2',
created_at: now,
updated_at: now,
is_deleted: 0,
},
]);
]).execute();
await db.delete(documentsTable).where(eq(documentsTable.id, 'document-1'));
await db.deleteFrom('documents').where('id', '=', 'document-1').execute();
const { rows } = await db.run(sql`SELECT * FROM documents_fts;`);
const { rows } = await db.executeQuery(sql`SELECT * FROM documents_fts;`.compile(db));
expect(rows).to.eql([
{
@@ -176,7 +202,7 @@ describe('documents table', () => {
},
]);
const { rows: searchResults } = await db.run(sql`SELECT * FROM documents_fts WHERE documents_fts MATCH 'lorem';`);
const { rows: searchResults } = await db.executeQuery(sql`SELECT * FROM documents_fts WHERE documents_fts MATCH 'lorem';`.compile(db));
expect(searchResults).to.eql([]);
});

View File

@@ -1,9 +1,11 @@
import { index, integer, sqliteTable, text, uniqueIndex } from 'drizzle-orm/sqlite-core';
import { organizationsTable } from '../organizations/organizations.table';
import { createPrimaryKeyField, createTimestampColumns } from '../shared/db/columns.helpers';
import { usersTable } from '../users/users.table';
import { generateDocumentId } from './documents.models';
// Legacy stub for Drizzle schema - this file is no longer used in production
const usersTable = { id: '' } as any;
export const documentsTable = sqliteTable('documents', {
...createPrimaryKeyField({ idGenerator: generateDocumentId }),
...createTimestampColumns(),

View File

@@ -0,0 +1,41 @@
import type { Expand } from '@corentinth/chisels';
import type { Insertable, Selectable, Updateable } from 'kysely';
import type { BusinessInsertable, CamelCaseKeys, TableWithIdAndTimestamps } from '../app/database/database.columns.types';
export type DocumentsTable = TableWithIdAndTimestamps<{
organization_id: string;
created_by: string | null;
original_name: string;
original_size: number;
original_storage_key: string;
original_sha256_hash: string;
name: string;
mime_type: string;
content: string;
file_encryption_key_wrapped: string | null;
file_encryption_kek_version: string | null;
file_encryption_algorithm: string | null;
deleted_at: number | null;
deleted_by: string | null;
is_deleted: number;
}>;
export type DbSelectableDocument = Selectable<DocumentsTable>;
export type DbInsertableDocument = Insertable<DocumentsTable>;
export type DbUpdateableDocument = Updateable<DocumentsTable>;
export type InsertableDocument = BusinessInsertable<DbInsertableDocument, {
isDeleted?: boolean;
deletedAt?: Date | null;
}>;
export type Document = Expand<CamelCaseKeys<Omit<DbSelectableDocument, 'created_at' | 'updated_at' | 'is_deleted' | 'deleted_at'> & {
createdAt: Date;
updatedAt: Date;
isDeleted: boolean;
deletedAt: Date | null;
}>>;

View File

@@ -1,7 +1,7 @@
import type { Expand } from '@corentinth/chisels';
import type { documentsTable } from './documents.table';
export type DbInsertableDocument = Expand<typeof documentsTable.$inferInsert>;
export type DbSelectableDocument = Expand<typeof documentsTable.$inferSelect>;
export type Document = DbSelectableDocument;
// Re-export types from tables and models for backward compatibility
export type {
Document,
InsertableDocument,
DbInsertableDocument,
DbSelectableDocument,
} from './documents.tables';

View File

@@ -9,14 +9,12 @@ import { nextTick } from '../shared/async/defer.test-utils';
import { collectReadableStreamToString, createReadableStream } from '../shared/streams/readable-stream';
import { createTaggingRulesRepository } from '../tagging-rules/tagging-rules.repository';
import { createTagsRepository } from '../tags/tags.repository';
import { documentsTagsTable } from '../tags/tags.table';
import { createInMemoryTaskServices } from '../tasks/tasks.test-utils';
import { createWebhookRepository } from '../webhooks/webhook.repository';
import { createDocumentActivityRepository } from './document-activity/document-activity.repository';
import { documentActivityLogTable } from './document-activity/document-activity.table';
import { createDocumentAlreadyExistsError, createDocumentSizeTooLargeError } from './documents.errors';
import { dbToDocument } from './documents.models';
import { createDocumentsRepository } from './documents.repository';
import { documentsTable } from './documents.table';
import { createDocumentCreationUsecase, extractAndSaveDocumentFileContent } from './documents.usecases';
import { createDocumentStorageService } from './storage/documents.storage.services';
import { inMemoryStorageDriverFactory } from './storage/drivers/memory/memory.storage-driver';
@@ -77,7 +75,8 @@ describe('documents usecases', () => {
expect(content).to.eql('Hello, world!');
// Ensure the document record is saved in the database
const documentRecords = await db.select().from(documentsTable);
const dbDocuments = await db.selectFrom('documents').selectAll().execute();
const documentRecords = dbDocuments.map(dbToDocument).filter(Boolean);
expect(documentRecords).to.eql([document]);
});
@@ -143,7 +142,8 @@ describe('documents usecases', () => {
createDocumentAlreadyExistsError(),
);
const documentRecords = await db.select().from(documentsTable);
const dbDocuments = await db.selectFrom('documents').selectAll().execute();
const documentRecords = dbDocuments.map(dbToDocument).filter(Boolean);
expect(documentRecords.map(({ id }) => id)).to.eql(['doc_1']);
@@ -180,6 +180,7 @@ describe('documents usecases', () => {
originalStorageKey: 'organization-1/originals/document-1.txt',
name: 'file-1.txt',
originalName: 'file-1.txt',
originalSize: 13,
content: 'Hello, world!',
}],
documentsTags: [{
@@ -193,7 +194,7 @@ describe('documents usecases', () => {
{ id: 'tagging-rule-condition-1', taggingRuleId: 'tagging-rule-1', field: 'content', operator: 'contains', value: 'hello' },
],
taggingRuleActions: [
{ id: 'tagging-rule-action-1', taggingRuleId: 'tagging-rule-1', tagId: 'tag-2' },
{ id: 'tagging-rule-action-1', taggingRuleId: 'tagging-rule-1', tagId: 'tag-2' } as any,
],
});
@@ -228,17 +229,18 @@ describe('documents usecases', () => {
deletedAt: null,
});
const documentsRecordsAfterRestoration = await db.select().from(documentsTable);
const dbDocumentsAfterRestoration = await db.selectFrom('documents').selectAll().execute();
const documentsRecordsAfterRestoration = dbDocumentsAfterRestoration.map(dbToDocument).filter(Boolean);
expect(documentsRecordsAfterRestoration.length).to.eql(1);
expect(documentsRecordsAfterRestoration[0]).to.eql(documentRestored);
const documentsTagsRecordsAfterRestoration = await db.select().from(documentsTagsTable);
const documentsTagsRecordsAfterRestoration = await db.selectFrom('documents_tags').selectAll().execute();
expect(documentsTagsRecordsAfterRestoration).to.eql([{
documentId: 'document-1',
tagId: 'tag-2',
document_id: 'document-1',
tag_id: 'tag-2',
}]);
});
@@ -285,7 +287,8 @@ describe('documents usecases', () => {
}),
).rejects.toThrow(new Error('Macron, explosion!'));
const documentRecords = await db.select().from(documentsTable);
const dbDocuments = await db.selectFrom('documents').selectAll().execute();
const documentRecords = dbDocuments.map(dbToDocument).filter(Boolean);
expect(documentRecords).to.eql([]);
@@ -332,22 +335,22 @@ describe('documents usecases', () => {
await nextTick();
const documentActivityLogRecords = await db.select().from(documentActivityLogTable);
const documentActivityLogRecords = await db.selectFrom('document_activity_log').selectAll().execute();
expect(documentActivityLogRecords.length).to.eql(2);
expect(documentActivityLogRecords[0]).to.deep.include({
event: 'created',
eventData: null,
userId: 'user-1',
documentId: 'doc_1',
event_data: null,
user_id: 'user-1',
document_id: 'doc_1',
});
expect(documentActivityLogRecords[1]).to.deep.include({
event: 'created',
eventData: null,
userId: null,
documentId: 'doc_2',
event_data: null,
user_id: null,
document_id: 'doc_2',
});
});
@@ -390,7 +393,8 @@ describe('documents usecases', () => {
).rejects.toThrow(createOrganizationDocumentStorageLimitReachedError());
// Ensure no document is saved in the db
const documentRecords = await db.select().from(documentsTable);
const dbDocuments = await db.selectFrom('documents').selectAll().execute();
const documentRecords = dbDocuments.map(dbToDocument).filter(Boolean);
expect(documentRecords.length).to.eql(0);
// Ensure no file is saved in the storage
@@ -461,7 +465,8 @@ describe('documents usecases', () => {
}).then(resolve),
]);
const documentRecords = await db.select().from(documentsTable);
const dbDocuments = await db.selectFrom('documents').selectAll().execute();
const documentRecords = dbDocuments.map(dbToDocument).filter(Boolean);
expect(documentRecords.length).to.eql(1);
expect(documentRecords[0]).to.deep.include({
@@ -513,7 +518,8 @@ describe('documents usecases', () => {
).rejects.toThrow(createDocumentSizeTooLargeError());
// Ensure no document is saved in the db
const documentRecords = await db.select().from(documentsTable);
const dbDocuments = await db.selectFrom('documents').selectAll().execute();
const documentRecords = dbDocuments.map(dbToDocument).filter(Boolean);
expect(documentRecords.length).to.eql(0);
// Ensure no file is saved in the storage
@@ -539,15 +545,20 @@ describe('documents usecases', () => {
const taggingRulesRepository = createTaggingRulesRepository({ db });
const tagsRepository = createTagsRepository({ db });
await db.insert(documentsTable).values({
await db.insertInto('documents').values({
id: 'document-1',
organizationId: 'organization-1',
originalStorageKey: 'organization-1/originals/document-1.txt',
mimeType: 'text/plain',
organization_id: 'organization-1',
original_storage_key: 'organization-1/originals/document-1.txt',
original_size: 0,
mime_type: 'text/plain',
name: 'file-1.txt',
originalName: 'file-1.txt',
originalSha256Hash: 'b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9',
});
original_name: 'file-1.txt',
original_sha256_hash: 'b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9',
content: '',
created_at: Date.now(),
updated_at: Date.now(),
is_deleted: 0,
}).execute();
await documentsStorageService.saveFile({
fileStream: createReadableStream({ content: 'hello world' }),
@@ -570,7 +581,8 @@ describe('documents usecases', () => {
documentActivityRepository,
});
const documentRecords = await db.select().from(documentsTable);
const dbDocuments = await db.selectFrom('documents').selectAll().execute();
const documentRecords = dbDocuments.map(dbToDocument).filter(Boolean);
expect(documentRecords.length).to.eql(1);
expect(documentRecords[0]).to.deep.include({

View File

@@ -1,5 +1,5 @@
import type { Readable } from 'node:stream';
import type { Database } from '../app/database/database.types';
import type { DatabaseClient } from '../app/database/database.types';
import type { Config } from '../config/config.types';
import type { PlansRepository } from '../plans/plans.repository';
import type { Logger } from '../shared/logger/logger';
@@ -39,6 +39,7 @@ import { createDocumentAlreadyExistsError, createDocumentNotDeletedError, create
import { buildOriginalDocumentKey, generateDocumentId as generateDocumentIdImpl } from './documents.models';
import { createDocumentsRepository } from './documents.repository';
import { extractDocumentText } from './documents.services';
import { documentToDb } from './documents.models';
type DocumentStorageContext = {
storageKey: string;
@@ -197,7 +198,7 @@ export function createDocumentCreationUsecase({
documentsStorageService,
...initialDeps
}: {
db: Database;
db: DatabaseClient;
taskServices: TaskServices;
documentsStorageService: DocumentStorageService;
config: Config;
@@ -318,20 +319,23 @@ async function createNewDocument({
throw createOrganizationDocumentStorageLimitReachedError();
}
const [result, error] = await safely(documentsRepository.saveOrganizationDocument({
id: documentId,
name: fileName,
organizationId,
originalName: fileName,
createdBy: userId,
originalSize: size,
originalStorageKey: newFileStorageContext.storageKey,
fileEncryptionAlgorithm: newFileStorageContext.fileEncryptionAlgorithm,
fileEncryptionKekVersion: newFileStorageContext.fileEncryptionKekVersion,
fileEncryptionKeyWrapped: newFileStorageContext.fileEncryptionKeyWrapped,
mimeType,
originalSha256Hash: hash,
}));
const [result, error] = await safely(documentsRepository.saveOrganizationDocument(
documentToDb({
id: documentId,
name: fileName,
organizationId,
originalName: fileName,
createdBy: userId,
originalSize: size,
originalStorageKey: newFileStorageContext.storageKey,
fileEncryptionAlgorithm: newFileStorageContext.fileEncryptionAlgorithm,
fileEncryptionKekVersion: newFileStorageContext.fileEncryptionKekVersion,
fileEncryptionKeyWrapped: newFileStorageContext.fileEncryptionKeyWrapped,
mimeType,
originalSha256Hash: hash,
content: '', // Content will be extracted later by the extract-document-file-content task
}),
));
if (error) {
logger.error({ error }, 'Error while creating document');

View File

@@ -121,7 +121,7 @@ describe('documents e2e', () => {
});
// Ensure no document is saved in the db
const documentRecords = await db.select().from(documentsTable);
const documentRecords = await db.selectFrom('documents').selectAll().execute();
expect(documentRecords.length).to.eql(0);
// Ensure no file is saved in the storage

View File

@@ -4,7 +4,6 @@ import { Readable } from 'node:stream';
import { describe, expect, test } from 'vitest';
import { createInMemoryDatabase } from '../../../app/database/database.test-utils';
import { overrideConfig } from '../../../config/config.test-utils';
import { documentsTable } from '../../documents.table';
import { createDocumentCreationUsecase } from '../../documents.usecases';
import { createDocumentStorageServiceFromDriver } from '../documents.storage.services';
import { inMemoryStorageDriverFactory } from '../drivers/memory/memory.storage-driver';
@@ -91,14 +90,15 @@ describe('document-encryption usecases', () => {
// All documents should be encrypted
const [newDocument1, newDocument2, newDocument3] = await db.select().from(documentsTable).orderBy(documentsTable.createdAt);
const documents = await db.selectFrom('documents').selectAll().orderBy('created_at').execute();
const [newDocument1, newDocument2, newDocument3] = documents;
expect(storage.get(newDocument1!.originalStorageKey)?.content.subarray(0, 4).toString('utf-8')).toEqual('PP01');
expect(storage.get(newDocument2!.originalStorageKey)?.content.subarray(0, 4).toString('utf-8')).toEqual('PP01');
expect(storage.get(newDocument3!.originalStorageKey)?.content.subarray(0, 4).toString('utf-8')).toEqual('PP01');
expect(storage.get(newDocument1!.original_storage_key)?.content.subarray(0, 4).toString('utf-8')).toEqual('PP01');
expect(storage.get(newDocument2!.original_storage_key)?.content.subarray(0, 4).toString('utf-8')).toEqual('PP01');
expect(storage.get(newDocument3!.original_storage_key)?.content.subarray(0, 4).toString('utf-8')).toEqual('PP01');
// The document 3 should have the same original storage key
expect(document3.originalStorageKey).to.eql(newDocument3!.originalStorageKey);
expect(document3.originalStorageKey).to.eql(newDocument3!.original_storage_key);
});
});
});

View File

@@ -1,9 +1,7 @@
import type { Logger } from '@crowlog/logger';
import type { Database } from '../../../app/database/database.types';
import type { DatabaseClient } from '../../../app/database/database.types';
import type { DocumentStorageService } from '../documents.storage.services';
import { eq, isNull } from 'drizzle-orm';
import { createLogger } from '../../../shared/logger/logger';
import { documentsTable } from '../../documents.table';
export async function encryptAllUnencryptedDocuments({
db,
@@ -11,53 +9,55 @@ export async function encryptAllUnencryptedDocuments({
logger = createLogger({ namespace: 'encryptAllUnencryptedDocuments' }),
deleteUnencryptedAfterEncryption = true,
}: {
db: Database;
db: DatabaseClient;
logger?: Logger;
documentStorageService: DocumentStorageService;
deleteUnencryptedAfterEncryption?: boolean;
}) {
const documents = await db
.select({
id: documentsTable.id,
originalStorageKey: documentsTable.originalStorageKey,
fileName: documentsTable.originalName,
mimeType: documentsTable.mimeType,
})
.from(documentsTable)
.where(isNull(documentsTable.fileEncryptionKeyWrapped))
.orderBy(documentsTable.id);
.selectFrom('documents')
.select([
'id',
'original_storage_key',
'original_name',
'mime_type',
])
.where('file_encryption_key_wrapped', 'is', null)
.orderBy('id')
.execute();
logger.info(`Found ${documents.length} documents to encrypt`);
for (const { id, originalStorageKey, fileName, mimeType } of documents) {
logger.info(`Encrypting document ${id}`);
for (const doc of documents) {
logger.info(`Encrypting document ${doc.id}`);
const { fileStream } = await documentStorageService.getFileStream({
storageKey: originalStorageKey,
storageKey: doc.original_storage_key,
fileEncryptionKeyWrapped: null,
fileEncryptionAlgorithm: null,
fileEncryptionKekVersion: null,
});
const newStorageKey = `${originalStorageKey}.enc`;
const newStorageKey = `${doc.original_storage_key}.enc`;
const { storageKey, ...encryptionFields }
= await documentStorageService.saveFile({
fileStream,
fileName,
mimeType,
fileName: doc.original_name,
mimeType: doc.mime_type,
storageKey: newStorageKey,
});
await db
.update(documentsTable)
.updateTable('documents')
.set({
...encryptionFields,
originalStorageKey: storageKey,
original_storage_key: storageKey,
})
.where(eq(documentsTable.id, id));
.where('id', '=', doc.id)
.execute();
if (deleteUnencryptedAfterEncryption) {
await documentStorageService.deleteFile({
storageKey: originalStorageKey,
storageKey: doc.original_storage_key,
});
}
}

View File

@@ -1,4 +1,4 @@
import type { Database } from '../../app/database/database.types';
import type { DatabaseClient } from '../../app/database/database.types';
import type { TaskServices } from '../../tasks/tasks.services';
import type { DocumentStorageService } from '../storage/documents.storage.services';
import { createTaggingRulesRepository } from '../../tagging-rules/tagging-rules.repository';
@@ -8,7 +8,7 @@ import { createDocumentActivityRepository } from '../document-activity/document-
import { createDocumentsRepository } from '../documents.repository';
import { extractAndSaveDocumentFileContent } from '../documents.usecases';
export async function registerExtractDocumentFileContentTask({ taskServices, db, documentsStorageService }: { taskServices: TaskServices; db: Database; documentsStorageService: DocumentStorageService }) {
export async function registerExtractDocumentFileContentTask({ taskServices, db, documentsStorageService }: { taskServices: TaskServices; db: DatabaseClient; documentsStorageService: DocumentStorageService }) {
const taskName = 'extract-document-file-content';
taskServices.registerTask({

View File

@@ -1,4 +1,4 @@
import type { Database } from '../../app/database/database.types';
import type { DatabaseClient } from '../../app/database/database.types';
import type { Config } from '../../config/config.types';
import type { TaskServices } from '../../tasks/tasks.services';
import type { DocumentStorageService } from '../storage/documents.storage.services';
@@ -8,7 +8,7 @@ import { deleteExpiredDocuments } from '../documents.usecases';
const logger = createLogger({ namespace: 'documents:tasks:hardDeleteExpiredDocuments' });
export async function registerHardDeleteExpiredDocumentsTask({ taskServices, db, config, documentsStorageService }: { taskServices: TaskServices; db: Database; config: Config; documentsStorageService: DocumentStorageService }) {
export async function registerHardDeleteExpiredDocumentsTask({ taskServices, db, config, documentsStorageService }: { taskServices: TaskServices; db: DatabaseClient; config: Config; documentsStorageService: DocumentStorageService }) {
const taskName = 'hard-delete-expired-documents';
const { cron, runOnStartup } = config.tasks.hardDeleteExpiredDocuments;

View File

@@ -56,7 +56,7 @@ describe('ingestion-folders usecases', () => {
});
// Check database
const documents = await db.select().from(documentsTable);
const documents = await db.selectFrom('documents').selectAll().execute();
expect(documents).to.have.length(1);
expect(documents[0]).to.deep.include({
@@ -158,7 +158,7 @@ describe('ingestion-folders usecases', () => {
});
// Check database
const documents = await db.select().from(documentsTable);
const documents = await db.selectFrom('documents').selectAll().execute();
expect(documents).to.have.length(1);
expect(documents[0]).to.deep.include({
@@ -263,7 +263,7 @@ describe('ingestion-folders usecases', () => {
expect(error).to.deep.equal(createInvalidPostProcessingStrategyError({ strategy: 'unknown' }));
// Check database
const documents = await db.select().from(documentsTable);
const documents = await db.selectFrom('documents').selectAll().execute();
expect(documents).to.have.length(1);
expect(documents[0]).to.deep.include({
@@ -354,7 +354,7 @@ describe('ingestion-folders usecases', () => {
);
// Check database
const documents = await db.select().from(documentsTable);
const documents = await db.selectFrom('documents').selectAll().execute();
expect(documents).to.have.length(0);
});
@@ -407,7 +407,7 @@ describe('ingestion-folders usecases', () => {
});
// Check database
const documents = await db.select().from(documentsTable);
const documents = await db.selectFrom('documents').selectAll().execute();
expect(documents).to.have.length(0);
@@ -443,7 +443,7 @@ describe('ingestion-folders usecases', () => {
const { db } = await createInMemoryDatabase({
organizations: [{ id: 'org_111111111111111111111111', name: 'Org 1' }],
documents: [{ id: 'doc_1', organizationId: 'org_111111111111111111111111', name: 'hello.md', originalName: 'hello.md', originalStorageKey: 'hello.md', originalSha256Hash: loremIpsumSha256Hash, mimeType: 'text/markdown' }],
documents: [{ id: 'doc_1', organizationId: 'org_111111111111111111111111', name: 'hello.md', originalName: 'hello.md', originalStorageKey: 'hello.md', originalSha256Hash: loremIpsumSha256Hash, mimeType: 'text/markdown', content: '', originalSize: 0 }],
});
const organizationsRepository = createOrganizationsRepository({ db });
@@ -479,7 +479,7 @@ describe('ingestion-folders usecases', () => {
});
// Check database
const documents = await db.select().from(documentsTable);
const documents = await db.selectFrom('documents').selectAll().execute();
expect(documents).to.have.length(1);
expect(documents[0]?.id).to.equal('doc_1');
@@ -520,7 +520,7 @@ describe('ingestion-folders usecases', () => {
const { db } = await createInMemoryDatabase({
organizations: [{ id: 'org_111111111111111111111111', name: 'Org 1' }],
documents: [{ id: 'doc_1', organizationId: 'org_111111111111111111111111', name: 'hello.md', originalName: 'hello.md', originalStorageKey: 'hello.md', originalSha256Hash: loremIpsumSha256Hash, mimeType: 'text/markdown' }],
documents: [{ id: 'doc_1', organizationId: 'org_111111111111111111111111', name: 'hello.md', originalName: 'hello.md', originalStorageKey: 'hello.md', originalSha256Hash: loremIpsumSha256Hash, mimeType: 'text/markdown', content: '', originalSize: 0 }],
});
const organizationsRepository = createOrganizationsRepository({ db });
@@ -555,7 +555,7 @@ describe('ingestion-folders usecases', () => {
});
// Check database
const documents = await db.select().from(documentsTable);
const documents = await db.selectFrom('documents').selectAll().execute();
expect(documents).to.have.length(1);
expect(documents[0]?.id).to.equal('doc_1');

View File

@@ -1,5 +1,5 @@
import type { Stats } from 'node:fs';
import type { Database } from '../app/database/database.types';
import type { DatabaseClient } from '../app/database/database.types';
import type { Config } from '../config/config.types';
import type { CreateDocumentUsecase } from '../documents/documents.usecases';
import type { DocumentStorageService } from '../documents/storage/documents.storage.services';
@@ -34,7 +34,7 @@ export function createIngestionFolderWatcher({
}: {
config: Config;
logger?: Logger;
db: Database;
db: DatabaseClient;
taskServices: TaskServices;
documentsStorageService: DocumentStorageService;
}) {

View File

@@ -147,7 +147,7 @@ describe('intake-emails e2e', () => {
expect(response.status).to.eql(202);
const documents = await db.select().from(documentsTable);
const documents = await db.selectFrom('documents').selectAll().execute();
expect(documents).to.have.length(1);

View File

@@ -1,5 +1,10 @@
import type { DbInsertableIntakeEmail, DbSelectableIntakeEmail, InsertableIntakeEmail, IntakeEmail } from './intake-emails.new.tables';
import { createError } from '../shared/errors/errors';
import { generateId } from '../shared/random/ids';
import { isDefined, isNil } from '../shared/utils';
import { INTAKE_EMAIL_ID_PREFIX } from './intake-emails.constants';
const generateIntakeEmailId = () => generateId({ prefix: INTAKE_EMAIL_ID_PREFIX });
export function buildEmailAddress({
username,
@@ -59,3 +64,42 @@ export function getIsFromAllowedOrigin({
.map(allowedOrigin => allowedOrigin.toLowerCase())
.includes(origin.toLowerCase());
}
// DB <-> Business model transformers
// Maps a raw `intake_emails` row (snake_case columns, SQLite-encoded values)
// to the camelCase business model. Returns undefined when no row was found.
export function dbToIntakeEmail(dbIntakeEmail?: DbSelectableIntakeEmail): IntakeEmail | undefined {
  if (!dbIntakeEmail) {
    return undefined;
  }

  const { id, email_address, organization_id, allowed_origins, is_enabled, created_at, updated_at } = dbIntakeEmail;

  return {
    id,
    emailAddress: email_address,
    organizationId: organization_id,
    // allowed_origins is persisted as a JSON-encoded string array
    allowedOrigins: JSON.parse(allowed_origins) as string[],
    // is_enabled is stored as a 0/1 integer flag
    isEnabled: is_enabled === 1,
    createdAt: new Date(created_at),
    updatedAt: new Date(updated_at),
  };
}
// Maps a business-level insertable intake email to a raw `intake_emails` row.
// Fills in an id and timestamps when the caller did not provide them; `now`
// and `generateId` are injectable for deterministic tests.
export function intakeEmailToDb(
  intakeEmail: InsertableIntakeEmail,
  {
    now = new Date(),
    generateId = generateIntakeEmailId,
  }: {
    now?: Date;
    generateId?: () => string;
  } = {},
): DbInsertableIntakeEmail {
  const nowMs = now.getTime();

  return {
    id: intakeEmail.id ?? generateId(),
    email_address: intakeEmail.emailAddress,
    organization_id: intakeEmail.organizationId,
    // allowed origins are serialized as a JSON string array
    allowed_origins: JSON.stringify(intakeEmail.allowedOrigins ?? []),
    // NOTE(review): an omitted isEnabled maps to 0 (disabled) — confirm this
    // matches the previous schema-level default for newly created intake emails
    is_enabled: intakeEmail.isEnabled === true ? 1 : 0,
    created_at: intakeEmail.createdAt?.getTime() ?? nowMs,
    updated_at: intakeEmail.updatedAt?.getTime() ?? nowMs,
  };
}

View File

@@ -0,0 +1,26 @@
import type { Expand } from '@corentinth/chisels';
import type { Insertable, Selectable, Updateable } from 'kysely';
import type { BusinessInsertable, CamelCaseKeys, TableWithIdAndTimestamps } from '../app/database/database.columns.types';
// Kysely table definition for `intake_emails`. Column types are the raw
// SQLite representations (timestamps/booleans as integers, arrays as JSON).
export type IntakeEmailsTable = TableWithIdAndTimestamps<{
  email_address: string;
  organization_id: string;
  // JSON-encoded string[] of allowed sender origins (see intakeEmailToDb)
  allowed_origins: string;
  // boolean flag stored as a 0/1 integer (SQLite has no boolean type)
  is_enabled: number;
}>;

// Raw row shapes derived from the Kysely table definition.
export type DbSelectableIntakeEmail = Selectable<IntakeEmailsTable>;
export type DbInsertableIntakeEmail = Insertable<IntakeEmailsTable>;
export type DbUpdateableIntakeEmail = Updateable<IntakeEmailsTable>;

// Business-level insertable shape: camelCase keys with decoded value types.
export type InsertableIntakeEmail = BusinessInsertable<DbInsertableIntakeEmail, {
  allowedOrigins?: string[];
  isEnabled?: boolean;
}>;

// Business model: camelCase keys, timestamps as Date, JSON/boolean fields decoded.
export type IntakeEmail = Expand<CamelCaseKeys<Omit<DbSelectableIntakeEmail, 'created_at' | 'updated_at' | 'allowed_origins' | 'is_enabled'> & {
  createdAt: Date;
  updatedAt: Date;
  allowedOrigins: string[];
  isEnabled: boolean;
}>>;

View File

@@ -1,15 +1,15 @@
import type { Database } from '../app/database/database.types';
import type { DatabaseClient } from '../app/database/database.types';
import { injectArguments, safely } from '@corentinth/chisels';
import { and, count, eq } from 'drizzle-orm';
import { sql } from 'kysely';
import { isUniqueConstraintError } from '../shared/db/constraints.models';
import { createError } from '../shared/errors/errors';
import { omitUndefined } from '../shared/utils';
import { createIntakeEmailAlreadyExistsError, createIntakeEmailNotFoundError } from './intake-emails.errors';
import { intakeEmailsTable } from './intake-emails.tables';
import { dbToIntakeEmail, intakeEmailToDb } from './intake-emails.models';
export type IntakeEmailsRepository = ReturnType<typeof createIntakeEmailsRepository>;
export function createIntakeEmailsRepository({ db }: { db: Database }) {
export function createIntakeEmailsRepository({ db }: { db: DatabaseClient }) {
return injectArguments(
{
createIntakeEmail,
@@ -24,8 +24,14 @@ export function createIntakeEmailsRepository({ db }: { db: Database }) {
);
}
async function createIntakeEmail({ organizationId, emailAddress, db }: { organizationId: string; emailAddress: string; db: Database }) {
const [result, error] = await safely(db.insert(intakeEmailsTable).values({ organizationId, emailAddress }).returning());
async function createIntakeEmail({ organizationId, emailAddress, db }: { organizationId: string; emailAddress: string; db: DatabaseClient }) {
const [result, error] = await safely(
db
.insertInto('intake_emails')
.values(intakeEmailToDb({ organizationId, emailAddress }))
.returningAll()
.executeTakeFirst(),
);
if (isUniqueConstraintError({ error })) {
throw createIntakeEmailAlreadyExistsError();
@@ -35,7 +41,7 @@ async function createIntakeEmail({ organizationId, emailAddress, db }: { organiz
throw error;
}
const [intakeEmail] = result;
const intakeEmail = dbToIntakeEmail(result);
if (!intakeEmail) {
// Very unlikely to happen: a failed insertion should already have thrown; this check is for type safety
@@ -50,22 +56,26 @@ async function createIntakeEmail({ organizationId, emailAddress, db }: { organiz
return { intakeEmail };
}
async function updateIntakeEmail({ intakeEmailId, organizationId, isEnabled, allowedOrigins, db }: { intakeEmailId: string; organizationId: string; isEnabled?: boolean; allowedOrigins?: string[]; db: Database }) {
const [intakeEmail] = await db
.update(intakeEmailsTable)
.set(
omitUndefined({
isEnabled,
allowedOrigins,
}),
)
.where(
and(
eq(intakeEmailsTable.id, intakeEmailId),
eq(intakeEmailsTable.organizationId, organizationId),
),
)
.returning();
async function updateIntakeEmail({ intakeEmailId, organizationId, isEnabled, allowedOrigins, db }: { intakeEmailId: string; organizationId: string; isEnabled?: boolean; allowedOrigins?: string[]; db: DatabaseClient }) {
const updates: { is_enabled?: number; allowed_origins?: string } = {};
if (isEnabled !== undefined) {
updates.is_enabled = isEnabled ? 1 : 0;
}
if (allowedOrigins !== undefined) {
updates.allowed_origins = JSON.stringify(allowedOrigins);
}
const dbIntakeEmail = await db
.updateTable('intake_emails')
.set(omitUndefined(updates))
.where('id', '=', intakeEmailId)
.where('organization_id', '=', organizationId)
.returningAll()
.executeTakeFirst();
const intakeEmail = dbToIntakeEmail(dbIntakeEmail);
if (!intakeEmail) {
throw createIntakeEmailNotFoundError();
@@ -74,64 +84,63 @@ async function updateIntakeEmail({ intakeEmailId, organizationId, isEnabled, all
return { intakeEmail };
}
async function getIntakeEmail({ intakeEmailId, organizationId, db }: { intakeEmailId: string; organizationId: string; db: Database }) {
const [intakeEmail] = await db
.select()
.from(intakeEmailsTable)
.where(
and(
eq(intakeEmailsTable.id, intakeEmailId),
eq(intakeEmailsTable.organizationId, organizationId),
),
);
async function getIntakeEmail({ intakeEmailId, organizationId, db }: { intakeEmailId: string; organizationId: string; db: DatabaseClient }) {
const dbIntakeEmail = await db
.selectFrom('intake_emails')
.where('id', '=', intakeEmailId)
.where('organization_id', '=', organizationId)
.selectAll()
.executeTakeFirst();
const intakeEmail = dbToIntakeEmail(dbIntakeEmail);
return { intakeEmail };
}
async function getIntakeEmailByEmailAddress({ emailAddress, db }: { emailAddress: string; db: Database }) {
const [intakeEmail] = await db
.select()
.from(intakeEmailsTable)
.where(eq(intakeEmailsTable.emailAddress, emailAddress));
async function getIntakeEmailByEmailAddress({ emailAddress, db }: { emailAddress: string; db: DatabaseClient }) {
const dbIntakeEmail = await db
.selectFrom('intake_emails')
.where('email_address', '=', emailAddress)
.selectAll()
.executeTakeFirst();
const intakeEmail = dbToIntakeEmail(dbIntakeEmail);
return { intakeEmail };
}
async function getOrganizationIntakeEmails({ organizationId, db }: { organizationId: string; db: Database }) {
const intakeEmails = await db
.select()
.from(intakeEmailsTable)
.where(
eq(intakeEmailsTable.organizationId, organizationId),
);
async function getOrganizationIntakeEmails({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
const dbIntakeEmails = await db
.selectFrom('intake_emails')
.where('organization_id', '=', organizationId)
.selectAll()
.execute();
const intakeEmails = dbIntakeEmails.map(dbEmail => dbToIntakeEmail(dbEmail)).filter((email): email is NonNullable<typeof email> => email !== undefined);
return { intakeEmails };
}
async function deleteIntakeEmail({ intakeEmailId, organizationId, db }: { intakeEmailId: string; organizationId: string; db: Database }) {
async function deleteIntakeEmail({ intakeEmailId, organizationId, db }: { intakeEmailId: string; organizationId: string; db: DatabaseClient }) {
await db
.delete(intakeEmailsTable)
.where(
and(
eq(intakeEmailsTable.id, intakeEmailId),
eq(intakeEmailsTable.organizationId, organizationId),
),
);
.deleteFrom('intake_emails')
.where('id', '=', intakeEmailId)
.where('organization_id', '=', organizationId)
.execute();
}
async function getOrganizationIntakeEmailsCount({ organizationId, db }: { organizationId: string; db: Database }) {
const [record] = await db
.select({ intakeEmailCount: count() })
.from(intakeEmailsTable)
.where(
eq(intakeEmailsTable.organizationId, organizationId),
);
async function getOrganizationIntakeEmailsCount({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
const result = await db
.selectFrom('intake_emails')
.select(sql<number>`count(*)`.as('intake_email_count'))
.where('organization_id', '=', organizationId)
.executeTakeFirst();
if (!record) {
if (!result) {
throw createIntakeEmailNotFoundError();
}
const { intakeEmailCount } = record;
const { intake_email_count: intakeEmailCount } = result;
return { intakeEmailCount };
}

View File

@@ -49,10 +49,10 @@ describe('intake-emails usecases', () => {
createDocument,
});
const documents = await db.select().from(documentsTable).orderBy(asc(documentsTable.name));
const documents = await db.selectFrom('documents').selectAll().orderBy('name', 'asc').execute();
expect(
documents.map(doc => pick(doc, ['organizationId', 'name', 'mimeType', 'originalName'])),
documents.map((doc: typeof documents[0]) => pick(doc, ['organizationId', 'name', 'mimeType', 'originalName'])),
).to.eql([
{ organizationId: 'org-1', name: 'file1.txt', mimeType: 'text/plain', originalName: 'file1.txt' },
{ organizationId: 'org-1', name: 'file2.txt', mimeType: 'text/plain', originalName: 'file2.txt' },
@@ -92,7 +92,7 @@ describe('intake-emails usecases', () => {
expect(loggerTransport.getLogs({ excludeTimestampMs: true })).to.eql([
{ level: 'info', message: 'Intake email is disabled', namespace: 'test', data: {} },
]);
expect(await db.select().from(documentsTable)).to.eql([]);
expect(await db.selectFrom('documents').selectAll().execute()).to.eql([]);
});
test('when no intake email is found for the recipient, nothing happens, only a log is emitted', async () => {
@@ -125,7 +125,7 @@ describe('intake-emails usecases', () => {
expect(loggerTransport.getLogs({ excludeTimestampMs: true })).to.eql([
{ level: 'info', message: 'Intake email not found', namespace: 'test', data: { } },
]);
expect(await db.select().from(documentsTable)).to.eql([]);
expect(await db.selectFrom('documents').selectAll().execute()).to.eql([]);
});
test(`in order to be processed, the emitter of the email must be allowed for the intake email
@@ -170,7 +170,7 @@ describe('intake-emails usecases', () => {
},
},
]);
expect(await db.select().from(documentsTable)).to.eql([]);
expect(await db.selectFrom('documents').selectAll().execute()).to.eql([]);
});
});
});
@@ -210,10 +210,10 @@ describe('intake-emails usecases', () => {
createDocument,
});
const documents = await db.select().from(documentsTable).orderBy(asc(documentsTable.organizationId));
const documents = await db.selectFrom('documents').selectAll().orderBy('organization_id', 'asc').execute();
expect(
documents.map(doc => pick(doc, ['organizationId', 'name', 'mimeType', 'originalName'])),
documents.map((doc: typeof documents[0]) => pick(doc, ['organizationId', 'name', 'mimeType', 'originalName'])),
).to.eql([
{ organizationId: 'org-1', name: 'file1.txt', mimeType: 'text/plain', originalName: 'file1.txt' },
{ organizationId: 'org-2', name: 'file1.txt', mimeType: 'text/plain', originalName: 'file1.txt' },
@@ -263,10 +263,15 @@ describe('intake-emails usecases', () => {
intakeEmailsRepository,
});
await db.insert(intakeEmailsTable).values({
organizationId: 'org-1',
emailAddress: 'email-2@papra.email',
});
await db.insertInto('intake_emails').values({
id: 'intake-email-2',
organization_id: 'org-1',
email_address: 'email-2@papra.email',
created_at: Date.now(),
updated_at: Date.now(),
allowed_origins: JSON.stringify([]),
is_enabled: 1,
}).execute();
await expect(
checkIfOrganizationCanCreateNewIntakeEmail({

View File

@@ -1,5 +1,30 @@
import type {
DbInsertableOrganization,
DbInsertableOrganizationInvitation,
DbInsertableOrganizationMember,
DbSelectableOrganization,
DbSelectableOrganizationInvitation,
DbSelectableOrganizationMember,
InsertableOrganization,
InsertableOrganizationInvitation,
InsertableOrganizationMember,
Organization,
OrganizationInvitation,
OrganizationMember,
} from './organizations.tables';
import type { OrganizationRole } from './organizations.types';
import { ORGANIZATION_ROLES } from './organizations.constants';
import { generateId } from '../shared/random/ids';
import { isNil } from '../shared/utils';
import {
ORGANIZATION_ID_PREFIX,
ORGANIZATION_INVITATION_ID_PREFIX,
ORGANIZATION_MEMBER_ID_PREFIX,
ORGANIZATION_ROLES,
} from './organizations.constants';
const generateOrganizationId = () => generateId({ prefix: ORGANIZATION_ID_PREFIX });
const generateOrganizationMemberId = () => generateId({ prefix: ORGANIZATION_MEMBER_ID_PREFIX });
const generateOrganizationInvitationId = () => generateId({ prefix: ORGANIZATION_INVITATION_ID_PREFIX });
export function canUserRemoveMemberFromOrganization({
userRole,
@@ -18,3 +43,128 @@ export function canUserRemoveMemberFromOrganization({
return true;
}
// DB <-> Business model transformers
// Organization transformers
// Maps a raw `organizations` row to the camelCase business model. Nullable
// timestamp columns map to null, others to Date instances.
export function dbToOrganization(dbOrganization?: DbSelectableOrganization): Organization | undefined {
  if (!dbOrganization) {
    return undefined;
  }

  const toNullableDate = (timestamp: number | null | undefined): Date | null =>
    timestamp == null ? null : new Date(timestamp);

  return {
    id: dbOrganization.id,
    name: dbOrganization.name,
    customerId: dbOrganization.customer_id,
    deletedBy: dbOrganization.deleted_by,
    createdAt: new Date(dbOrganization.created_at),
    updatedAt: new Date(dbOrganization.updated_at),
    deletedAt: toNullableDate(dbOrganization.deleted_at),
    scheduledPurgeAt: toNullableDate(dbOrganization.scheduled_purge_at),
  };
}
// Maps a business-level insertable organization to a raw `organizations` row.
// Generates an id and timestamps when the caller did not provide them; `now`
// and `generateId` are injectable for deterministic tests.
export function organizationToDb(
  organization: InsertableOrganization,
  {
    now = new Date(),
    generateId = generateOrganizationId,
  }: {
    now?: Date;
    generateId?: () => string;
  } = {},
): DbInsertableOrganization {
  const nowMs = now.getTime();

  return {
    id: organization.id ?? generateId(),
    name: organization.name,
    customer_id: organization.customerId,
    deleted_by: organization.deletedBy,
    created_at: organization.createdAt?.getTime() ?? nowMs,
    updated_at: organization.updatedAt?.getTime() ?? nowMs,
    // undefined (not null) means the column is omitted from the INSERT
    deleted_at: organization.deletedAt?.getTime(),
    scheduled_purge_at: organization.scheduledPurgeAt?.getTime(),
  };
}
// Organization Member transformers
// Maps a raw `organization_members` row to the camelCase business model.
// Overloads preserve the input's defined-ness in the return type.
export function dbToOrganizationMember(dbMember: undefined): undefined;
export function dbToOrganizationMember(dbMember: DbSelectableOrganizationMember): OrganizationMember;
export function dbToOrganizationMember(dbMember?: DbSelectableOrganizationMember): OrganizationMember | undefined {
  if (!dbMember) {
    return undefined;
  }

  const { id, organization_id, user_id, role, created_at, updated_at } = dbMember;

  return {
    id,
    organizationId: organization_id,
    userId: user_id,
    // NOTE(review): cast assumes the column only ever holds these role values;
    // not validated at runtime
    role: role as 'owner' | 'admin' | 'member',
    createdAt: new Date(created_at),
    updatedAt: new Date(updated_at),
  };
}
/**
 * Converts an `InsertableOrganizationMember` business model into a row for
 * the `organization_members` table (snake_case columns, epoch-ms timestamps).
 *
 * A missing `id` is generated via `generateId`; missing created/updated
 * timestamps default to `now`.
 */
export function organizationMemberToDb(
  member: InsertableOrganizationMember,
  {
    now = new Date(),
    generateId = generateOrganizationMemberId,
  }: {
    now?: Date;
    generateId?: () => string;
  } = {},
): DbInsertableOrganizationMember {
  const fallbackTimestamp = now.getTime();

  return {
    id: member.id ?? generateId(),
    organization_id: member.organizationId,
    user_id: member.userId,
    role: member.role,
    created_at: member.createdAt?.getTime() ?? fallbackTimestamp,
    updated_at: member.updatedAt?.getTime() ?? fallbackTimestamp,
  };
}
// Organization Invitation transformers
/**
 * Maps a raw `organization_invitations` row (snake_case columns, epoch-ms
 * timestamps) to the camelCase `OrganizationInvitation` business model.
 *
 * Returns `undefined` when no row is given.
 */
export function dbToOrganizationInvitation(dbInvitation?: DbSelectableOrganizationInvitation): OrganizationInvitation | undefined {
  if (!dbInvitation) {
    return undefined;
  }

  const {
    id,
    organization_id: organizationId,
    email,
    role,
    status,
    inviter_id: inviterId,
    created_at: createdAt,
    updated_at: updatedAt,
    expires_at: expiresAt,
  } = dbInvitation;

  return {
    id,
    organizationId,
    email,
    // role/status are stored as plain text; narrow them to the known unions.
    role: role as 'owner' | 'admin' | 'member',
    status: status as 'pending' | 'accepted' | 'rejected' | 'expired' | 'cancelled',
    inviterId,
    createdAt: new Date(createdAt),
    updatedAt: new Date(updatedAt),
    expiresAt: new Date(expiresAt),
  };
}
/**
 * Converts an `InsertableOrganizationInvitation` business model into a row
 * for the `organization_invitations` table (snake_case columns, epoch-ms
 * timestamps).
 *
 * A missing `id` is generated via `generateId`; missing created/updated
 * timestamps default to `now`. `expiresAt` is required on the model.
 */
export function organizationInvitationToDb(
  invitation: InsertableOrganizationInvitation,
  {
    now = new Date(),
    generateId = generateOrganizationInvitationId,
  }: {
    now?: Date;
    generateId?: () => string;
  } = {},
): DbInsertableOrganizationInvitation {
  const fallbackTimestamp = now.getTime();

  return {
    id: invitation.id ?? generateId(),
    organization_id: invitation.organizationId,
    email: invitation.email,
    role: invitation.role,
    status: invitation.status,
    inviter_id: invitation.inviterId,
    created_at: invitation.createdAt?.getTime() ?? fallbackTimestamp,
    updated_at: invitation.updatedAt?.getTime() ?? fallbackTimestamp,
    expires_at: invitation.expiresAt.getTime(),
  };
}

View File

@@ -1,4 +1,4 @@
import type { OrganizationInvitation } from './organizations.types';
import type { OrganizationInvitation } from './organizations.tables';
import { describe, expect, test } from 'vitest';
import { ORGANIZATION_INVITATION_STATUS } from './organizations.constants';
import { ensureInvitationStatus } from './organizations.repository.models';

View File

@@ -1,4 +1,4 @@
import type { OrganizationInvitation } from './organizations.types';
import type { OrganizationInvitation } from './organizations.tables';
import { isAfter } from 'date-fns';
import { ORGANIZATION_INVITATION_STATUS } from './organizations.constants';

View File

@@ -46,7 +46,7 @@ describe('organizations repository', () => {
await organizationsRepository.updateExpiredPendingInvitationsStatus({ now: new Date('2025-05-13') });
const invitations = await db.select().from(organizationInvitationsTable).orderBy(organizationInvitationsTable.id);
const invitations = await db.selectFrom('organization_invitations').selectAll().orderBy('id', 'asc').execute();
expect(invitations).to.eql([
{
@@ -97,10 +97,10 @@ describe('organizations repository', () => {
await organizationsRepository.deleteAllMembersFromOrganization({ organizationId: 'org_1' });
const remainingMembers = await db.select().from(organizationMembersTable);
const remainingMembers = await db.selectFrom('organization_members').selectAll().execute();
expect(remainingMembers).to.have.lengthOf(1);
expect(remainingMembers[0]?.organizationId).to.equal('org_2');
expect(remainingMembers[0]?.organization_id).to.equal('org_2');
});
});
@@ -147,10 +147,10 @@ describe('organizations repository', () => {
await organizationsRepository.deleteAllOrganizationInvitations({ organizationId: 'org_1' });
const remainingInvitations = await db.select().from(organizationInvitationsTable);
const remainingInvitations = await db.selectFrom('organization_invitations').selectAll().execute();
expect(remainingInvitations).to.have.lengthOf(1);
expect(remainingInvitations[0]?.organizationId).to.equal('org_2');
expect(remainingInvitations[0]?.organization_id).to.equal('org_2');
});
});
@@ -175,11 +175,11 @@ describe('organizations repository', () => {
purgeDaysDelay: 30,
});
const [organization] = await db.select().from(organizationsTable);
const [organization] = await db.selectFrom('organizations').selectAll().execute();
expect(organization?.deletedAt).to.eql(now);
expect(organization?.deletedBy).to.equal('user_1');
expect(organization?.scheduledPurgeAt).to.eql(expectedPurgeDate);
expect(organization?.deleted_at).to.eql(now.getTime());
expect(organization?.deleted_by).to.equal('user_1');
expect(organization?.scheduled_purge_at).to.eql(expectedPurgeDate.getTime());
});
test('uses default purge delay of 30 days when not specified', async () => {
@@ -201,9 +201,9 @@ describe('organizations repository', () => {
now,
});
const [organization] = await db.select().from(organizationsTable);
const [organization] = await db.selectFrom('organizations').selectAll().execute();
expect(organization?.scheduledPurgeAt).to.eql(expectedPurgeDate);
expect(organization?.scheduled_purge_at).to.eql(expectedPurgeDate.getTime());
});
});
});

View File

@@ -1,19 +1,18 @@
import type { Database } from '../app/database/database.types';
import type { DbInsertableOrganization, OrganizationInvitationStatus, OrganizationRole } from './organizations.types';
import type { DatabaseClient } from '../app/database/database.types';
import type { OrganizationInvitationStatus, OrganizationRole } from './organizations.types';
import { injectArguments } from '@corentinth/chisels';
import { addDays, startOfDay } from 'date-fns';
import { and, count, eq, getTableColumns, gte, isNotNull, isNull, lte } from 'drizzle-orm';
import { omit } from 'lodash-es';
import { sql } from 'kysely';
import { omitUndefined } from '../shared/utils';
import { usersTable } from '../users/users.table';
import { ORGANIZATION_INVITATION_STATUS, ORGANIZATION_ROLES } from './organizations.constants';
import { createOrganizationNotFoundError } from './organizations.errors';
import { ensureInvitationStatus } from './organizations.repository.models';
import { organizationInvitationsTable, organizationMembersTable, organizationsTable } from './organizations.table';
import { dbToOrganization, dbToOrganizationInvitation, dbToOrganizationMember, organizationInvitationToDb, organizationMemberToDb, organizationToDb } from './organizations.models';
import type { DbInsertableOrganization } from './organizations.tables';
export type OrganizationsRepository = ReturnType<typeof createOrganizationsRepository>;
export function createOrganizationsRepository({ db }: { db: Database }) {
export function createOrganizationsRepository({ db }: { db: DatabaseClient }) {
return injectArguments(
{
saveOrganization,
@@ -54,220 +53,281 @@ export function createOrganizationsRepository({ db }: { db: Database }) {
);
}
async function saveOrganization({ organization: organizationToInsert, db }: { organization: DbInsertableOrganization; db: Database }) {
const [organization] = await db.insert(organizationsTable).values(organizationToInsert).returning();
async function saveOrganization({ organization: organizationToInsert, db }: { organization: DbInsertableOrganization; db: DatabaseClient }) {
const dbOrganization = await db
.insertInto('organizations')
.values(organizationToInsert)
.returningAll()
.executeTakeFirst();
if (!organization) {
if (!dbOrganization) {
// This should never happen, as the database should always return the inserted organization
// guard for type safety
throw new Error('Failed to save organization');
}
return { organization };
}
async function getUserOrganizations({ userId, db }: { userId: string; db: Database }) {
const organizations = await db
.select({
organization: getTableColumns(organizationsTable),
})
.from(organizationsTable)
.leftJoin(organizationMembersTable, eq(organizationsTable.id, organizationMembersTable.organizationId))
.where(and(
eq(organizationMembersTable.userId, userId),
isNull(organizationsTable.deletedAt),
));
return {
organizations: organizations.map(({ organization }) => organization),
};
}
async function addUserToOrganization({ userId, organizationId, role, db }: { userId: string; organizationId: string; role: OrganizationRole; db: Database }) {
await db.insert(organizationMembersTable).values({ userId, organizationId, role });
}
async function isUserInOrganization({ userId, organizationId, db }: { userId: string; organizationId: string; db: Database }) {
const organizationUser = await db
.select()
.from(organizationMembersTable)
.where(and(
eq(organizationMembersTable.userId, userId),
eq(organizationMembersTable.organizationId, organizationId),
));
return {
isInOrganization: organizationUser.length > 0,
};
}
async function updateOrganization({ organizationId, organization: organizationToUpdate, db }: { organizationId: string; organization: { name?: string; customerId?: string }; db: Database }) {
const [organization] = await db
.update(organizationsTable)
.set(omitUndefined(organizationToUpdate))
.where(eq(organizationsTable.id, organizationId))
.returning();
const organization = dbToOrganization(dbOrganization);
return { organization };
}
async function deleteOrganization({ organizationId, db }: { organizationId: string; db: Database }) {
await db.delete(organizationsTable).where(eq(organizationsTable.id, organizationId));
}
async function getOrganizationById({ organizationId, db }: { organizationId: string; db: Database }) {
const [organization] = await db
.select()
.from(organizationsTable)
.where(eq(organizationsTable.id, organizationId));
async function getUserOrganizations({ userId, db }: { userId: string; db: DatabaseClient }) {
const dbOrganizations = await db
.selectFrom('organizations')
.innerJoin('organization_members', 'organizations.id', 'organization_members.organization_id')
.where('organization_members.user_id', '=', userId)
.where('organizations.deleted_at', 'is', null)
.select([
'organizations.id',
'organizations.name',
'organizations.customer_id',
'organizations.deleted_at',
'organizations.deleted_by',
'organizations.scheduled_purge_at',
'organizations.created_at',
'organizations.updated_at',
])
.execute();
return {
organization,
organizations: dbOrganizations.map(dbOrg => dbToOrganization(dbOrg)).filter((org): org is NonNullable<typeof org> => org !== undefined),
};
}
async function getUserOwnedOrganizationCount({ userId, db }: { userId: string; db: Database }) {
const [record] = await db
.select({
organizationCount: count(organizationMembersTable.id),
})
.from(organizationMembersTable)
.where(
and(
eq(organizationMembersTable.userId, userId),
eq(organizationMembersTable.role, ORGANIZATION_ROLES.OWNER),
),
);
async function addUserToOrganization({ userId, organizationId, role, db }: { userId: string; organizationId: string; role: OrganizationRole; db: DatabaseClient }) {
await db
.insertInto('organization_members')
.values(organizationMemberToDb({ userId, organizationId, role }))
.execute();
}
if (!record) {
async function isUserInOrganization({ userId, organizationId, db }: { userId: string; organizationId: string; db: DatabaseClient }) {
const member = await db
.selectFrom('organization_members')
.where('user_id', '=', userId)
.where('organization_id', '=', organizationId)
.selectAll()
.executeTakeFirst();
return {
isInOrganization: member !== undefined,
};
}
async function updateOrganization({ organizationId, organization: organizationToUpdate, db }: { organizationId: string; organization: { name?: string; customerId?: string }; db: DatabaseClient }) {
const updateValues = omitUndefined({
name: organizationToUpdate.name,
customer_id: organizationToUpdate.customerId,
});
const dbOrganization = await db
.updateTable('organizations')
.set(updateValues)
.where('id', '=', organizationId)
.returningAll()
.executeTakeFirst();
return { organization: dbToOrganization(dbOrganization) };
}
async function deleteOrganization({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
await db
.deleteFrom('organizations')
.where('id', '=', organizationId)
.execute();
}
async function getOrganizationById({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
const dbOrganization = await db
.selectFrom('organizations')
.where('id', '=', organizationId)
.selectAll()
.executeTakeFirst();
return {
organization: dbToOrganization(dbOrganization),
};
}
async function getUserOwnedOrganizationCount({ userId, db }: { userId: string; db: DatabaseClient }) {
const result = await db
.selectFrom('organization_members')
.select(sql<number>`count(*)`.as('organization_count'))
.where('user_id', '=', userId)
.where('role', '=', ORGANIZATION_ROLES.OWNER)
.executeTakeFirst();
if (!result) {
throw createOrganizationNotFoundError();
}
const { organizationCount } = record;
const organizationCount = result.organization_count;
return {
organizationCount,
};
}
async function getOrganizationOwner({ organizationId, db }: { organizationId: string; db: Database }) {
const [record] = await db
.select({
organizationOwner: getTableColumns(usersTable),
})
.from(usersTable)
.leftJoin(organizationMembersTable, eq(usersTable.id, organizationMembersTable.userId))
.where(
and(
eq(organizationMembersTable.organizationId, organizationId),
eq(organizationMembersTable.role, ORGANIZATION_ROLES.OWNER),
),
);
async function getOrganizationOwner({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
const organizationOwner = await db
.selectFrom('users')
.innerJoin('organization_members', 'users.id', 'organization_members.user_id')
.where('organization_members.organization_id', '=', organizationId)
.where('organization_members.role', '=', ORGANIZATION_ROLES.OWNER)
.select([
'users.id',
'users.email',
'users.email_verified',
'users.name',
'users.image',
'users.max_organization_count',
'users.created_at',
'users.updated_at',
])
.executeTakeFirst();
if (!record) {
if (!organizationOwner) {
throw createOrganizationNotFoundError();
}
const { organizationOwner } = record;
return { organizationOwner };
}
async function getOrganizationMembersCount({ organizationId, db }: { organizationId: string; db: Database }) {
const [record] = await db
.select({
membersCount: count(organizationMembersTable.id),
})
.from(organizationMembersTable)
.where(
eq(organizationMembersTable.organizationId, organizationId),
);
async function getOrganizationMembersCount({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
const result = await db
.selectFrom('organization_members')
.select(sql<number>`count(*)`.as('members_count'))
.where('organization_id', '=', organizationId)
.executeTakeFirst();
if (!record) {
if (!result) {
throw createOrganizationNotFoundError();
}
const { membersCount } = record;
const membersCount = result.members_count;
return {
membersCount,
};
}
async function getAllOrganizationIds({ db }: { db: Database }) {
const organizationIds = await db.select({ id: organizationsTable.id }).from(organizationsTable);
async function getAllOrganizationIds({ db }: { db: DatabaseClient }) {
const results = await db
.selectFrom('organizations')
.select('id')
.execute();
return {
organizationIds: organizationIds.map(({ id }) => id),
organizationIds: results.map(({ id }) => id),
};
}
async function getOrganizationMembers({ organizationId, db }: { organizationId: string; db: Database }) {
const members = await db
.select()
.from(organizationMembersTable)
.leftJoin(usersTable, eq(organizationMembersTable.userId, usersTable.id))
.where(
eq(organizationMembersTable.organizationId, organizationId),
);
async function getOrganizationMembers({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
const results = await db
.selectFrom('organization_members')
.leftJoin('users', 'organization_members.user_id', 'users.id')
.where('organization_members.organization_id', '=', organizationId)
.select([
'organization_members.id',
'organization_members.organization_id',
'organization_members.user_id',
'organization_members.role',
'organization_members.created_at',
'organization_members.updated_at',
'users.id as user_id_col',
'users.email',
'users.email_verified',
'users.name',
'users.image',
'users.max_organization_count',
'users.created_at as user_created_at',
'users.updated_at as user_updated_at',
])
.execute();
return {
members: members.map(({ organization_members, users }) => ({
...organization_members,
user: users,
})),
members: results.map((result) => {
const member = {
id: result.id,
organization_id: result.organization_id,
user_id: result.user_id,
role: result.role,
created_at: result.created_at,
updated_at: result.updated_at,
};
const user = result.user_id_col
? {
id: result.user_id_col,
email: result.email!,
email_verified: result.email_verified!,
name: result.name!,
image: result.image!,
max_organization_count: result.max_organization_count!,
created_at: result.user_created_at!,
updated_at: result.user_updated_at!,
}
: null;
return {
...member,
user,
};
}),
};
}
async function removeUserFromOrganization({ userId, organizationId, db }: { userId: string; organizationId: string; db: Database }) {
async function removeUserFromOrganization({ userId, organizationId, db }: { userId: string; organizationId: string; db: DatabaseClient }) {
await db
.delete(organizationMembersTable)
.where(
and(
eq(organizationMembersTable.userId, userId),
eq(organizationMembersTable.organizationId, organizationId),
),
);
.deleteFrom('organization_members')
.where('user_id', '=', userId)
.where('organization_id', '=', organizationId)
.execute();
}
async function updateOrganizationMemberRole({ memberId, role, db }: { memberId: string; role: OrganizationRole; db: Database }) {
const [updatedMember] = await db
.update(organizationMembersTable)
async function updateOrganizationMemberRole({ memberId, role, db }: { memberId: string; role: OrganizationRole; db: DatabaseClient }) {
const dbMember = await db
.updateTable('organization_members')
.set({ role })
.where(
eq(organizationMembersTable.id, memberId),
)
.returning();
.where('id', '=', memberId)
.returningAll()
.executeTakeFirst();
return { member: updatedMember };
if (!dbMember) {
return { member: undefined };
}
return { member: dbToOrganizationMember(dbMember) };
}
async function getOrganizationMemberByUserId({ userId, organizationId, db }: { userId: string; organizationId: string; db: Database }) {
const [member] = await db
.select()
.from(organizationMembersTable)
.where(
and(
eq(organizationMembersTable.userId, userId),
eq(organizationMembersTable.organizationId, organizationId),
),
);
async function getOrganizationMemberByUserId({ userId, organizationId, db }: { userId: string; organizationId: string; db: DatabaseClient }) {
const dbMember = await db
.selectFrom('organization_members')
.where('user_id', '=', userId)
.where('organization_id', '=', organizationId)
.selectAll()
.executeTakeFirst();
return { member };
if (!dbMember) {
return { member: undefined };
}
return { member: dbToOrganizationMember(dbMember) };
}
async function getOrganizationMemberByMemberId({ memberId, organizationId, db }: { memberId: string; organizationId: string; db: Database }) {
const [member] = await db
.select()
.from(organizationMembersTable)
.where(
and(
eq(organizationMembersTable.id, memberId),
eq(organizationMembersTable.organizationId, organizationId),
),
);
async function getOrganizationMemberByMemberId({ memberId, organizationId, db }: { memberId: string; organizationId: string; db: DatabaseClient }) {
const dbMember = await db
.selectFrom('organization_members')
.where('id', '=', memberId)
.where('organization_id', '=', organizationId)
.selectAll()
.executeTakeFirst();
return { member };
if (!dbMember) {
return { member: undefined };
}
return { member: dbToOrganizationMember(dbMember) };
}
async function saveOrganizationInvitation({
@@ -283,259 +343,298 @@ async function saveOrganizationInvitation({
email: string;
role: OrganizationRole;
inviterId: string;
db: Database;
db: DatabaseClient;
expirationDelayDays?: number;
now?: Date;
}) {
const [organizationInvitation] = await db
.insert(organizationInvitationsTable)
.values({
const dbInvitation = await db
.insertInto('organization_invitations')
.values(organizationInvitationToDb({
organizationId,
email,
role,
inviterId,
status: ORGANIZATION_INVITATION_STATUS.PENDING,
expiresAt: addDays(now, expirationDelayDays),
})
.returning();
}, { now }))
.returningAll()
.executeTakeFirst();
return { organizationInvitation };
return { organizationInvitation: dbToOrganizationInvitation(dbInvitation) };
}
async function getTodayUserInvitationCount({ userId, db, now = new Date() }: { userId: string; db: Database; now?: Date }) {
const [record] = await db
.select({
userInvitationCount: count(organizationInvitationsTable.id),
})
.from(organizationInvitationsTable)
.where(
and(
eq(organizationInvitationsTable.inviterId, userId),
gte(organizationInvitationsTable.createdAt, startOfDay(now)),
),
);
async function getTodayUserInvitationCount({ userId, db, now = new Date() }: { userId: string; db: DatabaseClient; now?: Date }) {
const result = await db
.selectFrom('organization_invitations')
.select(sql<number>`count(*)`.as('user_invitation_count'))
.where('inviter_id', '=', userId)
.where('created_at', '>=', startOfDay(now).getTime())
.executeTakeFirst();
if (!record) {
if (!result) {
throw createOrganizationNotFoundError();
}
const { userInvitationCount } = record;
const userInvitationCount = result.user_invitation_count;
return {
userInvitationCount,
};
}
async function getPendingOrganizationInvitationsForEmail({ email, db, now = new Date() }: { email: string; db: Database; now?: Date }) {
const rawInvitations = await db
.select()
.from(organizationInvitationsTable)
.leftJoin(organizationsTable, eq(organizationInvitationsTable.organizationId, organizationsTable.id))
.where(
and(
eq(organizationInvitationsTable.email, email),
eq(organizationInvitationsTable.status, ORGANIZATION_INVITATION_STATUS.PENDING),
// To ensure we don't count just expired invitations that haven't been marked as expired yet
gte(organizationInvitationsTable.expiresAt, now),
),
);
async function getPendingOrganizationInvitationsForEmail({ email, db, now = new Date() }: { email: string; db: DatabaseClient; now?: Date }) {
const results = await db
.selectFrom('organization_invitations')
.leftJoin('organizations', 'organization_invitations.organization_id', 'organizations.id')
.where('organization_invitations.email', '=', email)
.where('organization_invitations.status', '=', ORGANIZATION_INVITATION_STATUS.PENDING)
.where('organization_invitations.expires_at', '>=', now.getTime())
.select([
'organization_invitations.id',
'organization_invitations.organization_id',
'organization_invitations.email',
'organization_invitations.role',
'organization_invitations.status',
'organization_invitations.inviter_id',
'organization_invitations.expires_at',
'organization_invitations.created_at',
'organization_invitations.updated_at',
'organizations.id as org_id',
'organizations.name',
'organizations.customer_id',
'organizations.deleted_at',
'organizations.deleted_by',
'organizations.scheduled_purge_at',
'organizations.created_at as org_created_at',
'organizations.updated_at as org_updated_at',
])
.execute();
const invitations = rawInvitations.map(({ organization_invitations, organizations }) => ({
...omit(organization_invitations, ''),
organization: organizations,
}));
const invitations = results.map((result) => {
const invitation = {
id: result.id,
organization_id: result.organization_id,
email: result.email,
role: result.role,
status: result.status,
inviter_id: result.inviter_id,
expires_at: result.expires_at,
created_at: result.created_at,
updated_at: result.updated_at,
};
const organization = result.org_id
? {
id: result.org_id,
name: result.name!,
customer_id: result.customer_id!,
deleted_at: result.deleted_at!,
deleted_by: result.deleted_by!,
scheduled_purge_at: result.scheduled_purge_at!,
created_at: result.org_created_at!,
updated_at: result.org_updated_at!,
}
: null;
return {
...invitation,
organization,
};
});
return {
invitations,
};
}
async function getOrganizationInvitationById({ invitationId, db, now = new Date() }: { invitationId: string; db: Database; now?: Date }) {
const [invitation] = await db
.select()
.from(organizationInvitationsTable)
.where(
eq(organizationInvitationsTable.id, invitationId),
);
async function getOrganizationInvitationById({ invitationId, db, now = new Date() }: { invitationId: string; db: DatabaseClient; now?: Date }) {
const dbInvitation = await db
.selectFrom('organization_invitations')
.where('id', '=', invitationId)
.selectAll()
.executeTakeFirst();
const invitation = dbToOrganizationInvitation(dbInvitation);
return {
invitation: ensureInvitationStatus({ invitation, now }),
};
}
async function updateOrganizationInvitation({ invitationId, status, expiresAt, db }: { invitationId: string; status: OrganizationInvitationStatus; expiresAt?: Date; db: Database }) {
async function updateOrganizationInvitation({ invitationId, status, expiresAt, db }: { invitationId: string; status: OrganizationInvitationStatus; expiresAt?: Date; db: DatabaseClient }) {
await db
.update(organizationInvitationsTable)
.updateTable('organization_invitations')
.set(omitUndefined({
status,
expiresAt,
expires_at: expiresAt?.getTime(),
}))
.where(
eq(organizationInvitationsTable.id, invitationId),
);
.where('id', '=', invitationId)
.execute();
}
async function getPendingInvitationsCount({ email, db, now = new Date() }: { email: string; db: Database; now?: Date }) {
const [record] = await db
.select({
pendingInvitationsCount: count(organizationInvitationsTable.id),
})
.from(organizationInvitationsTable)
.where(
and(
eq(organizationInvitationsTable.email, email),
eq(organizationInvitationsTable.status, ORGANIZATION_INVITATION_STATUS.PENDING),
// To ensure we don't count just expired invitations that haven't been marked as expired yet
gte(organizationInvitationsTable.expiresAt, now),
),
);
async function getPendingInvitationsCount({ email, db, now = new Date() }: { email: string; db: DatabaseClient; now?: Date }) {
const result = await db
.selectFrom('organization_invitations')
.select(sql<number>`count(*)`.as('pending_invitations_count'))
.where('email', '=', email)
.where('status', '=', ORGANIZATION_INVITATION_STATUS.PENDING)
.where('expires_at', '>=', now.getTime())
.executeTakeFirst();
if (!record) {
if (!result) {
throw createOrganizationNotFoundError();
}
const { pendingInvitationsCount } = record;
const pendingInvitationsCount = result.pending_invitations_count;
return {
pendingInvitationsCount,
};
}
async function getInvitationForEmailAndOrganization({ email, organizationId, db, now = new Date() }: { email: string; organizationId: string; db: Database; now?: Date }) {
const [invitation] = await db
.select()
.from(organizationInvitationsTable)
.where(
and(
eq(organizationInvitationsTable.email, email),
eq(organizationInvitationsTable.organizationId, organizationId),
),
);
async function getInvitationForEmailAndOrganization({ email, organizationId, db, now = new Date() }: { email: string; organizationId: string; db: DatabaseClient; now?: Date }) {
const dbInvitation = await db
.selectFrom('organization_invitations')
.where('email', '=', email)
.where('organization_id', '=', organizationId)
.selectAll()
.executeTakeFirst();
const invitation = dbToOrganizationInvitation(dbInvitation);
return {
invitation: ensureInvitationStatus({ invitation, now }),
};
}
async function getOrganizationMemberByEmail({ email, organizationId, db }: { email: string; organizationId: string; db: Database }) {
const [rawMember] = await db
.select()
.from(organizationMembersTable)
.leftJoin(usersTable, eq(organizationMembersTable.userId, usersTable.id))
.where(
and(
eq(usersTable.email, email),
eq(organizationMembersTable.organizationId, organizationId),
),
);
async function getOrganizationMemberByEmail({ email, organizationId, db }: { email: string; organizationId: string; db: DatabaseClient }) {
const dbMember = await db
.selectFrom('organization_members')
.innerJoin('users', 'organization_members.user_id', 'users.id')
.where('users.email', '=', email)
.where('organization_members.organization_id', '=', organizationId)
.select([
'organization_members.id',
'organization_members.organization_id',
'organization_members.user_id',
'organization_members.role',
'organization_members.created_at',
'organization_members.updated_at',
])
.executeTakeFirst();
if (!dbMember) {
return { member: undefined };
}
return {
member: rawMember ? rawMember.organization_members : null,
member: dbToOrganizationMember(dbMember),
};
}
async function getOrganizationInvitations({ organizationId, db, now = new Date() }: { organizationId: string; db: Database; now?: Date }) {
const invitations = await db
.select()
.from(organizationInvitationsTable)
.where(eq(organizationInvitationsTable.organizationId, organizationId));
async function getOrganizationInvitations({ organizationId, db, now = new Date() }: { organizationId: string; db: DatabaseClient; now?: Date }) {
const dbInvitations = await db
.selectFrom('organization_invitations')
.where('organization_id', '=', organizationId)
.selectAll()
.execute();
return { invitations: invitations.map(invitation => ensureInvitationStatus({ invitation, now })) };
const invitations = dbInvitations
.map(dbInv => dbToOrganizationInvitation(dbInv))
.filter((inv): inv is NonNullable<typeof inv> => inv !== undefined)
.map(invitation => ensureInvitationStatus({ invitation, now }))
.filter((inv): inv is NonNullable<typeof inv> => inv !== null);
return { invitations };
}
async function updateExpiredPendingInvitationsStatus({ db, now = new Date() }: { db: Database; now?: Date }) {
async function updateExpiredPendingInvitationsStatus({ db, now = new Date() }: { db: DatabaseClient; now?: Date }) {
await db
.update(organizationInvitationsTable)
.updateTable('organization_invitations')
.set({ status: ORGANIZATION_INVITATION_STATUS.EXPIRED })
.where(
and(
lte(organizationInvitationsTable.expiresAt, now),
eq(organizationInvitationsTable.status, ORGANIZATION_INVITATION_STATUS.PENDING),
),
);
.where('expires_at', '<=', now.getTime())
.where('status', '=', ORGANIZATION_INVITATION_STATUS.PENDING)
.execute();
}
async function getOrganizationPendingInvitationsCount({ organizationId, db }: { organizationId: string; db: Database }) {
const [record] = await db
.select({
pendingInvitationsCount: count(organizationInvitationsTable.id),
})
.from(organizationInvitationsTable)
.where(
and(
eq(organizationInvitationsTable.organizationId, organizationId),
eq(organizationInvitationsTable.status, ORGANIZATION_INVITATION_STATUS.PENDING),
),
);
async function getOrganizationPendingInvitationsCount({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
const result = await db
.selectFrom('organization_invitations')
.select(sql<number>`count(*)`.as('pending_invitations_count'))
.where('organization_id', '=', organizationId)
.where('status', '=', ORGANIZATION_INVITATION_STATUS.PENDING)
.executeTakeFirst();
if (!record) {
if (!result) {
throw createOrganizationNotFoundError();
}
const { pendingInvitationsCount } = record;
const pendingInvitationsCount = result.pending_invitations_count;
return {
pendingInvitationsCount,
};
}
async function deleteAllMembersFromOrganization({ organizationId, db }: { organizationId: string; db: Database }) {
async function deleteAllMembersFromOrganization({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
await db
.delete(organizationMembersTable)
.where(eq(organizationMembersTable.organizationId, organizationId));
.deleteFrom('organization_members')
.where('organization_id', '=', organizationId)
.execute();
}
async function deleteAllOrganizationInvitations({ organizationId, db }: { organizationId: string; db: Database }) {
async function deleteAllOrganizationInvitations({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
await db
.delete(organizationInvitationsTable)
.where(eq(organizationInvitationsTable.organizationId, organizationId));
.deleteFrom('organization_invitations')
.where('organization_id', '=', organizationId)
.execute();
}
async function softDeleteOrganization({ organizationId, deletedBy, db, now = new Date(), purgeDaysDelay = 30 }: { organizationId: string; deletedBy: string; db: Database; now?: Date; purgeDaysDelay?: number }) {
async function softDeleteOrganization({ organizationId, deletedBy, db, now = new Date(), purgeDaysDelay = 30 }: { organizationId: string; deletedBy: string; db: DatabaseClient; now?: Date; purgeDaysDelay?: number }) {
await db
.update(organizationsTable)
.updateTable('organizations')
.set({
deletedAt: now,
deletedBy,
scheduledPurgeAt: addDays(now, purgeDaysDelay),
deleted_at: now.getTime(),
deleted_by: deletedBy,
scheduled_purge_at: addDays(now, purgeDaysDelay).getTime(),
})
.where(eq(organizationsTable.id, organizationId));
.where('id', '=', organizationId)
.execute();
}
async function restoreOrganization({ organizationId, db }: { organizationId: string; db: Database }) {
async function restoreOrganization({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
await db
.update(organizationsTable)
.updateTable('organizations')
.set({
deletedAt: null,
deletedBy: null,
scheduledPurgeAt: null,
deleted_at: null,
deleted_by: null,
scheduled_purge_at: null,
})
.where(eq(organizationsTable.id, organizationId));
.where('id', '=', organizationId)
.execute();
}
async function getUserDeletedOrganizations({ userId, db, now = new Date() }: { userId: string; db: Database; now?: Date }) {
const organizations = await db
.select()
.from(organizationsTable)
.where(and(
eq(organizationsTable.deletedBy, userId),
isNotNull(organizationsTable.deletedAt),
gte(organizationsTable.scheduledPurgeAt, now),
));
async function getUserDeletedOrganizations({ userId, db, now = new Date() }: { userId: string; db: DatabaseClient; now?: Date }) {
const dbOrganizations = await db
.selectFrom('organizations')
.where('deleted_by', '=', userId)
.where('deleted_at', 'is not', null)
.where('scheduled_purge_at', '>=', now.getTime())
.selectAll()
.execute();
return {
organizations,
organizations: dbOrganizations.map(dbOrg => dbToOrganization(dbOrg)).filter((org): org is NonNullable<typeof org> => org !== undefined),
};
}
async function getExpiredSoftDeletedOrganizations({ db, now = new Date() }: { db: Database; now?: Date }) {
async function getExpiredSoftDeletedOrganizations({ db, now = new Date() }: { db: DatabaseClient; now?: Date }) {
const organizations = await db
.select({ id: organizationsTable.id })
.from(organizationsTable)
.where(and(
isNotNull(organizationsTable.deletedAt),
lte(organizationsTable.scheduledPurgeAt, now),
));
.selectFrom('organizations')
.where('deleted_at', 'is not', null)
.where('scheduled_purge_at', '<=', now.getTime())
.select('id')
.execute();
return {
organizationIds: organizations.map(org => org.id),

View File

@@ -2,9 +2,11 @@ import type { NonEmptyArray } from '../shared/types';
import type { OrganizationInvitationStatus, OrganizationRole } from './organizations.types';
import { index, integer, sqliteTable, text, unique } from 'drizzle-orm/sqlite-core';
import { createPrimaryKeyField, createTimestampColumns } from '../shared/db/columns.helpers';
import { usersTable } from '../users/users.table';
import { ORGANIZATION_ID_PREFIX, ORGANIZATION_INVITATION_ID_PREFIX, ORGANIZATION_INVITATION_STATUS, ORGANIZATION_INVITATION_STATUS_LIST, ORGANIZATION_MEMBER_ID_PREFIX, ORGANIZATION_ROLES_LIST } from './organizations.constants';
// Legacy stub for Drizzle schema - this file is no longer used in production
const usersTable = { id: '' } as any;
export const organizationsTable = sqliteTable('organizations', {
...createPrimaryKeyField({ prefix: ORGANIZATION_ID_PREFIX }),
...createTimestampColumns(),

View File

@@ -0,0 +1,73 @@
import type { Expand } from '@corentinth/chisels';
import type { Insertable, Selectable, Updateable } from 'kysely';
import type { BusinessInsertable, CamelCaseKeys, TableWithIdAndTimestamps } from '../app/database/database.columns.types';
// --- Organizations

// Raw `organizations` table schema as Kysely sees it: snake_case columns,
// timestamps stored as epoch milliseconds (SQLite integers).
export type OrganizationsTable = TableWithIdAndTimestamps<{
  name: string;
  // Billing customer id (presumably the Stripe customer) — null until billing is set up; confirm against billing code.
  customer_id: string | null;
  // Soft-deletion markers: null while the organization is active.
  deleted_at: number | null;
  deleted_by: string | null;
  // Instant (epoch ms) after which the soft-deleted organization may be purged.
  scheduled_purge_at: number | null;
}>;

// Row shapes derived by Kysely for SELECT / INSERT / UPDATE statements.
export type DbSelectableOrganization = Selectable<OrganizationsTable>;
export type DbInsertableOrganization = Insertable<OrganizationsTable>;
export type DbUpdateableOrganization = Updateable<OrganizationsTable>;

// Business-facing insert payload: camelCase keys with Date-valued overrides
// for the soft-deletion timestamps.
export type InsertableOrganization = BusinessInsertable<DbInsertableOrganization, {
  deletedAt?: Date | null;
  scheduledPurgeAt?: Date | null;
}>;

// Business-facing organization model: camelCase keys, epoch-ms columns
// replaced by Date instances.
export type Organization = Expand<CamelCaseKeys<Omit<DbSelectableOrganization, 'created_at' | 'updated_at' | 'deleted_at' | 'scheduled_purge_at'> & {
  createdAt: Date;
  updatedAt: Date;
  deletedAt: Date | null;
  scheduledPurgeAt: Date | null;
}>>;

// --- Organization Members

// Raw `organization_members` join table: links a user to an organization with a role.
export type OrganizationMembersTable = TableWithIdAndTimestamps<{
  organization_id: string;
  user_id: string;
  role: string;
}>;

export type DbSelectableOrganizationMember = Selectable<OrganizationMembersTable>;
export type DbInsertableOrganizationMember = Insertable<OrganizationMembersTable>;
export type DbUpdateableOrganizationMember = Updateable<OrganizationMembersTable>;

// No Date-valued overrides needed beyond the shared id/timestamp handling.
export type InsertableOrganizationMember = BusinessInsertable<DbInsertableOrganizationMember, {}>;

// Business-facing member model: role narrowed from plain string to the known union.
export type OrganizationMember = Expand<CamelCaseKeys<Omit<DbSelectableOrganizationMember, 'created_at' | 'updated_at' | 'role'> & {
  createdAt: Date;
  updatedAt: Date;
  role: 'owner' | 'admin' | 'member';
}>>;

// --- Organization Invitations

// Raw `organization_invitations` table: invites sent to an email address.
export type OrganizationInvitationsTable = TableWithIdAndTimestamps<{
  organization_id: string;
  email: string;
  role: string;
  status: string;
  // Expiry instant, epoch milliseconds.
  expires_at: number;
  // User id of the member who sent the invitation.
  inviter_id: string;
}>;

export type DbSelectableOrganizationInvitation = Selectable<OrganizationInvitationsTable>;
export type DbInsertableOrganizationInvitation = Insertable<OrganizationInvitationsTable>;
export type DbUpdateableOrganizationInvitation = Updateable<OrganizationInvitationsTable>;

// Insert payload takes the expiry as a Date rather than epoch ms.
export type InsertableOrganizationInvitation = BusinessInsertable<DbInsertableOrganizationInvitation, {
  expiresAt: Date;
}>;

// Business-facing invitation model: Dates for timestamps, role/status narrowed to unions.
export type OrganizationInvitation = Expand<CamelCaseKeys<Omit<DbSelectableOrganizationInvitation, 'created_at' | 'updated_at' | 'expires_at' | 'role' | 'status'> & {
  createdAt: Date;
  updatedAt: Date;
  expiresAt: Date;
  role: 'owner' | 'admin' | 'member';
  status: 'pending' | 'accepted' | 'rejected' | 'expired' | 'cancelled';
}>>;

View File

@@ -1,11 +1,5 @@
import type { Expand } from '@corentinth/chisels';
import type { ORGANIZATION_INVITATION_STATUS_LIST, ORGANIZATION_ROLES_LIST } from './organizations.constants';
import type { organizationInvitationsTable, organizationsTable } from './organizations.table';
export type DbInsertableOrganization = Expand<typeof organizationsTable.$inferInsert>;
export type OrganizationRole = typeof ORGANIZATION_ROLES_LIST[number];
export type OrganizationInvitationStatus = typeof ORGANIZATION_INVITATION_STATUS_LIST[number];
export type OrganizationInvitation = Expand<typeof organizationInvitationsTable.$inferSelect>;

View File

@@ -12,8 +12,8 @@ import { createSubscriptionsRepository } from '../subscriptions/subscriptions.re
import { createUsersRepository } from '../users/users.repository';
import { ORGANIZATION_ROLES } from './organizations.constants';
import { createMaxOrganizationMembersCountReachedError, createOrganizationHasActiveSubscriptionError, createOrganizationInvitationAlreadyExistsError, createOrganizationNotFoundError, createUserAlreadyInOrganizationError, createUserMaxOrganizationCountReachedError, createUserNotInOrganizationError, createUserNotOrganizationOwnerError, createUserOrganizationInvitationLimitReachedError } from './organizations.errors';
import { dbToOrganization } from './organizations.models';
import { createOrganizationsRepository } from './organizations.repository';
import { organizationInvitationsTable, organizationMembersTable, organizationsTable } from './organizations.table';
import { checkIfUserCanCreateNewOrganization, ensureUserIsInOrganization, ensureUserIsOwnerOfOrganization, getOrCreateOrganizationCustomerId, inviteMemberToOrganization, purgeExpiredSoftDeletedOrganization, purgeExpiredSoftDeletedOrganizations, removeMemberFromOrganization, softDeleteOrganization } from './organizations.usecases';
describe('organizations usecases', () => {
@@ -100,8 +100,8 @@ describe('organizations usecases', () => {
});
// add a second organization owned by the user
await db.insert(organizationsTable).values({ id: 'organization-3', name: 'Organization 3' });
await db.insert(organizationMembersTable).values({ organizationId: 'organization-3', userId: 'user-1', role: ORGANIZATION_ROLES.OWNER });
await db.insertInto('organizations').values({ id: 'organization-3', name: 'Organization 3', created_at: Date.now(), updated_at: Date.now() }).execute();
await db.insertInto('organization_members').values({ id: 'member-3', organization_id: 'organization-3', user_id: 'user-1', role: ORGANIZATION_ROLES.OWNER, created_at: Date.now(), updated_at: Date.now() }).execute();
// throw
await expect(
@@ -142,8 +142,8 @@ describe('organizations usecases', () => {
});
// add a third organization owned by the user
await db.insert(organizationsTable).values({ id: 'organization-3', name: 'Organization 3' });
await db.insert(organizationMembersTable).values({ organizationId: 'organization-3', userId: 'user-1', role: ORGANIZATION_ROLES.OWNER });
await db.insertInto('organizations').values({ id: 'organization-3', name: 'Organization 3', created_at: Date.now(), updated_at: Date.now() }).execute();
await db.insertInto('organization_members').values({ id: 'member-3', organization_id: 'organization-3', user_id: 'user-1', role: ORGANIZATION_ROLES.OWNER, created_at: Date.now(), updated_at: Date.now() }).execute();
// throw
await expect(
@@ -307,7 +307,7 @@ describe('organizations usecases', () => {
organizationsRepository,
});
const remainingMembers = await db.select().from(organizationMembersTable);
const remainingMembers = await db.selectFrom('organization_members').selectAll().execute();
expect(remainingMembers.length).to.equal(1);
expect(remainingMembers[0]?.id).to.equal('member-1');
@@ -884,7 +884,7 @@ describe('organizations usecases', () => {
});
// Verify invitation was saved in database
const invitations = await db.select().from(organizationInvitationsTable);
const invitations = await db.selectFrom('organization_invitations').selectAll().execute();
expect(invitations).toHaveLength(1);
expect(invitations[0]).toMatchObject({
email: 'new-member@example.com',
@@ -980,7 +980,7 @@ describe('organizations usecases', () => {
now: new Date('2025-10-05'),
});
const [organization] = await db.select().from(organizationsTable);
const [organization] = await db.selectFrom('organizations').selectAll().execute().then(rows => rows.map(row => dbToOrganization(row)));
expect(organization?.deletedAt).to.eql(new Date('2025-10-05'));
expect(organization?.deletedBy).to.eql('usr_1');
expect(organization?.scheduledPurgeAt).to.eql(new Date('2025-11-04'));
@@ -1045,9 +1045,8 @@ describe('organizations usecases', () => {
config,
});
const remainingMembers = await db.select().from(organizationMembersTable);
const remainingInvitations = await db.select().from(organizationInvitationsTable);
const remainingMembers = await db.selectFrom('organization_members').selectAll().execute();
const remainingInvitations = await db.selectFrom('organization_invitations').selectAll().execute();
expect(remainingMembers).toHaveLength(0);
expect(remainingInvitations).toHaveLength(0);
});
@@ -1146,7 +1145,7 @@ describe('organizations usecases', () => {
).rejects.toThrow(createOrganizationHasActiveSubscriptionError());
// Organization should not be deleted
const [organization] = await db.select().from(organizationsTable);
const [organization] = await db.selectFrom('organizations').selectAll().execute();
expect(organization?.deletedAt).to.eql(null);
});
@@ -1186,7 +1185,7 @@ describe('organizations usecases', () => {
});
// Organization should be deleted
const [organization] = await db.select().from(organizationsTable);
const [organization] = await db.selectFrom('organizations').selectAll().execute();
expect(organization?.deletedAt).to.eql(new Date('2025-10-05'));
expect(organization?.deletedBy).to.eql('usr_1');
});
@@ -1226,7 +1225,7 @@ describe('organizations usecases', () => {
now: new Date('2025-10-05'),
});
const [organization] = await db.select().from(organizationsTable);
const [organization] = await db.selectFrom('organizations').selectAll().execute();
expect(organization?.deletedAt).to.eql(new Date('2025-10-05'));
expect(organization?.deletedBy).to.eql('usr_1');
});

View File

@@ -32,10 +32,14 @@ import {
createUserNotOrganizationOwnerError,
createUserOrganizationInvitationLimitReachedError,
} from './organizations.errors';
import { canUserRemoveMemberFromOrganization } from './organizations.models';
import { canUserRemoveMemberFromOrganization, organizationToDb } from './organizations.models';
export async function createOrganization({ name, userId, organizationsRepository }: { name: string; userId: string; organizationsRepository: OrganizationsRepository }) {
const { organization } = await organizationsRepository.saveOrganization({ organization: { name } });
const { organization } = await organizationsRepository.saveOrganization({ organization: organizationToDb({ name }) });
if (!organization) {
throw new Error('Failed to create organization');
}
await organizationsRepository.addUserToOrganization({
userId,

View File

@@ -1,4 +1,4 @@
import type { Database } from '../../app/database/database.types';
import type { DatabaseClient } from '../../app/database/database.types';
import type { Config } from '../../config/config.types';
import type { TaskServices } from '../../tasks/tasks.services';
import { createLogger } from '../../shared/logger/logger';
@@ -6,7 +6,7 @@ import { createOrganizationsRepository } from '../organizations.repository';
const logger = createLogger({ namespace: 'organizations:tasks:expireInvitations' });
export async function registerExpireInvitationsTask({ taskServices, db, config }: { taskServices: TaskServices; db: Database; config: Config }) {
export async function registerExpireInvitationsTask({ taskServices, db, config }: { taskServices: TaskServices; db: DatabaseClient; config: Config }) {
const taskName = 'expire-invitations';
const { cron, runOnStartup } = config.tasks.expireInvitations;

View File

@@ -1,4 +1,4 @@
import type { Database } from '../../app/database/database.types';
import type { DatabaseClient } from '../../app/database/database.types';
import type { Config } from '../../config/config.types';
import type { DocumentStorageService } from '../../documents/storage/documents.storage.services';
import type { TaskServices } from '../../tasks/tasks.services';
@@ -9,7 +9,7 @@ import { purgeExpiredSoftDeletedOrganizations } from '../organizations.usecases'
const logger = createLogger({ namespace: 'organizations:tasks:purgeExpiredOrganizations' });
export async function registerPurgeExpiredOrganizationsTask({ taskServices, db, config, documentsStorageService }: { taskServices: TaskServices; db: Database; config: Config; documentsStorageService: DocumentStorageService }) {
export async function registerPurgeExpiredOrganizationsTask({ taskServices, db, config, documentsStorageService }: { taskServices: TaskServices; db: DatabaseClient; config: Config; documentsStorageService: DocumentStorageService }) {
const taskName = 'purge-expired-organizations';
const { cron, runOnStartup } = config.tasks.purgeExpiredOrganizations;

View File

@@ -0,0 +1,38 @@
import type { DbInsertableUserRole, DbSelectableUserRole, InsertableUserRole, UserRole } from './roles.tables';
import { generateId } from '../shared/random/ids';
// Prefix applied to generated user-role identifiers (e.g. `rol_…`).
const userRoleIdPrefix = 'rol';

// Produces a fresh, prefixed id for a user-role row.
function generateUserRoleId(): string {
  return generateId({ prefix: userRoleIdPrefix });
}
/**
 * Maps a raw `user_roles` row to its business-level representation,
 * converting the epoch-millisecond timestamp columns into `Date`s.
 * Returns `undefined` when no row was found.
 */
export function dbToUserRole(dbUserRole?: DbSelectableUserRole): UserRole | undefined {
  if (dbUserRole == null) {
    return undefined;
  }

  const { id, user_id: userId, role, created_at: createdAt, updated_at: updatedAt } = dbUserRole;

  return {
    id,
    userId,
    role,
    createdAt: new Date(createdAt),
    updatedAt: new Date(updatedAt),
  };
}
/**
 * Converts a business-level user role into an insertable `user_roles` row.
 * Generates an id when none is supplied and falls back to `now` for missing
 * timestamps; timestamps are stored as epoch milliseconds.
 */
export function userRoleToDb(
  userRole: InsertableUserRole,
  {
    now = new Date(),
    generateId = generateUserRoleId,
  }: {
    now?: Date;
    generateId?: () => string;
  } = {},
): DbInsertableUserRole {
  const fallbackTimestamp = now.getTime();

  return {
    id: userRole.id ?? generateId(),
    user_id: userRole.userId,
    role: userRole.role,
    created_at: userRole.createdAt?.getTime() ?? fallbackTimestamp,
    updated_at: userRole.updatedAt?.getTime() ?? fallbackTimestamp,
  };
}

View File

@@ -1,12 +1,11 @@
import type { Database } from '../app/database/database.types';
import type { DatabaseClient } from '../app/database/database.types';
import { injectArguments } from '@corentinth/chisels';
import { eq } from 'drizzle-orm';
import { map } from 'lodash-es';
import { userRolesTable } from './roles.table';
import { dbToUserRole } from './roles.models';
export type RolesRepository = ReturnType<typeof createRolesRepository>;
export function createRolesRepository({ db }: { db: Database }) {
export function createRolesRepository({ db }: { db: DatabaseClient }) {
return injectArguments(
{
getUserRoles,
@@ -15,8 +14,14 @@ export function createRolesRepository({ db }: { db: Database }) {
);
}
async function getUserRoles({ userId, db }: { userId: string; db: Database }) {
const roles = await db.select().from(userRolesTable).where(eq(userRolesTable.userId, userId));
async function getUserRoles({ userId, db }: { userId: string; db: DatabaseClient }) {
const dbRoles = await db
.selectFrom('user_roles')
.where('user_id', '=', userId)
.selectAll()
.execute();
const roles = dbRoles.map(dbRole => dbToUserRole(dbRole)).filter((role): role is NonNullable<typeof role> => role !== undefined);
return {
roles: map(roles, 'role'),

View File

@@ -1,7 +1,9 @@
import type { Role } from './roles.types';
import { index, sqliteTable, text, unique } from 'drizzle-orm/sqlite-core';
import { createPrimaryKeyField, createTimestampColumns } from '../shared/db/columns.helpers';
import { usersTable } from '../users/users.table';
// Legacy stub for Drizzle schema - this file is no longer used in production
const usersTable = { id: '' } as any;
export const userRolesTable = sqliteTable(
'user_roles',

View File

@@ -0,0 +1,18 @@
import type { Expand } from '@corentinth/chisels';
import type { Insertable, Selectable, Updateable } from 'kysely';
import type { BusinessInsertable, CamelCaseKeys, TableWithIdAndTimestamps } from '../app/database/database.columns.types';
// Raw `user_roles` table schema: associates a user with a global role string.
// Timestamps come from the shared id/timestamp column helper (epoch milliseconds).
export type UserRolesTable = TableWithIdAndTimestamps<{
  user_id: string;
  role: string;
}>;

// Row shapes derived by Kysely for SELECT / INSERT / UPDATE statements.
export type DbSelectableUserRole = Selectable<UserRolesTable>;
export type DbInsertableUserRole = Insertable<UserRolesTable>;
export type DbUpdateableUserRole = Updateable<UserRolesTable>;

// Business-facing insert payload; no extra Date-valued overrides needed.
export type InsertableUserRole = BusinessInsertable<DbInsertableUserRole, {}>;

// Business-facing role model: camelCase keys, Date-valued timestamps.
export type UserRole = Expand<CamelCaseKeys<Omit<DbSelectableUserRole, 'created_at' | 'updated_at'> & {
  createdAt: Date;
  updatedAt: Date;
}>>;

View File

@@ -1,4 +1,10 @@
import type { Subscription } from './subscriptions.types';
import type {
DbInsertableOrganizationSubscription,
DbSelectableOrganizationSubscription,
InsertableOrganizationSubscription,
OrganizationSubscription,
} from './subscriptions.new.tables';
import { isNil, isNonEmptyString } from '../shared/utils';
export function coerceStripeTimestampToDate(timestamp: number) {
@@ -57,3 +63,48 @@ export function doesSubscriptionBlockDeletion(subscription: Subscription | null
// All other subscription statuses block deletion
return true;
}
// DB <-> Business model transformers
/**
 * Maps a raw `organization_subscriptions` row to its business model:
 * epoch-millisecond columns become `Date`s and the 0/1 integer flag
 * becomes a real boolean. Returns `undefined` when no row was found.
 */
export function dbToOrganizationSubscription(dbSubscription?: DbSelectableOrganizationSubscription): OrganizationSubscription | undefined {
  if (dbSubscription == null) {
    return undefined;
  }

  const {
    id,
    customer_id: customerId,
    organization_id: organizationId,
    plan_id: planId,
    status,
    seats_count: seatsCount,
    cancel_at_period_end: cancelAtPeriodEnd,
    created_at: createdAt,
    updated_at: updatedAt,
    current_period_end: currentPeriodEnd,
    current_period_start: currentPeriodStart,
  } = dbSubscription;

  return {
    id,
    customerId,
    organizationId,
    planId,
    status,
    seatsCount,
    cancelAtPeriodEnd: cancelAtPeriodEnd === 1,
    createdAt: new Date(createdAt),
    updatedAt: new Date(updatedAt),
    currentPeriodEnd: new Date(currentPeriodEnd),
    currentPeriodStart: new Date(currentPeriodStart),
  };
}
/**
 * Converts a business-level subscription into an insertable
 * `organization_subscriptions` row: Dates become epoch milliseconds and
 * the cancel flag is stored as a 0/1 integer. `now` supplies fallback
 * values for missing created/updated timestamps.
 */
export function organizationSubscriptionToDb(
  subscription: InsertableOrganizationSubscription,
  {
    now = new Date(),
  }: {
    now?: Date;
  } = {},
): DbInsertableOrganizationSubscription {
  const fallbackTimestamp = now.getTime();

  return {
    id: subscription.id,
    customer_id: subscription.customerId,
    organization_id: subscription.organizationId,
    plan_id: subscription.planId,
    status: subscription.status,
    seats_count: subscription.seatsCount,
    // Only an explicit `true` maps to 1; undefined/false both persist as 0.
    cancel_at_period_end: subscription.cancelAtPeriodEnd === true ? 1 : 0,
    created_at: subscription.createdAt?.getTime() ?? fallbackTimestamp,
    updated_at: subscription.updatedAt?.getTime() ?? fallbackTimestamp,
    current_period_end: subscription.currentPeriodEnd.getTime(),
    current_period_start: subscription.currentPeriodStart.getTime(),
  };
}

View File

@@ -0,0 +1,36 @@
import type { Expand } from '@corentinth/chisels';
import type { ColumnType, Insertable, Selectable, Updateable } from 'kysely';
import type { CamelCaseKeys, WithTimestamps } from '../app/database/database.columns.types';
// Note: This table uses Stripe subscription ID as primary key, not auto-generated
export type OrganizationSubscriptionsTable = WithTimestamps<{
  // Caller-supplied id; required (string) in all of select/insert/update shapes.
  id: ColumnType<string, string, string>;
  customer_id: string;
  organization_id: string;
  plan_id: string;
  status: string;
  seats_count: number;
  // Billing period bounds, epoch milliseconds.
  current_period_end: number;
  current_period_start: number;
  // Boolean flag persisted as a 0/1 integer (see the model transformers).
  cancel_at_period_end: number;
}>;

// Row shapes derived by Kysely for SELECT / INSERT / UPDATE statements.
export type DbSelectableOrganizationSubscription = Selectable<OrganizationSubscriptionsTable>;
export type DbInsertableOrganizationSubscription = Insertable<OrganizationSubscriptionsTable>;
export type DbUpdateableOrganizationSubscription = Updateable<OrganizationSubscriptionsTable>;

// Business-facing insert payload: camelCase keys, Date-valued periods, boolean flag.
export type InsertableOrganizationSubscription = Expand<CamelCaseKeys<Omit<DbInsertableOrganizationSubscription, 'created_at' | 'updated_at' | 'current_period_end' | 'current_period_start' | 'cancel_at_period_end'>> & {
  createdAt?: Date;
  updatedAt?: Date;
  currentPeriodEnd: Date;
  currentPeriodStart: Date;
  cancelAtPeriodEnd?: boolean;
}>;

// Business-facing subscription model: Dates and a real boolean replace the raw columns.
export type OrganizationSubscription = Expand<CamelCaseKeys<Omit<DbSelectableOrganizationSubscription, 'created_at' | 'updated_at' | 'current_period_end' | 'current_period_start' | 'cancel_at_period_end'> & {
  createdAt: Date;
  updatedAt: Date;
  currentPeriodEnd: Date;
  currentPeriodStart: Date;
  cancelAtPeriodEnd: boolean;
}>>;

View File

@@ -1,13 +1,12 @@
import type { Database } from '../app/database/database.types';
import type { DbInsertableSubscription } from './subscriptions.types';
import type { DatabaseClient } from '../app/database/database.types';
import { injectArguments } from '@corentinth/chisels';
import { and, eq, inArray } from 'drizzle-orm';
import { omitUndefined } from '../shared/utils';
import { organizationSubscriptionsTable } from './subscriptions.tables';
import { dbToOrganizationSubscription, organizationSubscriptionToDb } from './subscriptions.models';
import type { DbInsertableOrganizationSubscription } from './subscriptions.new.tables';
export type SubscriptionsRepository = ReturnType<typeof createSubscriptionsRepository>;
export function createSubscriptionsRepository({ db }: { db: Database }) {
export function createSubscriptionsRepository({ db }: { db: DatabaseClient }) {
return injectArguments(
{
getActiveOrganizationSubscription,
@@ -22,68 +21,73 @@ export function createSubscriptionsRepository({ db }: { db: Database }) {
);
}
async function getActiveOrganizationSubscription({ organizationId, db }: { organizationId: string; db: Database }) {
async function getActiveOrganizationSubscription({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
// Allowlist approach: explicitly include only statuses that grant access
// - active: paid and active subscription
// - trialing: in trial period (has access)
// - past_due: payment failed but still has access during grace period
const [subscription] = await db
.select()
.from(organizationSubscriptionsTable)
.where(
and(
eq(organizationSubscriptionsTable.organizationId, organizationId),
inArray(organizationSubscriptionsTable.status, ['active', 'trialing', 'past_due']),
),
);
const dbSubscription = await db
.selectFrom('organization_subscriptions')
.where('organization_id', '=', organizationId)
.where('status', 'in', ['active', 'trialing', 'past_due'])
.selectAll()
.executeTakeFirst();
const subscription = dbToOrganizationSubscription(dbSubscription);
return { subscription };
}
async function getAllOrganizationSubscriptions({ organizationId, db }: { organizationId: string; db: Database }) {
const subscriptions = await db
.select()
.from(organizationSubscriptionsTable)
.where(
eq(organizationSubscriptionsTable.organizationId, organizationId),
);
async function getAllOrganizationSubscriptions({ organizationId, db }: { organizationId: string; db: DatabaseClient }) {
const dbSubscriptions = await db
.selectFrom('organization_subscriptions')
.where('organization_id', '=', organizationId)
.selectAll()
.execute();
const subscriptions = dbSubscriptions.map(dbSub => dbToOrganizationSubscription(dbSub)).filter((sub): sub is NonNullable<typeof sub> => sub !== undefined);
return { subscriptions };
}
async function getSubscriptionById({ subscriptionId, db }: { subscriptionId: string; db: Database }) {
const [subscription] = await db
.select()
.from(organizationSubscriptionsTable)
.where(
eq(organizationSubscriptionsTable.id, subscriptionId),
);
async function getSubscriptionById({ subscriptionId, db }: { subscriptionId: string; db: DatabaseClient }) {
const dbSubscription = await db
.selectFrom('organization_subscriptions')
.where('id', '=', subscriptionId)
.selectAll()
.executeTakeFirst();
const subscription = dbToOrganizationSubscription(dbSubscription);
return { subscription };
}
async function updateSubscription({ subscriptionId, db, ...subscription }: { subscriptionId: string; db: Database } & Omit<Partial<DbInsertableSubscription>, 'id'>) {
const [updatedSubscription] = await db
.update(organizationSubscriptionsTable)
async function updateSubscription({ subscriptionId, db, ...subscription }: { subscriptionId: string; db: DatabaseClient } & Omit<Partial<DbInsertableOrganizationSubscription>, 'id'>) {
const dbUpdatedSubscription = await db
.updateTable('organization_subscriptions')
.set(omitUndefined(subscription))
.where(
eq(organizationSubscriptionsTable.id, subscriptionId),
)
.returning();
.where('id', '=', subscriptionId)
.returningAll()
.executeTakeFirst();
const updatedSubscription = dbToOrganizationSubscription(dbUpdatedSubscription);
return { updatedSubscription };
}
// cspell:ignore upserted Insertable
async function upsertSubscription({ db, ...subscription }: { db: Database } & DbInsertableSubscription) {
const [upsertedSubscription] = await db
.insert(organizationSubscriptionsTable)
async function upsertSubscription({ db, ...subscription }: { db: DatabaseClient } & DbInsertableOrganizationSubscription) {
const dbUpsertedSubscription = await db
.insertInto('organization_subscriptions')
.values(subscription)
.onConflictDoUpdate({
target: organizationSubscriptionsTable.id,
set: omitUndefined(subscription),
})
.returning();
.onConflict((oc) => oc
.column('id')
.doUpdateSet(omitUndefined(subscription)),
)
.returningAll()
.executeTakeFirst();
const upsertedSubscription = dbToOrganizationSubscription(dbUpsertedSubscription);
return { subscription: upsertedSubscription };
}

View File

@@ -7,7 +7,7 @@ import { get } from 'lodash-es';
import { createOrganizationNotFoundError } from '../organizations/organizations.errors';
import { createLogger } from '../shared/logger/logger';
import { isNil } from '../shared/utils';
import { coerceStripeTimestampToDate } from './subscriptions.models';
import { coerceStripeTimestampToDate, organizationSubscriptionToDb } from './subscriptions.models';
export async function handleStripeWebhookEvent({
event,
@@ -73,17 +73,19 @@ export async function handleStripeWebhookEvent({
const { organizationPlan } = await plansRepository.getOrganizationPlanByPriceId({ priceId: subscriptionItem.price.id });
// Upsert subscription with current state from Stripe
await subscriptionsRepository.upsertSubscription({
id: subscriptionId,
organizationId,
planId: organizationPlan.id,
seatsCount: organizationPlan.limits.maxOrganizationsMembersCount,
customerId,
status,
currentPeriodEnd,
currentPeriodStart,
cancelAtPeriodEnd,
});
await subscriptionsRepository.upsertSubscription(
organizationSubscriptionToDb({
id: subscriptionId,
organizationId,
planId: organizationPlan.id,
seatsCount: organizationPlan.limits.maxOrganizationsMembersCount,
customerId,
status,
currentPeriodEnd,
currentPeriodStart,
cancelAtPeriodEnd,
}),
);
logger.info({
subscriptionId,

Some files were not shown because too many files have changed in this diff Show More