Compare commits

..

5 Commits

Author SHA1 Message Date
Corentin Thomasset
f5d951cc82 chore(n8n): added scope in package name 2025-08-04 20:35:35 +02:00
Corentin Thomasset
47f9c5b186 refactor(n8n): updated changeset 2025-08-04 13:54:15 +02:00
Corentin Thomasset
0b97e58785 chore(n8n): added workflow file for n8n nodes 2025-08-04 13:50:43 +02:00
Corentin Thomasset
d51779aeb8 refactor(n8n): auto lint 2025-08-04 13:25:33 +02:00
Marco Mihai Condrache
8f30ec0281 feat(n8n): initial setup of n8n node package (#443)
* feat: n8n package implementation

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* fix: typo

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* fix: wrong requests

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* fix: pagination

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* fix: search

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* feat: use correct regex

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* fix: general fixes

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* fix: use color type

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* fix: specs

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* fix: result

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* fix: file download

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* chore: changeset

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* feat: add readme

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

* fix: typo

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>

---------

Signed-off-by: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com>
2025-08-04 13:23:42 +02:00
126 changed files with 4039 additions and 1496 deletions

View File

@@ -0,0 +1,5 @@
---
"@papra/app-client": patch
---
Added diacritics and improved wording for Romanian translation

View File

@@ -0,0 +1,5 @@
---
"@papra/webhooks": minor
---
Breaking change: updated webhooks signatures and payload format to match standard-webhook spec
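
For reference, the Standard Webhooks spec that these signatures now follow signs the string `{id}.{timestamp}.{body}` with HMAC-SHA256 and transmits the result in a `webhook-signature` header as `v1,<base64>`. A minimal verification sketch in TypeScript, assuming a raw string secret and the spec's default header layout (the actual helper shipped in `@papra/webhooks` is not shown in this diff):

```ts
import { createHmac, timingSafeEqual } from 'node:crypto';

// Sketch of Standard Webhooks verification: recompute the HMAC over
// "<webhook-id>.<webhook-timestamp>.<raw body>" and compare it to the
// "v1,<base64>" entries of the webhook-signature header.
export function isSignatureValid({ secret, webhookId, timestamp, body, signatureHeader }: {
  secret: string; // assumed raw here; the spec typically base64-encodes it behind a whsec_ prefix
  webhookId: string;
  timestamp: string;
  body: string;
  signatureHeader: string;
}): boolean {
  const expected = createHmac('sha256', secret)
    .update(`${webhookId}.${timestamp}.${body}`)
    .digest();

  // The header may carry several space-separated signatures (e.g. during secret rotation)
  return signatureHeader
    .split(' ')
    .filter(entry => entry.startsWith('v1,'))
    .some((entry) => {
      const candidate = Buffer.from(entry.slice('v1,'.length), 'base64');
      return candidate.length === expected.length && timingSafeEqual(candidate, expected);
    });
}
```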

View File

@@ -0,0 +1,5 @@
---
"@papra/app-client": patch
---
Added feedback when an error occurs while deleting a tag

View File

@@ -0,0 +1,5 @@
---
"@papra/app-server": minor
---
The file content extraction (like OCR) is now done asynchronously by the task runner
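
As a rough illustration of what "done asynchronously by the task runner" means: the upload path enqueues a job and responds immediately, while a worker performs the extraction later. All names below (`enqueueTask`, `extractDocumentContent`) are hypothetical, the real task-runner API is not part of this diff, and the deferred webhook invocation mentioned in another changeset presumably rides the same mechanism:

```ts
// Hypothetical sketch: a minimal in-memory task queue standing in for the real task runner.
type Task = { name: 'extract-document-content'; payload: { documentId: string } };

const queue: Task[] = [];

function enqueueTask(task: Task) {
  queue.push(task); // a real runner would persist tasks and retry on failure
}

// Called from the upload handler: respond quickly, defer the heavy OCR work.
function onDocumentUploaded({ documentId }: { documentId: string }) {
  enqueueTask({ name: 'extract-document-content', payload: { documentId } });
}

// Worker loop: pick up tasks and run the extraction out of band.
async function runNextTask(
  extractDocumentContent: (args: { documentId: string }) => Promise<void>,
) {
  const task = queue.shift();
  if (task?.name === 'extract-document-content') {
    await extractDocumentContent(task.payload);
  }
}
```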

View File

@@ -0,0 +1,5 @@
---
"@papra/app-client": patch
---
Simplified the organization intake email list

View File

@@ -0,0 +1,5 @@
---
"@papra/app-server": minor
---
Fixed the inability to delete a tag that has been assigned to a document

View File

@@ -0,0 +1,7 @@
---
"@papra/app-client": minor
"@papra/app-server": minor
"@papra/webhooks": minor
---
Added new webhook events: document:updated, document:tag:added, document:tag:removed

View File

@@ -0,0 +1,7 @@
---
"@papra/app-client": minor
"@papra/app-server": minor
"@papra/webhooks": minor
---
Webhooks invocation is now deferred

View File

@@ -0,0 +1,5 @@
---
"@papra/lecture": minor
---
Added support for scanned PDF content extraction

View File

@@ -0,0 +1,5 @@
---
"@papra/app-client": patch
---
Added Italian (it) language support

View File

@@ -0,0 +1,5 @@
---
"n8n-nodes-papra": major
---
Added n8n nodes package for Papra
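
For readers unfamiliar with n8n, a community node package exports classes implementing the `INodeType` interface from `n8n-workflow`. A bare-bones skeleton of what such a node can look like; everything below is illustrative, not the actual `n8n-nodes-papra` source (which lives in `packages/n8n-nodes`):

```ts
import type { INodeType, INodeTypeDescription } from 'n8n-workflow';

// Illustrative skeleton only; field values are assumptions, not Papra's real node.
export class Papra implements INodeType {
  description: INodeTypeDescription = {
    displayName: 'Papra',
    name: 'papra',
    group: ['transform'],
    version: 1,
    description: 'Interact with a Papra instance',
    defaults: { name: 'Papra' },
    inputs: ['main'],
    outputs: ['main'],
    properties: [], // resource/operation selectors and credentials would be declared here
  };
}
```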

View File

@@ -0,0 +1,41 @@
name: CI - N8N Nodes

on:
  pull_request:
  push:
    branches:
      - main

jobs:
  ci-packages-n8n-nodes:
    name: CI - N8N Nodes
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: packages/n8n-nodes
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - name: Install pnpm
        uses: pnpm/action-setup@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 22
          cache: 'pnpm'
      - name: Install dependencies
        run: pnpm i
      - name: Run linters
        run: pnpm lint
      - name: Type check
        run: pnpm typecheck
      # - name: Run unit test
      #   run: pnpm test
      - name: Build the app
        run: pnpm build

View File

@@ -1,11 +1,5 @@
# @papra/docs
## 0.5.3
### Patch Changes
- [#455](https://github.com/papra-hq/papra/pull/455) [`b33fde3`](https://github.com/papra-hq/papra/commit/b33fde35d3e8622e31b51aadfe56875d8e48a2ef) Thanks [@CorentinTh](https://github.com/CorentinTh)! - Improved feedback message in case of invalid origin configuration
## 0.5.2
### Patch Changes

View File

@@ -1,7 +1,7 @@
{
"name": "@papra/docs",
"type": "module",
"version": "0.5.3",
"version": "0.5.2",
"private": true,
"packageManager": "pnpm@10.12.3",
"description": "Papra documentation website",

View File

@@ -31,7 +31,6 @@ Launch Papra with default configuration using:
docker run -d \
--name papra \
--restart unless-stopped \
-  --env APP_BASE_URL=http://localhost:1221 \
-p 1221:1221 \
ghcr.io/papra-hq/papra:latest
```
@@ -70,7 +69,6 @@ For production deployments, mount host directories to preserve application data
docker run -d \
--name papra \
--restart unless-stopped \
-  --env APP_BASE_URL=http://localhost:1221 \
-p 1221:1221 \
-v $(pwd)/papra-data:/app/app-data \
--user $(id -u):$(id -g) \

View File

@@ -24,17 +24,5 @@ To fix this, you can either:
- Ensure that the directory is owned by the user running the container
- Run the server as root (not recommended)
## Invalid application origin
Papra ensures [CSRF](https://en.wikipedia.org/wiki/Cross-site_request_forgery) protection by validating the Origin header in requests. This check ensures that requests originate from the application or a trusted source. Any request that does not originate from a trusted origin will be rejected.
If you are self-hosting Papra, you may encounter an error stating that the application origin is invalid while trying to login or register.
To fix this, you can either:
- Update the `APP_BASE_URL` environment variable to match the url of your application (e.g. `https://papra.my-homelab.tld`)
- Add the current url to the `TRUSTED_ORIGINS` environment variable if you need to allow multiple origins, comma separated. By default the `TRUSTED_ORIGINS` is set to the `APP_BASE_URL`
- If you are using a reverse proxy, you may need to add the url to the `TRUSTED_ORIGINS` environment variable
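
The origin check described above can be sketched as follows; the helper names are hypothetical, since Papra delegates the actual validation to Better Auth's origin-check middleware (referenced in the i18n hunks further down):

```ts
// Hypothetical sketch of the trust rule described above: a request's Origin
// must match APP_BASE_URL or one of the comma-separated TRUSTED_ORIGINS.
function getTrustedOrigins(env: Record<string, string | undefined>): string[] {
  return (env.TRUSTED_ORIGINS ?? env.APP_BASE_URL ?? '')
    .split(',')
    .map(origin => origin.trim())
    .filter(Boolean);
}

function isOriginTrusted({ origin, env }: { origin: string; env: Record<string, string | undefined> }): boolean {
  return getTrustedOrigins(env).some(trusted => new URL(trusted).origin === new URL(origin).origin);
}
```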

View File

@@ -1,27 +1,5 @@
# @papra/app-client
## 0.8.1
## 0.8.0
### Minor Changes
- [#432](https://github.com/papra-hq/papra/pull/432) [`6723baf`](https://github.com/papra-hq/papra/commit/6723baf98ad46f989fe1e1e19ad0dd25622cca77) Thanks [@CorentinTh](https://github.com/CorentinTh)! - Added new webhook events: document:updated, document:tag:added, document:tag:removed
- [#432](https://github.com/papra-hq/papra/pull/432) [`6723baf`](https://github.com/papra-hq/papra/commit/6723baf98ad46f989fe1e1e19ad0dd25622cca77) Thanks [@CorentinTh](https://github.com/CorentinTh)! - Webhooks invocation is now deferred
### Patch Changes
- [#419](https://github.com/papra-hq/papra/pull/419) [`7768840`](https://github.com/papra-hq/papra/commit/7768840aa4425a03cb96dc1c17605bfa8e6a0de4) Thanks [@Edward205](https://github.com/Edward205)! - Added diacritics and improved wording for Romanian translation
- [#448](https://github.com/papra-hq/papra/pull/448) [`5868800`](https://github.com/papra-hq/papra/commit/5868800bcec6ed69b5441b50e4445fae5cdb5bfb) Thanks [@CorentinTh](https://github.com/CorentinTh)! - Added feedback when an error occurs while deleting a tag
- [#412](https://github.com/papra-hq/papra/pull/412) [`ffdae8d`](https://github.com/papra-hq/papra/commit/ffdae8db56c6ecfe63eb263ee606e9469eef8874) Thanks [@OsafAliSayed](https://github.com/OsafAliSayed)! - Simplified the organization intake email list
- [#441](https://github.com/papra-hq/papra/pull/441) [`5e46bb9`](https://github.com/papra-hq/papra/commit/5e46bb9e6a39cd16a83636018370607a27db042a) Thanks [@Zavy86](https://github.com/Zavy86)! - Added Italian (it) language support
- [#455](https://github.com/papra-hq/papra/pull/455) [`b33fde3`](https://github.com/papra-hq/papra/commit/b33fde35d3e8622e31b51aadfe56875d8e48a2ef) Thanks [@CorentinTh](https://github.com/CorentinTh)! - Improved feedback message in case of invalid origin configuration
## 0.7.0
### Minor Changes

View File

@@ -1,7 +1,7 @@
{
"name": "@papra/app-client",
"type": "module",
"version": "0.8.1",
"version": "0.7.0",
"private": true,
"packageManager": "pnpm@10.12.3",
"description": "Papra frontend client",

View File

@@ -546,7 +546,6 @@ api-errors.user.organization_invitation_limit_reached: Die maximale Anzahl an Ei
api-errors.demo.not_available: Diese Funktion ist in der Demo nicht verfügbar
api-errors.tags.already_exists: Ein Tag mit diesem Namen existiert bereits für diese Organisation
api-errors.internal.error: Beim Verarbeiten Ihrer Anfrage ist ein Fehler aufgetreten. Bitte versuchen Sie es erneut.
-api-errors.auth.invalid_origin: Ungültige Anwendungs-Ursprung. Wenn Sie Papra selbst hosten, stellen Sie sicher, dass Ihre APP_BASE_URL-Umgebungsvariable mit Ihrer aktuellen URL übereinstimmt. Weitere Details finden Sie unter https://docs.papra.app/resources/troubleshooting/#invalid-application-origin
# Not found

View File

@@ -546,7 +546,6 @@ api-errors.user.organization_invitation_limit_reached: The maximum number of inv
api-errors.demo.not_available: This feature is not available in demo
api-errors.tags.already_exists: A tag with this name already exists for this organization
api-errors.internal.error: An error occurred while processing your request. Please try again later.
-api-errors.auth.invalid_origin: Invalid application origin. If you are self-hosting Papra, ensure your APP_BASE_URL environment variable matches your current url. For more details see https://docs.papra.app/resources/troubleshooting/#invalid-application-origin
# Not found

View File

@@ -546,7 +546,6 @@ api-errors.user.organization_invitation_limit_reached: Se ha alcanzado el númer
api-errors.demo.not_available: Esta función no está disponible en la demostración
api-errors.tags.already_exists: Ya existe una etiqueta con este nombre en esta organización
api-errors.internal.error: Ocurrió un error al procesar tu solicitud. Por favor, inténtalo de nuevo.
-api-errors.auth.invalid_origin: Origen de la aplicación inválido. Si estás alojando Papra, asegúrate de que la variable de entorno APP_BASE_URL coincida con tu URL actual. Para más detalles, consulta https://docs.papra.app/resources/troubleshooting/#invalid-application-origin
# Not found

View File

@@ -546,7 +546,6 @@ api-errors.user.organization_invitation_limit_reached: Le nombre maximum d'invit
api-errors.demo.not_available: Cette fonctionnalité n'est pas disponible dans la démo
api-errors.tags.already_exists: Un tag avec ce nom existe déjà pour cette organisation
api-errors.internal.error: Une erreur est survenue lors du traitement de votre requête. Veuillez réessayer.
-api-errors.auth.invalid_origin: Origine de l'application invalide. Si vous hébergez Papra, assurez-vous que la variable d'environnement APP_BASE_URL correspond à votre URL actuelle. Pour plus de détails, consultez https://docs.papra.app/resources/troubleshooting/#invalid-application-origin
# Not found

View File

@@ -546,7 +546,6 @@ api-errors.user.organization_invitation_limit_reached: È stato raggiunto il num
api-errors.demo.not_available: Questa funzionalità non è disponibile nella demo
api-errors.tags.already_exists: Esiste già un tag con questo nome per questa organizzazione
api-errors.internal.error: Si è verificato un errore durante l'elaborazione della richiesta. Riprova.
-api-errors.auth.invalid_origin: Origine dell'applicazione non valida. Se stai ospitando Papra, assicurati che la variabile di ambiente APP_BASE_URL corrisponda all'URL corrente. Per maggiori dettagli, consulta https://docs.papra.app/resources/troubleshooting/#invalid-application-origin
# Not found

View File

@@ -546,7 +546,6 @@ api-errors.user.organization_invitation_limit_reached: Osiągnięto maksymalną
api-errors.demo.not_available: Ta funkcja nie jest dostępna w wersji demo
api-errors.tags.already_exists: Tag o tej nazwie już istnieje w tej organizacji
api-errors.internal.error: Wystąpił błąd podczas przetwarzania żądania. Spróbuj ponownie później.
-api-errors.auth.invalid_origin: Nieprawidłowa lokalizacja aplikacji. Jeśli hostujesz Papra, upewnij się, że zmienna środowiskowa APP_BASE_URL odpowiada bieżącemu adresowi URL. Aby uzyskać więcej informacji, zobacz https://docs.papra.app/resources/troubleshooting/#invalid-application-origin
# Not found

View File

@@ -546,7 +546,6 @@ api-errors.user.organization_invitation_limit_reached: O número máximo de conv
api-errors.demo.not_available: Este recurso não está disponível em ambiente de demonstração
api-errors.tags.already_exists: Já existe uma tag com este nome nesta organização
api-errors.internal.error: Ocorreu um erro ao processar sua solicitação. Por favor, tente novamente.
-api-errors.auth.invalid_origin: Origem da aplicação inválida. Se você está hospedando o Papra, certifique-se de que a variável de ambiente APP_BASE_URL corresponde à sua URL atual. Para mais detalhes, consulte https://docs.papra.app/resources/troubleshooting/#invalid-application-origin
# Not found

View File

@@ -546,7 +546,6 @@ api-errors.user.organization_invitation_limit_reached: O número máximo de conv
api-errors.demo.not_available: Este recurso não está disponível em ambiente de demonstração
api-errors.tags.already_exists: Já existe uma etiqueta com este nome nesta organização
api-errors.internal.error: Ocorreu um erro ao processar a solicitação. Por favor, tente novamente.
-api-errors.auth.invalid_origin: Origem da aplicação inválida. Se você está hospedando o Papra, certifique-se de que a variável de ambiente APP_BASE_URL corresponde à sua URL atual. Para mais detalhes, consulte https://docs.papra.app/resources/troubleshooting/#invalid-application-origin
# Not found

View File

@@ -546,7 +546,6 @@ api-errors.user.organization_invitation_limit_reached: Numărul maxim de invita
api-errors.demo.not_available: Această functie nu este disponibila în demo
api-errors.tags.already_exists: O etichetă cu acest nume există deja pentru aceasta organizație
api-errors.internal.error: A apărut o eroare la procesarea cererii. Te rugăm să încerci din nou.
-api-errors.auth.invalid_origin: Origine invalidă a aplicației. Dacă hospedezi Papra, asigură-te că variabila de mediu APP_BASE_URL corespunde URL-ului actual. Pentru mai multe detalii, consulta https://docs.papra.app/resources/troubleshooting/#invalid-application-origin
# Not found

View File

@@ -49,21 +49,12 @@ export async function authWithProvider({ provider, config }: { provider: SsoProv
  const isCustomProvider = config.auth.providers.customs.some(({ providerId }) => providerId === provider.key);

  if (isCustomProvider) {
-    const { error } = await signIn.oauth2({
+    signIn.oauth2({
      providerId: provider.key,
      callbackURL: config.baseUrl,
    });

-    if (error) {
-      throw error;
-    }

    return;
  }

-  const { error } = await signIn.social({ provider: provider.key as 'github' | 'google', callbackURL: config.baseUrl });
-  if (error) {
-    throw error;
-  }
+  await signIn.social({ provider: provider.key as 'github' | 'google', callbackURL: config.baseUrl });
}

View File

@@ -1,47 +1,34 @@
import type { Component } from 'solid-js';
import { createSignal, Match, Switch } from 'solid-js';
-import { useI18nApiErrors } from '@/modules/shared/http/composables/i18n-api-errors';
import { cn } from '@/modules/shared/style/cn';
import { Button } from '@/modules/ui/components/button';

export const SsoProviderButton: Component<{ name: string; icon?: string; onClick: () => Promise<void>; label: string }> = (props) => {
  const [getIsLoading, setIsLoading] = createSignal(false);
-  const [getError, setError] = createSignal<string | undefined>(undefined);
-  const { getErrorMessage } = useI18nApiErrors();

  const onClick = async () => {
    setIsLoading(true);
-    try {
-      await props.onClick();
-    } catch (error) {
-      setError(getErrorMessage({ error }));
-      // reset loading only in catch as the auth via sso can take a while before the redirection happens
-      setIsLoading(false);
-    }
+    await props.onClick();
  };

  return (
-    <>
-      <Button variant="secondary" class="block w-full flex items-center justify-center gap-2" onClick={onClick} disabled={getIsLoading()}>
+    <Button variant="secondary" class="block w-full flex items-center justify-center gap-2" onClick={onClick} disabled={getIsLoading()}>
-        <Switch>
-          <Match when={getIsLoading()}>
-            <span class="i-tabler-loader-2 animate-spin" />
-          </Match>
+      <Switch>
+        <Match when={getIsLoading()}>
+          <span class="i-tabler-loader-2 animate-spin" />
+        </Match>
-          <Match when={props.icon?.startsWith('i-')}>
-            <span class={cn(`size-4.5`, props.icon)} />
-          </Match>
+        <Match when={props.icon?.startsWith('i-')}>
+          <span class={cn(`size-4.5`, props.icon)} />
+        </Match>
-          <Match when={props.icon}>
-            <img src={props.icon} alt={props.name} class="size-4.5" />
-          </Match>
-        </Switch>
+        <Match when={props.icon}>
+          <img src={props.icon} alt={props.name} class="size-4.5" />
+        </Match>
+      </Switch>
-        {props.label}
-      </Button>
-      {getError() && <p class="text-red-500">{getError()}</p>}
-    </>
+      {props.label}
+    </Button>
  );
};

View File

@@ -6,7 +6,6 @@ import * as v from 'valibot';
import { useConfig } from '@/modules/config/config.provider';
import { useI18n } from '@/modules/i18n/i18n.provider';
import { createForm } from '@/modules/shared/form/form';
-import { useI18nApiErrors } from '@/modules/shared/http/composables/i18n-api-errors';
import { Button } from '@/modules/ui/components/button';
import { Checkbox, CheckboxControl, CheckboxLabel } from '@/modules/ui/components/checkbox';
import { Separator } from '@/modules/ui/components/separator';
@@ -22,7 +21,6 @@ export const EmailLoginForm: Component = () => {
const navigate = useNavigate();
const { config } = useConfig();
const { t } = useI18n();
-const { createI18nApiError } = useI18nApiErrors({ t });
const { form, Form, Field } = createForm({
onSubmit: async ({ email, password, rememberMe }) => {
@@ -33,7 +31,7 @@ export const EmailLoginForm: Component = () => {
}
if (error) {
-throw createI18nApiError({ error });
+throw error;
}
},
schema: v.object({

View File

@@ -6,7 +6,6 @@ import * as v from 'valibot';
import { useConfig } from '@/modules/config/config.provider';
import { useI18n } from '@/modules/i18n/i18n.provider';
import { createForm } from '@/modules/shared/form/form';
-import { useI18nApiErrors } from '@/modules/shared/http/composables/i18n-api-errors';
import { Button } from '@/modules/ui/components/button';
import { Separator } from '@/modules/ui/components/separator';
import { TextField, TextFieldLabel, TextFieldRoot } from '@/modules/ui/components/textfield';
@@ -21,8 +20,6 @@ export const EmailRegisterForm: Component = () => {
const { config } = useConfig();
const navigate = useNavigate();
const { t } = useI18n();
-const { createI18nApiError } = useI18nApiErrors({ t });
const { form, Form, Field } = createForm({
onSubmit: async ({ email, password, name }) => {
const { error } = await signUp.email({
@@ -33,7 +30,7 @@ export const EmailRegisterForm: Component = () => {
});
if (error) {
-throw createI18nApiError({ error });
+throw error;
}
if (config.auth.isEmailVerificationRequired) {

View File

@@ -70,14 +70,13 @@ describe('i18n models', () => {
expect(t('hello')).to.eql('Hello!');
});
-test('the translator returns undefined if the key is not in the dictionary', () => {
+test('the translator returns the key if the key is not in the dictionary', () => {
const dictionary = {
hello: 'Hello!',
};
const t = createTranslator({ getDictionary: () => dictionary });
-expect(t('world' as any)).to.eql(undefined);
-expect(t('world' as any, { name: 'John' })).to.eql(undefined);
+expect(t('world' as any)).to.eql('world');
});
test('the translator replaces the placeholders in the translation', () => {

View File

@@ -36,15 +36,15 @@ export function createTranslator<Dict extends Record<string, string>>({ getDicti
console.warn(`Translation not found for key: ${String(key)}`);
}
-    if (args && translationFromDictionary) {
-      return Object.entries(args)
-        .reduce(
-          (acc, [key, value]) => acc.replace(new RegExp(`{{\\s*${key}\\s*}}`, 'g'), String(value)),
-          String(translationFromDictionary),
-        );
-    }
-    return translationFromDictionary;
+    let translation: string = translationFromDictionary ?? key;
+    if (args) {
+      for (const [key, value] of Object.entries(args)) {
+        translation = translation.replace(new RegExp(`{{\\s*${key}\\s*}}`, 'g'), String(value));
+      }
+    }
+    return translation;
};
}
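
A quick usage sketch of the behavior on the added side of this hunk: placeholders are substituted, and a missing key now falls back to the key itself instead of `undefined`:

```ts
const t = createTranslator({ getDictionary: () => ({ greeting: 'Hello {{ name }}!' }) });

t('greeting', { name: 'Ada' }); // => 'Hello Ada!'
t('missing' as any); // => 'missing' (a warning is also logged)
```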

View File

@@ -485,7 +485,6 @@ export type LocaleKeys =
| 'api-errors.demo.not_available'
| 'api-errors.tags.already_exists'
| 'api-errors.internal.error'
-| 'api-errors.auth.invalid_origin'
| 'not-found.title'
| 'not-found.description'
| 'not-found.back-to-home'

View File

@@ -2,46 +2,28 @@ import type { LocaleKeys } from '@/modules/i18n/locales.types';
import { get } from 'lodash-es';
import { useI18n } from '@/modules/i18n/i18n.provider';
-function codeToKey(code: string): LocaleKeys {
-  // Better auth may returns different error codes like INVALID_ORIGIN, INVALID_CALLBACKURL when the origin is invalid
-  // codes are here https://github.com/better-auth/better-auth/blob/canary/packages/better-auth/src/api/middlewares/origin-check.ts#L71 (in lower case)
-  if (code.match(/^INVALID_[A-Z]+URL$/) || code === 'INVALID_ORIGIN') {
-    return `api-errors.auth.invalid_origin`;
-  }
-  return `api-errors.${code}` as LocaleKeys;
-}

export function useI18nApiErrors({ t = useI18n().t }: { t?: ReturnType<typeof useI18n>['t'] } = {}) {
-  const getDefaultErrorMessage = () => t('api-errors.default');
+  const getTranslationFromApiErrorCode = ({ code }: { code: string }) => {
+    return t(`api-errors.${code}` as LocaleKeys);
+  };

-  const getErrorMessage = (args: { error: unknown } | { code: string }) => {
-    if ('code' in args) {
-      const { code } = args;
-      return t(codeToKey(code)) ?? getDefaultErrorMessage();
-    }
-    if ('error' in args) {
-      const { error } = args;
-      const code = get(error, 'data.error.code') ?? get(error, 'code');
-      const translation = code ? t(codeToKey(code)) : undefined;
-      if (translation) {
-        return translation;
-      }
-      if (typeof error === 'object' && error && 'message' in error && typeof error.message === 'string') {
-        return error.message;
-      }
-    }
-    return getDefaultErrorMessage();
+  const getTranslationFromApiError = ({ error }: { error: unknown }) => {
+    const code = get(error, 'data.error.code') ?? get(error, 'code');
+    if (!code) {
+      return t('api-errors.default');
+    }
+    return getTranslationFromApiErrorCode({ code });
  };

  return {
-    getErrorMessage,
-    createI18nApiError: (args: { error: unknown } | { code: string }) => {
-      return new Error(getErrorMessage(args));
+    getErrorMessage: (args: { error: unknown } | { code: string }) => {
+      if ('error' in args) {
+        return getTranslationFromApiError({ error: args.error });
+      }
+      return getTranslationFromApiErrorCode({ code: args.code });
    },
};
}
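
Usage sketch for the resolver on the added side of this hunk: a code, whether passed directly or extracted from an error object's `data.error.code`, is mapped onto an `api-errors.*` locale key, with `api-errors.default` as the fallback:

```ts
const { getErrorMessage } = useI18nApiErrors();

getErrorMessage({ code: 'tags.already_exists' }); // t('api-errors.tags.already_exists')
getErrorMessage({ error: { data: { error: { code: 'demo.not_available' } } } }); // t('api-errors.demo.not_available')
getErrorMessage({ error: new Error('boom') }); // no code found => t('api-errors.default')
```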

View File

@@ -1,33 +1,5 @@
# @papra/app-server
## 0.8.1
### Patch Changes
- [#459](https://github.com/papra-hq/papra/pull/459) [`f20559e`](https://github.com/papra-hq/papra/commit/f20559e95d1dc7d7a099dfd9a9df42bf5ce1b0b2) Thanks [@CorentinTh](https://github.com/CorentinTh)! - Removed dev-dependency needed in production build
## 0.8.0
### Minor Changes
- [#452](https://github.com/papra-hq/papra/pull/452) [`7f7e5bf`](https://github.com/papra-hq/papra/commit/7f7e5bffcbcfb843f3b2458400dfb44409a44867) Thanks [@CorentinTh](https://github.com/CorentinTh)! - Completely rewrote the migration mechanism
- [#447](https://github.com/papra-hq/papra/pull/447) [`b5ccc13`](https://github.com/papra-hq/papra/commit/b5ccc135ba7f4359eaf85221bcb40ee63ba7d6c7) Thanks [@CorentinTh](https://github.com/CorentinTh)! - The file content extraction (like OCR) is now done asynchronously by the task runner
- [#448](https://github.com/papra-hq/papra/pull/448) [`5868800`](https://github.com/papra-hq/papra/commit/5868800bcec6ed69b5441b50e4445fae5cdb5bfb) Thanks [@CorentinTh](https://github.com/CorentinTh)! - Fixed the inability to delete a tag that has been assigned to a document
- [#432](https://github.com/papra-hq/papra/pull/432) [`6723baf`](https://github.com/papra-hq/papra/commit/6723baf98ad46f989fe1e1e19ad0dd25622cca77) Thanks [@CorentinTh](https://github.com/CorentinTh)! - Added new webhook events: document:updated, document:tag:added, document:tag:removed
- [#432](https://github.com/papra-hq/papra/pull/432) [`6723baf`](https://github.com/papra-hq/papra/commit/6723baf98ad46f989fe1e1e19ad0dd25622cca77) Thanks [@CorentinTh](https://github.com/CorentinTh)! - Webhooks invocation is now deferred
### Patch Changes
- [#455](https://github.com/papra-hq/papra/pull/455) [`b33fde3`](https://github.com/papra-hq/papra/commit/b33fde35d3e8622e31b51aadfe56875d8e48a2ef) Thanks [@CorentinTh](https://github.com/CorentinTh)! - Improved feedback message in case of invalid origin configuration
- Updated dependencies [[`a8cff8c`](https://github.com/papra-hq/papra/commit/a8cff8cedc062be3ed1d454e9de6e456553a4d8c), [`6723baf`](https://github.com/papra-hq/papra/commit/6723baf98ad46f989fe1e1e19ad0dd25622cca77), [`6723baf`](https://github.com/papra-hq/papra/commit/6723baf98ad46f989fe1e1e19ad0dd25622cca77), [`67b3b14`](https://github.com/papra-hq/papra/commit/67b3b14cdfa994874c695b9d854a93160ba6a911)]:
- @papra/webhooks@0.2.0
- @papra/lecture@0.1.0
## 0.7.0
### Minor Changes

View File

@@ -4,7 +4,7 @@ import { defineConfig } from 'drizzle-kit';
export default defineConfig({
schema: ['./src/modules/**/*.table.ts', './src/modules/**/*.tables.ts'],
dialect: 'turso',
-out: './src/migrations',
+out: './migrations',
dbCredentials: {
url: env.DATABASE_URL ?? 'file:./db.sqlite',
authToken: env.DATABASE_AUTH_TOKEN,

View File

@@ -0,0 +1,172 @@
CREATE TABLE `documents` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`is_deleted` integer DEFAULT false NOT NULL,
`deleted_at` integer,
`organization_id` text NOT NULL,
`created_by` text,
`deleted_by` text,
`original_name` text NOT NULL,
`original_size` integer DEFAULT 0 NOT NULL,
`original_storage_key` text NOT NULL,
`original_sha256_hash` text NOT NULL,
`name` text NOT NULL,
`mime_type` text NOT NULL,
`content` text DEFAULT '' NOT NULL,
FOREIGN KEY (`organization_id`) REFERENCES `organizations`(`id`) ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY (`created_by`) REFERENCES `users`(`id`) ON UPDATE cascade ON DELETE set null,
FOREIGN KEY (`deleted_by`) REFERENCES `users`(`id`) ON UPDATE cascade ON DELETE set null
);
--> statement-breakpoint
CREATE INDEX `documents_organization_id_is_deleted_created_at_index` ON `documents` (`organization_id`,`is_deleted`,`created_at`);--> statement-breakpoint
CREATE INDEX `documents_organization_id_is_deleted_index` ON `documents` (`organization_id`,`is_deleted`);--> statement-breakpoint
CREATE UNIQUE INDEX `documents_organization_id_original_sha256_hash_unique` ON `documents` (`organization_id`,`original_sha256_hash`);--> statement-breakpoint
CREATE INDEX `documents_original_sha256_hash_index` ON `documents` (`original_sha256_hash`);--> statement-breakpoint
CREATE INDEX `documents_organization_id_size_index` ON `documents` (`organization_id`,`original_size`);--> statement-breakpoint
CREATE TABLE `organization_invitations` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`organization_id` text NOT NULL,
`email` text NOT NULL,
`role` text,
`status` text NOT NULL,
`expires_at` integer NOT NULL,
`inviter_id` text NOT NULL,
FOREIGN KEY (`organization_id`) REFERENCES `organizations`(`id`) ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY (`inviter_id`) REFERENCES `users`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `organization_members` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`organization_id` text NOT NULL,
`user_id` text NOT NULL,
`role` text NOT NULL,
FOREIGN KEY (`organization_id`) REFERENCES `organizations`(`id`) ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY (`user_id`) REFERENCES `users`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE UNIQUE INDEX `organization_members_user_organization_unique` ON `organization_members` (`organization_id`,`user_id`);--> statement-breakpoint
CREATE TABLE `organizations` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`name` text NOT NULL,
`customer_id` text
);
--> statement-breakpoint
CREATE TABLE `user_roles` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`user_id` text NOT NULL,
`role` text NOT NULL,
FOREIGN KEY (`user_id`) REFERENCES `users`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE INDEX `user_roles_role_index` ON `user_roles` (`role`);--> statement-breakpoint
CREATE UNIQUE INDEX `user_roles_user_id_role_unique_index` ON `user_roles` (`user_id`,`role`);--> statement-breakpoint
CREATE TABLE `documents_tags` (
`document_id` text NOT NULL,
`tag_id` text NOT NULL,
PRIMARY KEY(`document_id`, `tag_id`),
FOREIGN KEY (`document_id`) REFERENCES `documents`(`id`) ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY (`tag_id`) REFERENCES `tags`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `tags` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`organization_id` text NOT NULL,
`name` text NOT NULL,
`color` text NOT NULL,
`description` text,
FOREIGN KEY (`organization_id`) REFERENCES `organizations`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE UNIQUE INDEX `tags_organization_id_name_unique` ON `tags` (`organization_id`,`name`);--> statement-breakpoint
CREATE TABLE `users` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`email` text NOT NULL,
`email_verified` integer DEFAULT false NOT NULL,
`name` text,
`image` text,
`max_organization_count` integer
);
--> statement-breakpoint
CREATE UNIQUE INDEX `users_email_unique` ON `users` (`email`);--> statement-breakpoint
CREATE INDEX `users_email_index` ON `users` (`email`);--> statement-breakpoint
CREATE TABLE `auth_accounts` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`user_id` text,
`account_id` text NOT NULL,
`provider_id` text NOT NULL,
`access_token` text,
`refresh_token` text,
`access_token_expires_at` integer,
`refresh_token_expires_at` integer,
`scope` text,
`id_token` text,
`password` text,
FOREIGN KEY (`user_id`) REFERENCES `users`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `auth_sessions` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`token` text NOT NULL,
`user_id` text,
`expires_at` integer NOT NULL,
`ip_address` text,
`user_agent` text,
`active_organization_id` text,
FOREIGN KEY (`user_id`) REFERENCES `users`(`id`) ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY (`active_organization_id`) REFERENCES `organizations`(`id`) ON UPDATE cascade ON DELETE set null
);
--> statement-breakpoint
CREATE INDEX `auth_sessions_token_index` ON `auth_sessions` (`token`);--> statement-breakpoint
CREATE TABLE `auth_verifications` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`identifier` text NOT NULL,
`value` text NOT NULL,
`expires_at` integer NOT NULL
);
--> statement-breakpoint
CREATE INDEX `auth_verifications_identifier_index` ON `auth_verifications` (`identifier`);--> statement-breakpoint
CREATE TABLE `intake_emails` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`email_address` text NOT NULL,
`organization_id` text NOT NULL,
`allowed_origins` text DEFAULT '[]' NOT NULL,
`is_enabled` integer DEFAULT true NOT NULL,
FOREIGN KEY (`organization_id`) REFERENCES `organizations`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE UNIQUE INDEX `intake_emails_email_address_unique` ON `intake_emails` (`email_address`);--> statement-breakpoint
CREATE TABLE `organization_subscriptions` (
`id` text PRIMARY KEY NOT NULL,
`customer_id` text NOT NULL,
`organization_id` text NOT NULL,
`plan_id` text NOT NULL,
`status` text NOT NULL,
`seats_count` integer NOT NULL,
`current_period_end` integer NOT NULL,
`current_period_start` integer NOT NULL,
`cancel_at_period_end` integer DEFAULT false NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
FOREIGN KEY (`organization_id`) REFERENCES `organizations`(`id`) ON UPDATE cascade ON DELETE cascade
);

View File

@@ -0,0 +1,23 @@
-- Migration for adding full-text search virtual table for documents
CREATE VIRTUAL TABLE documents_fts USING fts5(id UNINDEXED, name, original_name, content, prefix='2 3 4');
--> statement-breakpoint
-- Copy data from documents to documents_fts for existing records
INSERT INTO documents_fts(id, name, original_name, content)
SELECT id, name, original_name, content FROM documents;
--> statement-breakpoint
CREATE TRIGGER trigger_documents_fts_insert AFTER INSERT ON documents BEGIN
INSERT INTO documents_fts(id, name, original_name, content) VALUES (new.id, new.name, new.original_name, new.content);
END;
--> statement-breakpoint
CREATE TRIGGER trigger_documents_fts_update AFTER UPDATE ON documents BEGIN
UPDATE documents_fts SET name = new.name, original_name = new.original_name, content = new.content WHERE id = new.id;
END;
--> statement-breakpoint
CREATE TRIGGER trigger_documents_fts_delete AFTER DELETE ON documents BEGIN
DELETE FROM documents_fts WHERE id = old.id;
END;
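
As a usage sketch, searches against this virtual table go through fts5's `MATCH` operator, and the `prefix='2 3 4'` option builds prefix indexes so short prefix queries such as `inv*` stay fast. The drizzle-flavored query below is illustrative rather than taken from the Papra sources:

```ts
import { sql } from 'drizzle-orm';

// Illustrative full-text lookup; `db` stands for the drizzle/libSQL handle used elsewhere in this diff.
async function searchDocuments({ db, term }: { db: { run: (q: unknown) => Promise<{ rows: unknown[] }> }; term: string }) {
  const { rows } = await db.run(sql`
    SELECT id FROM documents_fts
    WHERE documents_fts MATCH ${`${term}*`}
    ORDER BY rank
    LIMIT 10
  `);
  return rows;
}
```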

View File

@@ -0,0 +1,32 @@
CREATE TABLE `tagging_rule_actions` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`tagging_rule_id` text NOT NULL,
`tag_id` text NOT NULL,
FOREIGN KEY (`tagging_rule_id`) REFERENCES `tagging_rules`(`id`) ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY (`tag_id`) REFERENCES `tags`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `tagging_rule_conditions` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`tagging_rule_id` text NOT NULL,
`field` text NOT NULL,
`operator` text NOT NULL,
`value` text NOT NULL,
`is_case_sensitive` integer DEFAULT false NOT NULL,
FOREIGN KEY (`tagging_rule_id`) REFERENCES `tagging_rules`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `tagging_rules` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`organization_id` text NOT NULL,
`name` text NOT NULL,
`description` text,
`enabled` integer DEFAULT true NOT NULL,
FOREIGN KEY (`organization_id`) REFERENCES `organizations`(`id`) ON UPDATE cascade ON DELETE cascade
);

View File

@@ -0,0 +1,24 @@
CREATE TABLE `api_key_organizations` (
`api_key_id` text NOT NULL,
`organization_member_id` text NOT NULL,
FOREIGN KEY (`api_key_id`) REFERENCES `api_keys`(`id`) ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY (`organization_member_id`) REFERENCES `organization_members`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `api_keys` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`name` text NOT NULL,
`key_hash` text NOT NULL,
`prefix` text NOT NULL,
`user_id` text NOT NULL,
`last_used_at` integer,
`expires_at` integer,
`permissions` text DEFAULT '[]' NOT NULL,
`all_organizations` integer DEFAULT false NOT NULL,
FOREIGN KEY (`user_id`) REFERENCES `users`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE UNIQUE INDEX `api_keys_key_hash_unique` ON `api_keys` (`key_hash`);--> statement-breakpoint
CREATE INDEX `key_hash_index` ON `api_keys` (`key_hash`);

View File

@@ -0,0 +1,35 @@
CREATE TABLE `webhook_deliveries` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`webhook_id` text NOT NULL,
`event_name` text NOT NULL,
`request_payload` text NOT NULL,
`response_payload` text NOT NULL,
`response_status` integer NOT NULL,
FOREIGN KEY (`webhook_id`) REFERENCES `webhooks`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `webhook_events` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`webhook_id` text NOT NULL,
`event_name` text NOT NULL,
FOREIGN KEY (`webhook_id`) REFERENCES `webhooks`(`id`) ON UPDATE cascade ON DELETE cascade
);
--> statement-breakpoint
CREATE UNIQUE INDEX `webhook_events_webhook_id_event_name_unique` ON `webhook_events` (`webhook_id`,`event_name`);--> statement-breakpoint
CREATE TABLE `webhooks` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`name` text NOT NULL,
`url` text NOT NULL,
`secret` text,
`enabled` integer DEFAULT true NOT NULL,
`created_by` text,
`organization_id` text,
FOREIGN KEY (`created_by`) REFERENCES `users`(`id`) ON UPDATE cascade ON DELETE set null,
FOREIGN KEY (`organization_id`) REFERENCES `organizations`(`id`) ON UPDATE cascade ON DELETE cascade
);

View File

@@ -0,0 +1,4 @@
ALTER TABLE `organization_invitations` ALTER COLUMN "role" TO "role" text NOT NULL;--> statement-breakpoint
CREATE UNIQUE INDEX `organization_invitations_organization_email_unique` ON `organization_invitations` (`organization_id`,`email`);--> statement-breakpoint
ALTER TABLE `organization_invitations` ALTER COLUMN "status" TO "status" text NOT NULL DEFAULT 'pending';

View File

@@ -0,0 +1,12 @@
CREATE TABLE `document_activity_log` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`document_id` text NOT NULL,
`event` text NOT NULL,
`event_data` text,
`user_id` text,
`tag_id` text,
FOREIGN KEY (`document_id`) REFERENCES `documents`(`id`) ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY (`user_id`) REFERENCES `users`(`id`) ON UPDATE cascade ON DELETE no action,
FOREIGN KEY (`tag_id`) REFERENCES `tags`(`id`) ON UPDATE cascade ON DELETE no action
);

View File

@@ -0,0 +1,18 @@
PRAGMA foreign_keys=OFF;--> statement-breakpoint
CREATE TABLE `__new_document_activity_log` (
`id` text PRIMARY KEY NOT NULL,
`created_at` integer NOT NULL,
`document_id` text NOT NULL,
`event` text NOT NULL,
`event_data` text,
`user_id` text,
`tag_id` text,
FOREIGN KEY (`document_id`) REFERENCES `documents`(`id`) ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY (`user_id`) REFERENCES `users`(`id`) ON UPDATE cascade ON DELETE set null,
FOREIGN KEY (`tag_id`) REFERENCES `tags`(`id`) ON UPDATE cascade ON DELETE set null
);
--> statement-breakpoint
INSERT INTO `__new_document_activity_log`("id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id") SELECT "id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id" FROM `document_activity_log`;--> statement-breakpoint
DROP TABLE `document_activity_log`;--> statement-breakpoint
ALTER TABLE `__new_document_activity_log` RENAME TO `document_activity_log`;--> statement-breakpoint
PRAGMA foreign_keys=ON;

View File

@@ -1,7 +1,7 @@
{
"name": "@papra/app-server",
"type": "module",
"version": "0.8.1",
"version": "0.7.0",
"private": true,
"packageManager": "pnpm@10.12.3",
"description": "Papra app server",
@@ -12,16 +12,14 @@
"dev": "tsx watch --env-file-if-exists=.env src/index.ts | crowlog-pretty",
"build": "pnpm esbuild --bundle src/index.ts --platform=node --packages=external --format=esm --outfile=dist/index.js --minify",
"start": "node dist/index.js",
"start:with-migrations": "pnpm migrate:up:prod && pnpm start",
"start:with-migrations": "pnpm migrate:up && pnpm start",
"lint": "eslint .",
"lint:fix": "eslint --fix .",
"test": "vitest run",
"test:watch": "vitest watch",
"typecheck": "tsc --noEmit",
"migrate:up": "tsx --env-file-if-exists=.env src/scripts/migrate-up.script.ts | crowlog-pretty",
"migrate:up:prod": "tsx src/scripts/migrate-up.script.ts",
"migrate:up": "tsx --env-file-if-exists=.env src/scripts/migrate-up.script.ts",
"migrate:push": "drizzle-kit push",
"migrate:create": "sh -c 'drizzle-kit generate --name \"$1\" && tsx --env-file-if-exists=.env src/scripts/create-migration.ts \"$1\" | crowlog-pretty' --",
"db:studio": "drizzle-kit studio",
"clean:dist": "rm -rf dist",
"clean:db": "rm db.sqlite",
@@ -87,7 +85,6 @@
"@vitest/coverage-v8": "catalog:",
"esbuild": "^0.24.2",
"eslint": "catalog:",
"magicast": "^0.3.5",
"memfs": "^4.17.2",
"typescript": "catalog:",
"vitest": "catalog:"

View File

@@ -1,51 +0,0 @@
import { sql } from 'drizzle-orm';
import { describe, expect, test } from 'vitest';
import { setupDatabase } from '../../modules/app/database/database';
import { initialSchemaSetupMigration } from './0001-initial-schema-setup.migration';
describe('0001-initial-schema-setup migration', () => {
describe('initialSchemaSetupMigration', () => {
test('the up setup some default tables', async () => {
const { db } = setupDatabase({ url: ':memory:' });
await initialSchemaSetupMigration.up({ db });
const { rows: existingTables } = await db.run(sql`SELECT name FROM sqlite_master WHERE name NOT LIKE 'sqlite_%'`);
expect(existingTables.map(({ name }) => name)).to.eql([
'documents',
'documents_organization_id_is_deleted_created_at_index',
'documents_organization_id_is_deleted_index',
'documents_organization_id_original_sha256_hash_unique',
'documents_original_sha256_hash_index',
'documents_organization_id_size_index',
'organization_invitations',
'organization_members',
'organization_members_user_organization_unique',
'organizations',
'user_roles',
'user_roles_role_index',
'user_roles_user_id_role_unique_index',
'documents_tags',
'tags',
'tags_organization_id_name_unique',
'users',
'users_email_unique',
'users_email_index',
'auth_accounts',
'auth_sessions',
'auth_sessions_token_index',
'auth_verifications',
'auth_verifications_identifier_index',
'intake_emails',
'intake_emails_email_address_unique',
'organization_subscriptions',
]);
await initialSchemaSetupMigration.down({ db });
const { rows: existingTablesAfterDown } = await db.run(sql`SELECT name FROM sqlite_master WHERE name NOT LIKE 'sqlite_%'`);
expect(existingTablesAfterDown.map(({ name }) => name)).to.eql([]);
});
});
});

View File

@@ -1,220 +0,0 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const initialSchemaSetupMigration = {
name: 'initial-schema-setup',
description: 'Creation of the base tables for the application',
up: async ({ db }) => {
await db.batch([
db.run(sql`
CREATE TABLE IF NOT EXISTS "documents" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"is_deleted" integer DEFAULT false NOT NULL,
"deleted_at" integer,
"organization_id" text NOT NULL,
"created_by" text,
"deleted_by" text,
"original_name" text NOT NULL,
"original_size" integer DEFAULT 0 NOT NULL,
"original_storage_key" text NOT NULL,
"original_sha256_hash" text NOT NULL,
"name" text NOT NULL,
"mime_type" text NOT NULL,
"content" text DEFAULT '' NOT NULL,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("created_by") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null,
FOREIGN KEY ("deleted_by") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null
);
`),
db.run(sql`CREATE INDEX IF NOT EXISTS "documents_organization_id_is_deleted_created_at_index" ON "documents" ("organization_id","is_deleted","created_at");`),
db.run(sql`CREATE INDEX IF NOT EXISTS "documents_organization_id_is_deleted_index" ON "documents" ("organization_id","is_deleted");`),
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "documents_organization_id_original_sha256_hash_unique" ON "documents" ("organization_id","original_sha256_hash");`),
db.run(sql`CREATE INDEX IF NOT EXISTS "documents_original_sha256_hash_index" ON "documents" ("original_sha256_hash");`),
db.run(sql`CREATE INDEX IF NOT EXISTS "documents_organization_id_size_index" ON "documents" ("organization_id","original_size");`),
db.run(sql`
CREATE TABLE IF NOT EXISTS "organization_invitations" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"organization_id" text NOT NULL,
"email" text NOT NULL,
"role" text,
"status" text NOT NULL,
"expires_at" integer NOT NULL,
"inviter_id" text NOT NULL,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("inviter_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade
);
`),
db.run(sql`CREATE TABLE IF NOT EXISTS "organization_members" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"organization_id" text NOT NULL,
"user_id" text NOT NULL,
"role" text NOT NULL,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade
);`),
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "organization_members_user_organization_unique" ON "organization_members" ("organization_id","user_id");`),
db.run(sql`CREATE TABLE IF NOT EXISTS "organizations" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"name" text NOT NULL,
"customer_id" text
);`),
db.run(sql`CREATE TABLE IF NOT EXISTS "user_roles" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"user_id" text NOT NULL,
"role" text NOT NULL,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade
);`),
db.run(sql`CREATE INDEX IF NOT EXISTS "user_roles_role_index" ON "user_roles" ("role");`),
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "user_roles_user_id_role_unique_index" ON "user_roles" ("user_id","role");`),
db.run(sql`CREATE TABLE IF NOT EXISTS "documents_tags" (
"document_id" text NOT NULL,
"tag_id" text NOT NULL,
PRIMARY KEY("document_id", "tag_id"),
FOREIGN KEY ("document_id") REFERENCES "documents"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE cascade
);`),
db.run(sql`CREATE TABLE IF NOT EXISTS "tags" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"organization_id" text NOT NULL,
"name" text NOT NULL,
"color" text NOT NULL,
"description" text,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade
);`),
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "tags_organization_id_name_unique" ON "tags" ("organization_id","name");`),
db.run(sql`
CREATE TABLE IF NOT EXISTS "users" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"email" text NOT NULL,
"email_verified" integer DEFAULT false NOT NULL,
"name" text,
"image" text,
"max_organization_count" integer
);
`),
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "users_email_unique" ON "users" ("email");`),
db.run(sql`CREATE INDEX IF NOT EXISTS "users_email_index" ON "users" ("email");`),
db.run(sql`CREATE TABLE IF NOT EXISTS "auth_accounts" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"user_id" text,
"account_id" text NOT NULL,
"provider_id" text NOT NULL,
"access_token" text,
"refresh_token" text,
"access_token_expires_at" integer,
"refresh_token_expires_at" integer,
"scope" text,
"id_token" text,
"password" text,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade
);`),
db.run(sql`CREATE TABLE IF NOT EXISTS "auth_sessions" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"token" text NOT NULL,
"user_id" text,
"expires_at" integer NOT NULL,
"ip_address" text,
"user_agent" text,
"active_organization_id" text,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("active_organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE set null
);`),
db.run(sql`CREATE INDEX IF NOT EXISTS "auth_sessions_token_index" ON "auth_sessions" ("token");`),
db.run(sql`CREATE TABLE IF NOT EXISTS "auth_verifications" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"identifier" text NOT NULL,
"value" text NOT NULL,
"expires_at" integer NOT NULL
);`),
db.run(sql`CREATE INDEX IF NOT EXISTS "auth_verifications_identifier_index" ON "auth_verifications" ("identifier");`),
db.run(sql`CREATE TABLE IF NOT EXISTS "intake_emails" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"email_address" text NOT NULL,
"organization_id" text NOT NULL,
"allowed_origins" text DEFAULT '[]' NOT NULL,
"is_enabled" integer DEFAULT true NOT NULL,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade
);`),
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "intake_emails_email_address_unique" ON "intake_emails" ("email_address");`),
db.run(sql`CREATE TABLE IF NOT EXISTS "organization_subscriptions" (
"id" text PRIMARY KEY NOT NULL,
"customer_id" text NOT NULL,
"organization_id" text NOT NULL,
"plan_id" text NOT NULL,
"status" text NOT NULL,
"seats_count" integer NOT NULL,
"current_period_end" integer NOT NULL,
"current_period_start" integer NOT NULL,
"cancel_at_period_end" integer DEFAULT false NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade
);`),
]);
},
down: async ({ db }) => {
await db.batch([
// Tables
db.run(sql`DROP TABLE IF EXISTS "organization_subscriptions";`),
db.run(sql`DROP TABLE IF EXISTS "intake_emails";`),
db.run(sql`DROP TABLE IF EXISTS "auth_verifications";`),
db.run(sql`DROP TABLE IF EXISTS "auth_sessions";`),
db.run(sql`DROP TABLE IF EXISTS "auth_accounts";`),
db.run(sql`DROP TABLE IF EXISTS "tags";`),
db.run(sql`DROP TABLE IF EXISTS "documents_tags";`),
db.run(sql`DROP TABLE IF EXISTS "user_roles";`),
db.run(sql`DROP TABLE IF EXISTS "organizations";`),
db.run(sql`DROP TABLE IF EXISTS "organization_members";`),
db.run(sql`DROP TABLE IF EXISTS "organization_invitations";`),
db.run(sql`DROP TABLE IF EXISTS "documents";`),
db.run(sql`DROP TABLE IF EXISTS "users";`),
// // Indexes
db.run(sql`DROP INDEX IF EXISTS "documents_organization_id_is_deleted_created_at_index";`),
db.run(sql`DROP INDEX IF EXISTS "documents_organization_id_is_deleted_index";`),
db.run(sql`DROP INDEX IF EXISTS "documents_organization_id_original_sha256_hash_unique";`),
db.run(sql`DROP INDEX IF EXISTS "documents_original_sha256_hash_index";`),
db.run(sql`DROP INDEX IF EXISTS "documents_organization_id_size_index";`),
db.run(sql`DROP INDEX IF EXISTS "user_roles_role_index";`),
db.run(sql`DROP INDEX IF EXISTS "user_roles_user_id_role_unique_index";`),
db.run(sql`DROP INDEX IF EXISTS "tags_organization_id_name_unique";`),
db.run(sql`DROP INDEX IF EXISTS "users_email_unique";`),
]);
},
} satisfies Migration;
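
The deleted migration modules in this diff all follow the same contract: a named object with `up`/`down` callbacks that receive a database handle and run batched statements. A plausible shape for that `Migration` type, inferred from usage here (the real `migrations.types.ts` is not part of this diff):

```ts
// Assumed shapes, reconstructed from how the deleted files use them; not the actual source.
interface MigrationDb {
  run: (query: unknown) => any; // db.run(sql`...`)
  batch: (queries: any[]) => Promise<unknown>; // db.batch([db.run(...), ...])
}

export interface Migration {
  name: string;
  description?: string;
  up: (args: { db: MigrationDb }) => Promise<void>;
  down: (args: { db: MigrationDb }) => Promise<void>;
}
```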

View File

@@ -1,37 +0,0 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const documentsFtsMigration = {
name: 'documents-fts',
up: async ({ db }) => {
await db.batch([
db.run(sql`CREATE VIRTUAL TABLE IF NOT EXISTS documents_fts USING fts5(id UNINDEXED, name, original_name, content, prefix='2 3 4')`),
db.run(sql`INSERT INTO documents_fts(id, name, original_name, content) SELECT id, name, original_name, content FROM documents`),
db.run(sql`
CREATE TRIGGER IF NOT EXISTS trigger_documents_fts_insert AFTER INSERT ON documents BEGIN
INSERT INTO documents_fts(id, name, original_name, content) VALUES (new.id, new.name, new.original_name, new.content);
END
`),
db.run(sql`
CREATE TRIGGER IF NOT EXISTS trigger_documents_fts_update AFTER UPDATE ON documents BEGIN
UPDATE documents_fts SET name = new.name, original_name = new.original_name, content = new.content WHERE id = new.id;
END
`),
db.run(sql`
CREATE TRIGGER IF NOT EXISTS trigger_documents_fts_delete AFTER DELETE ON documents BEGIN
DELETE FROM documents_fts WHERE id = old.id;
END
`),
]);
},
down: async ({ db }) => {
await db.batch([
db.run(sql`DROP TRIGGER IF EXISTS trigger_documents_fts_insert`),
db.run(sql`DROP TRIGGER IF EXISTS trigger_documents_fts_update`),
db.run(sql`DROP TRIGGER IF EXISTS trigger_documents_fts_delete`),
db.run(sql`DROP TABLE IF EXISTS documents_fts`),
]);
},
} satisfies Migration;

View File

@@ -1,57 +0,0 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const taggingRulesMigration = {
name: 'tagging-rules',
up: async ({ db }) => {
await db.batch([
db.run(sql`
CREATE TABLE IF NOT EXISTS "tagging_rule_actions" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"tagging_rule_id" text NOT NULL,
"tag_id" text NOT NULL,
FOREIGN KEY ("tagging_rule_id") REFERENCES "tagging_rules"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE cascade
);
`),
db.run(sql`
CREATE TABLE IF NOT EXISTS "tagging_rule_conditions" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"tagging_rule_id" text NOT NULL,
"field" text NOT NULL,
"operator" text NOT NULL,
"value" text NOT NULL,
"is_case_sensitive" integer DEFAULT false NOT NULL,
FOREIGN KEY ("tagging_rule_id") REFERENCES "tagging_rules"("id") ON UPDATE cascade ON DELETE cascade
);
`),
db.run(sql`
CREATE TABLE IF NOT EXISTS "tagging_rules" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"organization_id" text NOT NULL,
"name" text NOT NULL,
"description" text,
"enabled" integer DEFAULT true NOT NULL,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade
);
`),
]);
},
down: async ({ db }) => {
await db.batch([
db.run(sql`DROP TABLE IF EXISTS "tagging_rule_actions"`),
db.run(sql`DROP TABLE IF EXISTS "tagging_rule_conditions"`),
db.run(sql`DROP TABLE IF EXISTS "tagging_rules"`),
]);
},
} satisfies Migration;

View File

@@ -1,46 +0,0 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const apiKeysMigration = {
name: 'api-keys',
up: async ({ db }) => {
await db.batch([
db.run(sql`
CREATE TABLE IF NOT EXISTS "api_key_organizations" (
"api_key_id" text NOT NULL,
"organization_member_id" text NOT NULL,
FOREIGN KEY ("api_key_id") REFERENCES "api_keys"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("organization_member_id") REFERENCES "organization_members"("id") ON UPDATE cascade ON DELETE cascade
);
`),
db.run(sql`
CREATE TABLE IF NOT EXISTS "api_keys" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"name" text NOT NULL,
"key_hash" text NOT NULL,
"prefix" text NOT NULL,
"user_id" text NOT NULL,
"last_used_at" integer,
"expires_at" integer,
"permissions" text DEFAULT '[]' NOT NULL,
"all_organizations" integer DEFAULT false NOT NULL,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade
);
`),
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "api_keys_key_hash_unique" ON "api_keys" ("key_hash")`),
db.run(sql`CREATE INDEX IF NOT EXISTS "key_hash_index" ON "api_keys" ("key_hash")`),
]);
},
down: async ({ db }) => {
await db.batch([
db.run(sql`DROP TABLE IF EXISTS "api_key_organizations"`),
db.run(sql`DROP TABLE IF EXISTS "api_keys"`),
db.run(sql`DROP INDEX IF EXISTS "api_keys_key_hash_unique"`),
db.run(sql`DROP INDEX IF EXISTS "key_hash_index"`),
]);
},
} satisfies Migration;

View File

@@ -1,62 +0,0 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const organizationsWebhooksMigration = {
name: 'organizations-webhooks',
up: async ({ db }) => {
await db.batch([
db.run(sql`
CREATE TABLE IF NOT EXISTS "webhook_deliveries" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"webhook_id" text NOT NULL,
"event_name" text NOT NULL,
"request_payload" text NOT NULL,
"response_payload" text NOT NULL,
"response_status" integer NOT NULL,
FOREIGN KEY ("webhook_id") REFERENCES "webhooks"("id") ON UPDATE cascade ON DELETE cascade
);
`),
db.run(sql`
CREATE TABLE IF NOT EXISTS "webhook_events" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"webhook_id" text NOT NULL,
"event_name" text NOT NULL,
FOREIGN KEY ("webhook_id") REFERENCES "webhooks"("id") ON UPDATE cascade ON DELETE cascade
);
`),
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "webhook_events_webhook_id_event_name_unique" ON "webhook_events" ("webhook_id","event_name")`),
db.run(sql`
CREATE TABLE IF NOT EXISTS "webhooks" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"updated_at" integer NOT NULL,
"name" text NOT NULL,
"url" text NOT NULL,
"secret" text,
"enabled" integer DEFAULT true NOT NULL,
"created_by" text,
"organization_id" text,
FOREIGN KEY ("created_by") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null,
FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade
);
`),
]);
},
down: async ({ db }) => {
await db.batch([
db.run(sql`DROP TABLE IF EXISTS "webhook_deliveries"`),
db.run(sql`DROP TABLE IF EXISTS "webhook_events"`),
db.run(sql`DROP INDEX IF EXISTS "webhook_events_webhook_id_event_name_unique"`),
db.run(sql`DROP TABLE IF EXISTS "webhooks"`),
]);
},
} satisfies Migration;

View File

@@ -1,22 +0,0 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const organizationsInvitationsImprovementMigration = {
name: 'organizations-invitations-improvement',
up: async ({ db }) => {
await db.batch([
db.run(sql`ALTER TABLE "organization_invitations" ALTER COLUMN "role" TO "role" text NOT NULL`),
db.run(sql`CREATE UNIQUE INDEX IF NOT EXISTS "organization_invitations_organization_email_unique" ON "organization_invitations" ("organization_id","email")`),
db.run(sql`ALTER TABLE "organization_invitations" ALTER COLUMN "status" TO "status" text NOT NULL DEFAULT 'pending'`),
]);
},
down: async ({ db }) => {
await db.batch([
db.run(sql`ALTER TABLE "organization_invitations" ALTER COLUMN "role" TO "role" text`),
db.run(sql`DROP INDEX IF EXISTS "organization_invitations_organization_email_unique"`),
db.run(sql`ALTER TABLE "organization_invitations" ALTER COLUMN "status" TO "status" text NOT NULL`),
]);
},
} satisfies Migration;

View File

@@ -1,31 +0,0 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const documentActivityLogMigration = {
name: 'document-activity-log',
up: async ({ db }) => {
await db.batch([
db.run(sql`
CREATE TABLE IF NOT EXISTS "document_activity_log" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"document_id" text NOT NULL,
"event" text NOT NULL,
"event_data" text,
"user_id" text,
"tag_id" text,
FOREIGN KEY ("document_id") REFERENCES "documents"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE no action,
FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE no action
);
`),
]);
},
down: async ({ db }) => {
await db.batch([
db.run(sql`DROP TABLE IF EXISTS "document_activity_log"`),
]);
},
} satisfies Migration;

View File

@@ -1,56 +0,0 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const documentActivityLogOnDeleteSetNullMigration = {
name: 'document-activity-log-on-delete-set-null',
up: async ({ db }) => {
await db.batch([
db.run(sql`PRAGMA foreign_keys=OFF`),
db.run(sql`
CREATE TABLE "__new_document_activity_log" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"document_id" text NOT NULL,
"event" text NOT NULL,
"event_data" text,
"user_id" text,
"tag_id" text,
FOREIGN KEY ("document_id") REFERENCES "documents"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null,
FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE set null
);
`),
db.run(sql`
INSERT INTO "__new_document_activity_log"("id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id") SELECT "id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id" FROM "document_activity_log";
`),
db.run(sql`DROP TABLE IF EXISTS "document_activity_log"`),
db.run(sql`ALTER TABLE "__new_document_activity_log" RENAME TO "document_activity_log"`),
db.run(sql`PRAGMA foreign_keys=ON`),
]);
},
down: async ({ db }) => {
await db.batch([
db.run(sql`PRAGMA foreign_keys=OFF`),
db.run(sql`
CREATE TABLE "__restore_document_activity_log" (
"id" text PRIMARY KEY NOT NULL,
"created_at" integer NOT NULL,
"document_id" text NOT NULL,
"event" text NOT NULL,
"event_data" text,
"user_id" text,
"tag_id" text,
FOREIGN KEY ("document_id") REFERENCES "documents"("id") ON UPDATE cascade ON DELETE cascade,
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE no action,
FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE no action
);
`),
db.run(sql`INSERT INTO "__restore_document_activity_log"("id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id") SELECT "id", "created_at", "document_id", "event", "event_data", "user_id", "tag_id" FROM "document_activity_log";`),
db.run(sql`DROP TABLE IF EXISTS "document_activity_log"`),
db.run(sql`ALTER TABLE "__restore_document_activity_log" RENAME TO "document_activity_log"`),
db.run(sql`PRAGMA foreign_keys=ON`),
]);
},
} satisfies Migration;

View File

@@ -1,12 +0,0 @@
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const dropLegacyMigrationsMigration = {
name: 'drop-legacy-migrations',
description: 'Drop the legacy migrations table as it is not used anymore',
up: async ({ db }) => {
await db.run(sql`DROP TABLE IF EXISTS "__drizzle_migrations"`);
},
} satisfies Migration;

View File

@@ -1,14 +0,0 @@
import { index, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core';
export const migrationsTable = sqliteTable(
'migrations',
{
id: integer('id').primaryKey({ autoIncrement: true }),
name: text('name').notNull(),
runAt: integer('run_at', { mode: 'timestamp_ms' }).notNull().$default(() => new Date()),
},
t => [
index('name_index').on(t.name),
index('run_at_index').on(t.runAt),
],
);

View File

@@ -1,141 +0,0 @@
import type { Migration } from './migrations.types';
import { sql } from 'drizzle-orm';
import { describe, expect, test } from 'vitest';
import { setupDatabase } from '../modules/app/database/database';
import { serializeSchema } from '../modules/app/database/database.test-utils';
import { migrations } from './migrations.registry';
import { rollbackLastAppliedMigration, runMigrations } from './migrations.usecases';
describe('migrations registry', () => {
describe('migrations', () => {
test('each migration should have a unique name', () => {
const migrationNames = migrations.map(m => m.name);
const duplicateMigrationNames = migrationNames.filter(name => migrationNames.filter(n => n === name).length > 1);
expect(duplicateMigrationNames).to.eql([], 'Each migration should have a unique name');
});
test('each migration should have a non-empty name', () => {
const migrationNames = migrations.map(m => m.name);
const emptyMigrationNames = migrationNames.filter(name => name === '');
expect(emptyMigrationNames).to.eql([], 'Each migration should have a non empty name');
});
test('all migrations must be able to be applied without error and the database should be in a consistent state', async () => {
const { db } = setupDatabase({ url: ':memory:' });
// This will throw if any migration is not able to be applied
await runMigrations({ db, migrations });
// check foreign keys are enabled
const { rows } = await db.run(sql`pragma foreign_keys;`);
expect(rows).to.eql([{ foreign_keys: 1 }]);
});
test('we can stop at any migration and still have a consistent database state', async () => {
// Given 3 migrations [A,B,C], this creates the combinations [[A], [A,B], [A,B,C]]
const migrationCombinations = migrations.map((m, i) => migrations.slice(0, i + 1));
for (const migrationCombination of migrationCombinations) {
const { db } = setupDatabase({ url: ':memory:' });
await runMigrations({ db, migrations: migrationCombination });
}
});
test('when we rollback to a previous migration, the database should be in the state of the previous migration', async () => {
// Given 3 migrations [A,B,C], this creates the combinations [[A], [A,B], [A,B,C]]
const migrationCombinations = migrations.map((m, i) => migrations.slice(0, i + 1));
for (const [index, migrationCombination] of migrationCombinations.entries()) {
const { db } = setupDatabase({ url: ':memory:' });
const previousMigration = migrationCombinations[index - 1] ?? [] as Migration[];
await runMigrations({ db, migrations: previousMigration });
const previousDbState = await serializeSchema({ db });
await runMigrations({ db, migrations: migrationCombination });
await rollbackLastAppliedMigration({ db });
const currentDbState = await serializeSchema({ db });
expect(currentDbState).to.eql(previousDbState, `Downgrading from ${migrationCombination.at(-1)?.name ?? 'no migration'} should result in the same state as the previous migration`);
}
});
test('regression test of the database state after running migrations, update the snapshot when the database state changes', async () => {
const { db } = setupDatabase({ url: ':memory:' });
await runMigrations({ db, migrations });
expect(await serializeSchema({ db })).toMatchInlineSnapshot(`
"CREATE UNIQUE INDEX "api_keys_key_hash_unique" ON "api_keys" ("key_hash");
CREATE INDEX "auth_sessions_token_index" ON "auth_sessions" ("token");
CREATE INDEX "auth_verifications_identifier_index" ON "auth_verifications" ("identifier");
CREATE INDEX "documents_organization_id_is_deleted_created_at_index" ON "documents" ("organization_id","is_deleted","created_at");
CREATE INDEX "documents_organization_id_is_deleted_index" ON "documents" ("organization_id","is_deleted");
CREATE UNIQUE INDEX "documents_organization_id_original_sha256_hash_unique" ON "documents" ("organization_id","original_sha256_hash");
CREATE INDEX "documents_organization_id_size_index" ON "documents" ("organization_id","original_size");
CREATE INDEX "documents_original_sha256_hash_index" ON "documents" ("original_sha256_hash");
CREATE UNIQUE INDEX "intake_emails_email_address_unique" ON "intake_emails" ("email_address");
CREATE INDEX "key_hash_index" ON "api_keys" ("key_hash");
CREATE INDEX migrations_name_index ON migrations (name);
CREATE INDEX migrations_run_at_index ON migrations (run_at);
CREATE UNIQUE INDEX "organization_invitations_organization_email_unique" ON "organization_invitations" ("organization_id","email");
CREATE UNIQUE INDEX "organization_members_user_organization_unique" ON "organization_members" ("organization_id","user_id");
CREATE UNIQUE INDEX "tags_organization_id_name_unique" ON "tags" ("organization_id","name");
CREATE INDEX "user_roles_role_index" ON "user_roles" ("role");
CREATE UNIQUE INDEX "user_roles_user_id_role_unique_index" ON "user_roles" ("user_id","role");
CREATE INDEX "users_email_index" ON "users" ("email");
CREATE UNIQUE INDEX "users_email_unique" ON "users" ("email");
CREATE UNIQUE INDEX "webhook_events_webhook_id_event_name_unique" ON "webhook_events" ("webhook_id","event_name");
CREATE TABLE "api_key_organizations" ( "api_key_id" text NOT NULL, "organization_member_id" text NOT NULL, FOREIGN KEY ("api_key_id") REFERENCES "api_keys"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("organization_member_id") REFERENCES "organization_members"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "api_keys" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "name" text NOT NULL, "key_hash" text NOT NULL, "prefix" text NOT NULL, "user_id" text NOT NULL, "last_used_at" integer, "expires_at" integer, "permissions" text DEFAULT '[]' NOT NULL, "all_organizations" integer DEFAULT false NOT NULL, FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "auth_accounts" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "user_id" text, "account_id" text NOT NULL, "provider_id" text NOT NULL, "access_token" text, "refresh_token" text, "access_token_expires_at" integer, "refresh_token_expires_at" integer, "scope" text, "id_token" text, "password" text, FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "auth_sessions" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "token" text NOT NULL, "user_id" text, "expires_at" integer NOT NULL, "ip_address" text, "user_agent" text, "active_organization_id" text, FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("active_organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE set null );
CREATE TABLE "auth_verifications" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "identifier" text NOT NULL, "value" text NOT NULL, "expires_at" integer NOT NULL );
CREATE TABLE "document_activity_log" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "document_id" text NOT NULL, "event" text NOT NULL, "event_data" text, "user_id" text, "tag_id" text, FOREIGN KEY ("document_id") REFERENCES "documents"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null, FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE set null );
CREATE TABLE "documents" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "is_deleted" integer DEFAULT false NOT NULL, "deleted_at" integer, "organization_id" text NOT NULL, "created_by" text, "deleted_by" text, "original_name" text NOT NULL, "original_size" integer DEFAULT 0 NOT NULL, "original_storage_key" text NOT NULL, "original_sha256_hash" text NOT NULL, "name" text NOT NULL, "mime_type" text NOT NULL, "content" text DEFAULT '' NOT NULL, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("created_by") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null, FOREIGN KEY ("deleted_by") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null );
CREATE VIRTUAL TABLE documents_fts USING fts5(id UNINDEXED, name, original_name, content, prefix='2 3 4');
CREATE TABLE 'documents_fts_config'(k PRIMARY KEY, v) WITHOUT ROWID;
CREATE TABLE 'documents_fts_content'(id INTEGER PRIMARY KEY, c0, c1, c2, c3);
CREATE TABLE 'documents_fts_data'(id INTEGER PRIMARY KEY, block BLOB);
CREATE TABLE 'documents_fts_docsize'(id INTEGER PRIMARY KEY, sz BLOB);
CREATE TABLE 'documents_fts_idx'(segid, term, pgno, PRIMARY KEY(segid, term)) WITHOUT ROWID;
CREATE TABLE "documents_tags" ( "document_id" text NOT NULL, "tag_id" text NOT NULL, PRIMARY KEY("document_id", "tag_id"), FOREIGN KEY ("document_id") REFERENCES "documents"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "intake_emails" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "email_address" text NOT NULL, "organization_id" text NOT NULL, "allowed_origins" text DEFAULT '[]' NOT NULL, "is_enabled" integer DEFAULT true NOT NULL, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL, run_at INTEGER NOT NULL);
CREATE TABLE "organization_invitations" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "organization_id" text NOT NULL, "email" text NOT NULL, "role" text NOT NULL, "status" text NOT NULL DEFAULT 'pending', "expires_at" integer NOT NULL, "inviter_id" text NOT NULL, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("inviter_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "organization_members" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "organization_id" text NOT NULL, "user_id" text NOT NULL, "role" text NOT NULL, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "organization_subscriptions" ( "id" text PRIMARY KEY NOT NULL, "customer_id" text NOT NULL, "organization_id" text NOT NULL, "plan_id" text NOT NULL, "status" text NOT NULL, "seats_count" integer NOT NULL, "current_period_end" integer NOT NULL, "current_period_start" integer NOT NULL, "cancel_at_period_end" integer DEFAULT false NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "organizations" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "name" text NOT NULL, "customer_id" text );
CREATE TABLE sqlite_sequence(name,seq);
CREATE TABLE "tagging_rule_actions" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "tagging_rule_id" text NOT NULL, "tag_id" text NOT NULL, FOREIGN KEY ("tagging_rule_id") REFERENCES "tagging_rules"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("tag_id") REFERENCES "tags"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "tagging_rule_conditions" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "tagging_rule_id" text NOT NULL, "field" text NOT NULL, "operator" text NOT NULL, "value" text NOT NULL, "is_case_sensitive" integer DEFAULT false NOT NULL, FOREIGN KEY ("tagging_rule_id") REFERENCES "tagging_rules"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "tagging_rules" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "organization_id" text NOT NULL, "name" text NOT NULL, "description" text, "enabled" integer DEFAULT true NOT NULL, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "tags" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "organization_id" text NOT NULL, "name" text NOT NULL, "color" text NOT NULL, "description" text, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "user_roles" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "user_id" text NOT NULL, "role" text NOT NULL, FOREIGN KEY ("user_id") REFERENCES "users"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "users" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "email" text NOT NULL, "email_verified" integer DEFAULT false NOT NULL, "name" text, "image" text, "max_organization_count" integer );
CREATE TABLE "webhook_deliveries" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "webhook_id" text NOT NULL, "event_name" text NOT NULL, "request_payload" text NOT NULL, "response_payload" text NOT NULL, "response_status" integer NOT NULL, FOREIGN KEY ("webhook_id") REFERENCES "webhooks"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "webhook_events" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "webhook_id" text NOT NULL, "event_name" text NOT NULL, FOREIGN KEY ("webhook_id") REFERENCES "webhooks"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TABLE "webhooks" ( "id" text PRIMARY KEY NOT NULL, "created_at" integer NOT NULL, "updated_at" integer NOT NULL, "name" text NOT NULL, "url" text NOT NULL, "secret" text, "enabled" integer DEFAULT true NOT NULL, "created_by" text, "organization_id" text, FOREIGN KEY ("created_by") REFERENCES "users"("id") ON UPDATE cascade ON DELETE set null, FOREIGN KEY ("organization_id") REFERENCES "organizations"("id") ON UPDATE cascade ON DELETE cascade );
CREATE TRIGGER trigger_documents_fts_delete AFTER DELETE ON documents BEGIN DELETE FROM documents_fts WHERE id = old.id; END;
CREATE TRIGGER trigger_documents_fts_insert AFTER INSERT ON documents BEGIN INSERT INTO documents_fts(id, name, original_name, content) VALUES (new.id, new.name, new.original_name, new.content); END;
CREATE TRIGGER trigger_documents_fts_update AFTER UPDATE ON documents BEGIN UPDATE documents_fts SET name = new.name, original_name = new.original_name, content = new.content WHERE id = new.id; END;"
`);
});
// Maybe a bit fragile, but it helps enforce that migrations are fail-safe
test('if for some reason we drop the migrations table, we can reapply all migrations', async () => {
const { db } = setupDatabase({ url: ':memory:' });
await runMigrations({ db, migrations });
const dbState = await serializeSchema({ db });
await db.run(sql`DROP TABLE migrations`);
await runMigrations({ db, migrations });
expect(await serializeSchema({ db })).to.eq(dbState);
});
});
});

View File

@@ -1,23 +0,0 @@
import type { Migration } from './migrations.types';
import { initialSchemaSetupMigration } from './list/0001-initial-schema-setup.migration';
import { documentsFtsMigration } from './list/0002-documents-fts.migration';
import { taggingRulesMigration } from './list/0003-tagging-rules.migration';
import { apiKeysMigration } from './list/0004-api-keys.migration';
import { organizationsWebhooksMigration } from './list/0005-organizations-webhooks.migration';
import { organizationsInvitationsImprovementMigration } from './list/0006-organizations-invitations-improvement.migration';
import { documentActivityLogMigration } from './list/0007-document-activity-log.migration';
import { documentActivityLogOnDeleteSetNullMigration } from './list/0008-document-activity-log-on-delete-set-null.migration';
import { dropLegacyMigrationsMigration } from './list/0009-drop-legacy-migrations.migration';
export const migrations: Migration[] = [
initialSchemaSetupMigration,
documentsFtsMigration,
taggingRulesMigration,
apiKeysMigration,
organizationsWebhooksMigration,
organizationsInvitationsImprovementMigration,
documentActivityLogMigration,
documentActivityLogOnDeleteSetNullMigration,
dropLegacyMigrationsMigration,
];

View File

@@ -1,29 +0,0 @@
import type { Database } from '../modules/app/database/database.types';
import { asc, eq, sql } from 'drizzle-orm';
import { migrationsTable } from './migration.tables';
export async function setupMigrationTableIfNotExists({ db }: { db: Database }) {
await db.batch([
db.run(sql`CREATE TABLE IF NOT EXISTS migrations (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL, run_at INTEGER NOT NULL)`),
db.run(sql`CREATE INDEX IF NOT EXISTS migrations_name_index ON migrations (name)`),
db.run(sql`CREATE INDEX IF NOT EXISTS migrations_run_at_index ON migrations (run_at)`),
]);
}
export async function getMigrations({ db }: { db: Database }) {
const migrations = await db.select().from(migrationsTable).orderBy(asc(migrationsTable.runAt));
return { migrations };
}
export async function saveMigration({ db, migrationName, now = new Date() }: { db: Database; migrationName: string; now?: Date }) {
await db.insert(migrationsTable).values({ name: migrationName, runAt: now });
}
export async function deleteMigration({ db, migrationName }: { db: Database; migrationName: string }) {
await db.delete(migrationsTable).where(eq(migrationsTable.name, migrationName));
}
export async function deleteAllMigrations({ db }: { db: Database }) {
await db.delete(migrationsTable);
}

View File

@@ -1,20 +0,0 @@
import type { Database } from '../modules/app/database/database.types';
export type MigrationArguments = {
db: Database;
};
export type Migration = {
/**
* The name of the migration. Must be unique.
*/
name: string;
/**
* Optional description of the migration, serves to add more context to the migration for humans.
*/
description?: string;
up: (args: MigrationArguments) => Promise<unknown>;
down?: (args: MigrationArguments) => Promise<unknown>;
};

View File

@@ -1,141 +0,0 @@
import type { Migration } from './migrations.types';
import { sql } from 'drizzle-orm';
import { describe, expect, test } from 'vitest';
import { setupDatabase } from '../modules/app/database/database';
import { migrationsTable } from './migration.tables';
import { rollbackLastAppliedMigration, runMigrations } from './migrations.usecases';
const createTableUserMigration: Migration = {
name: 'create-table-user',
up: async ({ db }) => {
await db.run(sql`CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL)`);
},
down: async ({ db }) => {
await db.run(sql`DROP TABLE users`);
},
};
const createTableOrganizationMigration: Migration = {
name: 'create-table-organization',
up: async ({ db }) => {
await db.batch([
db.run(sql`CREATE TABLE organizations (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL)`),
db.run(sql`CREATE TABLE organization_members (id INTEGER PRIMARY KEY AUTOINCREMENT, organization_id INTEGER NOT NULL, user_id INTEGER NOT NULL, role TEXT NOT NULL, created_at INTEGER NOT NULL)`),
]);
},
down: async ({ db }) => {
await db.batch([
db.run(sql`DROP TABLE organizations`),
db.run(sql`DROP TABLE organization_members`),
]);
},
};
const createTableDocumentMigration: Migration = {
name: 'create-table-document',
up: async ({ db }) => {
await db.batch([
db.run(sql`CREATE TABLE documents (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL, created_at INTEGER NOT NULL)`),
]);
},
down: async ({ db }) => {
await db.run(sql`DROP TABLE documents`);
},
};
describe('migrations usecases', () => {
describe('runMigrations', () => {
test('should run all migrations that are not already applied', async () => {
const { db } = setupDatabase({ url: ':memory:' });
const migrations = [createTableUserMigration, createTableOrganizationMigration];
await runMigrations({ db, migrations });
const migrationsInDb = await db.select().from(migrationsTable);
expect(migrationsInDb.map(({ id, name }) => ({ id, name }))).to.eql([
{ id: 1, name: 'create-table-user' },
{ id: 2, name: 'create-table-organization' },
]);
migrations.push(createTableDocumentMigration);
await runMigrations({ db, migrations });
const migrationsInDb2 = await db.select().from(migrationsTable);
expect(migrationsInDb2.map(({ id, name }) => ({ id, name }))).to.eql([
{ id: 1, name: 'create-table-user' },
{ id: 2, name: 'create-table-organization' },
{ id: 3, name: 'create-table-document' },
]);
const { rows: tables } = await db.run(sql`SELECT name FROM sqlite_master WHERE name NOT LIKE 'sqlite_%'`);
// Ensure all tables and indexes are created
expect(tables.map(t => t.name)).to.eql([
'migrations',
'migrations_name_index',
'migrations_run_at_index',
'users',
'organizations',
'organization_members',
'documents',
]);
});
});
describe('rollbackLastAppliedMigration', () => {
test('the last migration down is called', async () => {
const { db } = setupDatabase({ url: ':memory:' });
const migrations = [createTableUserMigration, createTableDocumentMigration];
await runMigrations({ db, migrations });
const initialMigrations = await db.select().from(migrationsTable);
expect(initialMigrations.map(({ id, name }) => ({ id, name }))).to.eql([
{ id: 1, name: 'create-table-user' },
{ id: 2, name: 'create-table-document' },
]);
// Ensure the tables exist, no error is thrown
await db.run(sql`SELECT * FROM users`);
await db.run(sql`SELECT * FROM documents`);
await rollbackLastAppliedMigration({ db, migrations });
const migrationsInDb = await db.select().from(migrationsTable);
expect(migrationsInDb.map(({ id, name }) => ({ id, name }))).to.eql([
{ id: 1, name: 'create-table-user' },
]);
// Ensure the documents table is dropped
await db.run(sql`SELECT * FROM users`);
await expect(db.run(sql`SELECT * FROM documents`)).rejects.toThrow();
});
test('when there is no migration to rollback, nothing is done', async () => {
const { db } = setupDatabase({ url: ':memory:' });
await rollbackLastAppliedMigration({ db });
const migrationsInDb = await db.select().from(migrationsTable);
expect(migrationsInDb).to.eql([]);
});
test('when the last migration in the database does not exist in the migrations list, an error is thrown', async () => {
const { db } = setupDatabase({ url: ':memory:' });
await runMigrations({ db, migrations: [createTableUserMigration] });
await expect(
rollbackLastAppliedMigration({ db, migrations: [] }),
).rejects.toThrow('Migration create-table-user not found');
});
});
});

View File

@@ -1,83 +0,0 @@
import type { Database } from '../modules/app/database/database.types';
import type { Logger } from '../modules/shared/logger/logger';
import type { Migration } from './migrations.types';
import { safely } from '@corentinth/chisels';
import { createLogger } from '../modules/shared/logger/logger';
import { migrations as migrationsList } from './migrations.registry';
import { deleteMigration, getMigrations, saveMigration, setupMigrationTableIfNotExists } from './migrations.repository';
export async function runMigrations({ db, migrations = migrationsList, logger = createLogger({ namespace: 'migrations' }) }: { db: Database; migrations?: Migration[]; logger?: Logger }) {
await setupMigrationTableIfNotExists({ db });
if (migrations.length === 0) {
logger.info('No migrations to run, skipping');
return;
}
const { migrations: existingMigrations } = await getMigrations({ db });
const migrationsToRun = migrations.filter(migration => !existingMigrations.some(m => m.name === migration.name));
if (migrationsToRun.length === 0) {
logger.info('All migrations already applied');
return;
}
logger.debug({
migrations: migrations.map(m => m.name),
migrationsToRun: migrationsToRun.map(m => m.name),
existingMigrations: existingMigrations.map(m => m.name),
migrationsToRunCount: migrationsToRun.length,
existingMigrationsCount: existingMigrations.length,
}, 'Running migrations');
for (const migration of migrationsToRun) {
const [, error] = await safely(upMigration({ db, migration }));
if (error) {
logger.error({ error, migrationName: migration.name }, 'Failed to run migration');
throw error;
}
logger.info({ migrationName: migration.name }, 'Migration run successfully');
}
logger.info('All migrations run successfully');
}
async function upMigration({ db, migration }: { db: Database; migration: Migration }) {
const { name, up } = migration;
await up({ db });
await saveMigration({ db, migrationName: name });
}
export async function rollbackLastAppliedMigration({ db, migrations = migrationsList, logger = createLogger({ namespace: 'migrations' }) }: { db: Database; migrations?: Migration[]; logger?: Logger }) {
await setupMigrationTableIfNotExists({ db });
const { migrations: existingMigrations } = await getMigrations({ db });
const lastMigrationInDb = existingMigrations[existingMigrations.length - 1];
if (!lastMigrationInDb) {
logger.info('No migrations to rollback');
return;
}
const lastMigration = migrations.find(m => m.name === lastMigrationInDb.name);
if (!lastMigration) {
logger.error({ migrationName: lastMigrationInDb.name }, 'Migration in database not found in saved migrations');
throw new Error(`Migration ${lastMigrationInDb.name} not found`);
}
await downMigration({ db, migration: lastMigration });
logger.info({ migrationName: lastMigration.name }, 'Migration rolled back successfully');
}
async function downMigration({ db, migration }: { db: Database; migration: Migration }) {
const { name, down } = migration;
await down?.({ db });
await deleteMigration({ db, migrationName: name });
}

View File

@@ -107,27 +107,6 @@ export function getAuth({
deleteUser: { enabled: false },
},
plugins: [
// Would love to have this but it messes with the error handling in better-auth client
// {
// id: 'better-auth-error-adapter',
// onResponse: async (res) => {
// // Transform better auth error to our own error
// if (res.status < 400) {
// return { response: res };
// }
// const body = await res.clone().json();
// const code = get(body, 'code', 'unknown');
// throw createError({
// message: get(body, 'message', 'Unknown error'),
// code: `auth.${code.toLowerCase()}`,
// statusCode: res.status as ContentfulStatusCode,
// isInternal: res.status >= 500,
// });
// },
// },
...(config.auth.providers.customs.length > 0
? [genericOAuth({ config: config.auth.providers.customs })]
: []),

View File

@@ -1,11 +1,19 @@
import type { Config } from '../../config/config.types';
import { dirname } from 'node:path';
import type { Database } from './database.types';
import { dirname, join } from 'node:path';
import { migrate } from 'drizzle-orm/libsql/migrator';
import { ensureDirectoryExists } from '../../shared/fs/fs.services';
import { createLogger } from '../../shared/logger/logger';
import { fileUrlToPath } from '../../shared/path';
import { fileUrlToPath, getRootDirPath } from '../../shared/path';
const logger = createLogger({ namespace: 'database-services' });
export async function runMigrations({ db }: { db: Database }) {
const migrationsFolder = join(getRootDirPath(), 'migrations');
await migrate(db, { migrationsFolder });
}
export async function ensureLocalDatabaseDirectoryExists({ config }: { config: Config }) {
const { url } = config.database;

View File

@@ -1,24 +0,0 @@
import { sql } from 'drizzle-orm';
import { describe, expect, test } from 'vitest';
import { setupDatabase } from './database';
import { serializeSchema } from './database.test-utils';
describe('database-utils test', () => {
describe('serializeSchema', () => {
test('given a database with some tables, it should return the schema as a string, used for db state snapshot', async () => {
const { db } = setupDatabase({ url: ':memory:' });
await db.run(sql`CREATE TABLE test (id INTEGER PRIMARY KEY, name TEXT)`);
await db.run(sql`CREATE INDEX idx_test_name ON test (name)`);
await db.run(sql`CREATE VIEW test_view AS SELECT * FROM test`);
await db.run(sql`CREATE TRIGGER test_trigger AFTER INSERT ON test BEGIN SELECT 1; END`);
const schema = await serializeSchema({ db });
expect(schema).toMatchInlineSnapshot(`
"CREATE INDEX idx_test_name ON test (name);
CREATE TABLE test (id INTEGER PRIMARY KEY, name TEXT);
CREATE TRIGGER test_trigger AFTER INSERT ON test BEGIN SELECT 1; END;
CREATE VIEW test_view AS SELECT * FROM test;"
`);
});
});
});

View File

@@ -1,6 +1,4 @@
import type { Database } from './database.types';
import { sql } from 'drizzle-orm';
import { runMigrations } from '../../../migrations/migrations.usecases';
import { apiKeyOrganizationsTable, apiKeysTable } from '../../api-keys/api-keys.tables';
import { documentsTable } from '../../documents/documents.table';
import { intakeEmailsTable } from '../../intake-emails/intake-emails.tables';
@@ -11,6 +9,7 @@ import { documentsTagsTable, tagsTable } from '../../tags/tags.table';
import { usersTable } from '../../users/users.table';
import { webhookDeliveriesTable, webhookEventsTable, webhooksTable } from '../../webhooks/webhooks.tables';
import { setupDatabase } from './database';
import { runMigrations } from './database.services';
export { createInMemoryDatabase, seedDatabase };
@@ -61,34 +60,3 @@ async function seedDatabase({ db, ...seedRows }: { db: Database } & SeedTablesRo
),
);
}
/*
PRAGMA encoding;
PRAGMA page_size;
PRAGMA auto_vacuum;
PRAGMA journal_mode; -- WAL is persistent
PRAGMA user_version;
PRAGMA application_id;
*/
export async function serializeSchema({ db }: { db: Database }) {
const result = await db.batch([
// db.run(sql`PRAGMA encoding`),
// db.run(sql`PRAGMA page_size`),
// db.run(sql`PRAGMA auto_vacuum`),
// db.run(sql`PRAGMA journal_mode`),
// db.run(sql`PRAGMA user_version`),
// db.run(sql`PRAGMA application_id`),
db.run(sql`SELECT sql FROM sqlite_schema WHERE sql IS NOT NULL AND type IN ('table','index','view','trigger') ORDER BY type, name`),
]);
return Array
.from(result.values())
.flatMap(({ rows }) => rows.map(({ sql }) => minifyQuery(String(sql))))
.join('\n');
}
function minifyQuery(query: string) {
return `${query.replace(/\s+/g, ' ').trim().replace(/;$/, '')};`;
}

View File

@@ -7,7 +7,9 @@ import { ensureLocalDatabaseDirectoryExists } from '../../modules/app/database/d
import { parseConfig } from '../../modules/config/config';
import { createLogger, wrapWithLoggerContext } from '../../modules/shared/logger/logger';
export async function runScriptWithDb(
export { runScript };
async function runScript(
{ scriptName }: { scriptName: string },
fn: (args: { isDryRun: boolean; logger: Logger; db: Database; config: Config }) => Promise<void> | void,
) {
@@ -23,32 +25,18 @@ export async function runScriptWithDb(
const { config } = await parseConfig({ env: process.env });
await ensureLocalDatabaseDirectoryExists({ config });
const { db } = setupDatabase({ ...config.database });
const { db, client } = setupDatabase({ ...config.database });
await executeScript({ logger, fn: async () => fn({ isDryRun, logger, db, config }) });
try {
logger.info('Script started');
await fn({ isDryRun, logger, db, config });
logger.info('Script finished');
} catch (error) {
logger.error({ error }, 'Script failed');
process.exit(1);
} finally {
client.close();
}
},
);
}
export async function runScript(
{ scriptName }: { scriptName: string },
fn: (args: { isDryRun: boolean; logger: Logger }) => Promise<void> | void,
) {
const isDryRun = process.argv.includes('--dry-run');
await wrapWithLoggerContext({ scriptName, isDryRun }, async () => {
const logger = createLogger({ namespace: 'scripts' });
await executeScript({ logger, fn: async () => fn({ isDryRun, logger }) });
});
}
async function executeScript({ logger, fn }: { logger: Logger; fn: () => Promise<unknown> }) {
try {
await fn();
logger.debug('Script finished');
} catch (error) {
logger.error({ error }, 'Script failed');
process.exit(1);
}
}

View File

@@ -1,79 +0,0 @@
import fs from 'node:fs/promises';
import path from 'node:path';
import process from 'node:process';
import { camelCase, kebabCase } from 'lodash-es';
import { builders, loadFile, writeFile } from 'magicast';
import { runScript } from './commons/run-script';
const currentDirectory = import.meta.dirname;
const migrationsDirectory = path.join(currentDirectory, '..', 'migrations', 'list');
async function getLastMigrationFilePrefixNumber() {
const migrations = await fs.readdir(migrationsDirectory);
const lastMigrationFileName = migrations.filter(file => file.endsWith('.migration.ts')).toSorted().pop();
if (lastMigrationFileName === undefined) {
return 0;
}
const [, lastMigrationNumber] = lastMigrationFileName.match(/^(\d+)/) ?? [];
return lastMigrationNumber === undefined ? 0 : Number.parseInt(lastMigrationNumber);
}
await runScript(
{ scriptName: 'create-migration' },
async ({ logger }) => {
const rawMigrationName = process.argv[2];
if (rawMigrationName === undefined || rawMigrationName === '') {
logger.error('Migration name is required, example: pnpm migrate:create <migration-name>');
process.exit(1);
}
const migrationName = kebabCase(rawMigrationName);
const lastMigrationPrefixNumber = await getLastMigrationFilePrefixNumber();
const prefixNumber = (lastMigrationPrefixNumber + 1).toString().padStart(4, '0');
const fileNameWithoutExtension = `${prefixNumber}-${migrationName}.migration`;
const fileName = `${fileNameWithoutExtension}.ts`;
const migrationPath = path.join(migrationsDirectory, fileName);
const migrationObjectIdentifier = `${camelCase(migrationName)}Migration`;
await fs.writeFile(migrationPath, `
import type { Migration } from '../migrations.types';
import { sql } from 'drizzle-orm';
export const ${migrationObjectIdentifier} = {
name: '${migrationName}',
up: async ({ db }) => {
await db.batch([
db.run(sql\`SELECT 1\`),
]);
},
down: async ({ db }) => {
await db.batch([
db.run(sql\`SELECT 1\`),
]);
},
} satisfies Migration;`.trim());
logger.info(`Migration ${fileName} created`);
const registry = await loadFile(path.join(migrationsDirectory, '..', 'migrations.registry.ts'));
registry.imports.$append({
imported: migrationObjectIdentifier,
from: `./list/${fileNameWithoutExtension}`,
});
// eslint-disable-next-line ts/no-unsafe-call, ts/no-unsafe-member-access
registry.exports.migrations.push(builders.raw(migrationObjectIdentifier));
await writeFile(registry, path.join(migrationsDirectory, '..', 'migrations.registry.ts'));
},
);

View File

@@ -1,9 +1,11 @@
import { runMigrations } from '../migrations/migrations.usecases';
import { runScriptWithDb } from './commons/run-script';
import { runMigrations } from '../modules/app/database/database.services';
import { runScript } from './commons/run-script';
await runScriptWithDb(
await runScript(
{ scriptName: 'migrate-up' },
async ({ db }) => {
// The drizzle-kit config doesn't support encryption yet, so we cannot use npx drizzle-kit migrate
// to run migrations. We have to run them manually.
await runMigrations({ db });
},
);

View File

@@ -2,9 +2,9 @@ import { buildUrl } from '@corentinth/chisels';
import { triggerWebhook } from '@owlrelay/webhook';
import { getServerBaseUrl } from '../modules/config/config.models';
import { INTAKE_EMAILS_INGEST_ROUTE } from '../modules/intake-emails/intake-emails.constants';
import { runScriptWithDb } from './commons/run-script';
import { runScript } from './commons/run-script';
await runScriptWithDb(
await runScript(
{ scriptName: 'simulate-intake-email' },
async ({ config }) => {
const { serverBaseUrl } = getServerBaseUrl({ config });

View File

@@ -11,10 +11,6 @@ FROM base AS build
WORKDIR /app
# --- add build deps for sharp/node-gyp; they need to be explicitly installed for armv7 ---
RUN apt-get update && apt-get install -y --no-install-recommends python3 make g++ && rm -rf /var/lib/apt/lists/*
ENV npm_config_python=/usr/bin/python3
COPY pnpm-lock.yaml ./
COPY pnpm-workspace.yaml ./
COPY apps/papra-client/package.json apps/papra-client/package.json

View File

@@ -13,10 +13,6 @@ FROM base AS build
WORKDIR /app
# --- add build deps for sharp/node-gyp; they need to be explicitly installed for armv7 ---
RUN apt-get update && apt-get install -y --no-install-recommends python3 make g++ && rm -rf /var/lib/apt/lists/*
ENV npm_config_python=/usr/bin/python3
COPY pnpm-lock.yaml ./
COPY pnpm-workspace.yaml ./
COPY apps/papra-client/package.json apps/papra-client/package.json

View File

@@ -8,9 +8,6 @@
"keywords": [],
"scripts": {
"docker:build:root": "docker build -t papra -f docker/Dockerfile .",
"docker:build:root:armv7": "docker buildx build --platform linux/arm/v7 -t papra -f docker/Dockerfile --load .",
"docker:build:root:amd64": "docker buildx build --platform linux/amd64 -t papra -f docker/Dockerfile --load .",
"docker:build:root:arm64": "docker buildx build --platform linux/arm64 -t papra -f docker/Dockerfile --load .",
"docker:build:rootless": "docker build -t papra-rootless -f docker/Dockerfile.rootless .",
"version": "changeset version && pnpm install --no-frozen-lockfile",
"changeset": "changeset",

View File

@@ -1,7 +0,0 @@
# @papra/lecture
## 0.1.0
### Minor Changes
- [#429](https://github.com/papra-hq/papra/pull/429) [`67b3b14`](https://github.com/papra-hq/papra/commit/67b3b14cdfa994874c695b9d854a93160ba6a911) Thanks [@CorentinTh](https://github.com/CorentinTh)! - Added support for scanned pdf content extraction

View File

@@ -1,7 +1,7 @@
{
"name": "@papra/lecture",
"type": "module",
"version": "0.1.0",
"version": "0.0.7",
"packageManager": "pnpm@10.12.3",
"description": "A simple library to extract text from files",
"author": "Corentin Thomasset <corentinth@proton.me> (https://corentin.tech)",

View File

@@ -0,0 +1,74 @@
# n8n Integration
A community node package that integrates [Papra](https://papra.app) (the document archiving platform) with [n8n](https://n8n.io), enabling you to automate document management workflows.
## Installation
1. In your n8n instance, go to **Settings** → **Community Nodes**
2. Click **Install** and enter: `@papra/n8n-nodes-papra`
3. Install the package and restart your n8n instance
## Setup
### 1. Create API Credentials
Before using this integration, you need to create API credentials in your Papra workspace:
1. Log in to your Papra instance
2. Navigate to **Settings** → **API Keys**
3. Click **Create New API Key**
4. Copy the generated API key and your Organization ID (from the URL)
For detailed instructions, visit the [Papra API documentation](https://docs.papra.app/resources/api-endpoints/#authentication).
### 2. Configure n8n Credentials
1. In n8n, create a new workflow
2. Add a Papra node
3. Create new credentials with:
- **Papra API URL**: `https://api.papra.app` (or your self-hosted instance URL)
- **Organization ID**: Your organization ID from Papra
- **API Key**: Your generated API key
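Before wiring the node into a workflow, you can sanity-check the credentials directly against the API. The sketch below only assumes the Bearer authentication used by this node; the exact endpoint path is an assumption, so adapt it to the [API documentation](https://docs.papra.app/resources/api-endpoints/):
```bash
# Hypothetical smoke test, not an official example.
# The /api/organizations/<org-id>/documents path is an assumption;
# check the Papra API docs for the canonical route.
PAPRA_API_URL="https://api.papra.app"
PAPRA_ORG_ID="org_xxx"         # your organization ID, taken from the URL
PAPRA_API_KEY="<your-api-key>" # the key generated above

curl --fail --silent \
  --header "Authorization: Bearer ${PAPRA_API_KEY}" \
  "${PAPRA_API_URL}/api/organizations/${PAPRA_ORG_ID}/documents"
```
A `2xx` response with a JSON body means the URL, organization ID, and API key are consistent; a `401` usually points at the key, a `404` at the organization ID or path.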
## Available Operations
| Resource | Operations |
|----------|------------|
| Document | `create`, `list`, `get`, `update`, `remove`, `get_file`, `get_activity` |
| Tag | Standard CRUD operations |
| Document Tag | Link/unlink tags to/from documents |
| Statistics | Retrieve workspace analytics |
| Trash | List deleted documents |
## Development
### Prerequisites
- Node.js 20.15 or higher
- pnpm package manager
- n8n instance for testing
- you can run n8n ad hoc with `pnpx n8n` or install it globally with `pnpm i -g n8n`
### Testing the Integration
#### Option 1: Local n8n Instance
1. Build this package:
```bash
pnpm run build
```
2. Link the package to your local n8n:
```bash
# Navigate to your n8n nodes directory
cd ~/.n8n/nodes
# Install the package locally
npm install /path/to/papra/packages/n8n-nodes
```
3. Start n8n:
```bash
npx n8n
```
4. In n8n, create a new workflow and search for "Papra" to find the node
#### Option 2: Docker
Build a custom n8n Docker image with the Papra node included. Follow the [n8n documentation](https://docs.n8n.io/integrations/creating-nodes/deploy/install-private-nodes/#install-your-node-in-a-docker-n8n-instance) for detailed instructions.
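As a hedged starting point, such a build can look like the sketch below; the base image tag and the community-nodes install path are assumptions, and the n8n documentation linked above is authoritative:
```bash
# Sketch of a custom image build; base image and paths are assumptions.
cat > Dockerfile.papra <<'EOF'
FROM docker.n8n.io/n8nio/n8n
USER root
# Install the published package into n8n's community nodes directory
RUN mkdir -p /home/node/.n8n/nodes \
 && cd /home/node/.n8n/nodes \
 && npm install @papra/n8n-nodes-papra \
 && chown -R node:node /home/node/.n8n
USER node
EOF

docker build -t n8n-with-papra -f Dockerfile.papra .
docker run -it --rm -p 5678:5678 n8n-with-papra
```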

View File

@@ -0,0 +1,41 @@
import type { IAuthenticateGeneric, ICredentialType, INodeProperties } from 'n8n-workflow';
export class PapraApi implements ICredentialType {
name = 'papraApi';
displayName = 'Papra API';
documentationUrl = 'https://docs.papra.app/resources/api-endpoints/#authentication';
properties: INodeProperties[] = [
{
name: 'url',
displayName: 'Papra API URL',
default: 'https://api.papra.app',
required: true,
type: 'string',
validateType: 'url',
},
{
name: 'organization_id',
displayName: 'Organization ID',
default: '',
required: true,
type: 'string',
},
{
name: 'apiKey',
displayName: 'Papra API Key',
default: '',
required: true,
type: 'string',
typeOptions: { password: true },
},
];
authenticate: IAuthenticateGeneric = {
type: 'generic',
properties: {
headers: {
Authorization: '=Bearer {{$credentials.apiKey}}',
},
},
};
}

View File

@@ -0,0 +1,24 @@
import antfu from '@antfu/eslint-config';
export default antfu({
stylistic: {
semi: true,
},
// TODO: include the n8n rules package when it's eslint-9 ready
// https://github.com/ivov/eslint-plugin-n8n-nodes-base/issues/196
rules: {
// To allow export on top of files
'ts/no-use-before-define': ['error', { allowNamedExports: true, functions: false }],
'curly': ['error', 'all'],
'vitest/consistent-test-it': ['error', { fn: 'test' }],
'ts/consistent-type-definitions': ['error', 'type'],
'style/brace-style': ['error', '1tbs', { allowSingleLine: false }],
'unused-imports/no-unused-vars': ['error', {
argsIgnorePattern: '^_',
varsIgnorePattern: '^_',
caughtErrorsIgnorePattern: '^_',
}],
},
});

View File

@@ -0,0 +1,16 @@
const path = require('node:path');
const { task, src, dest } = require('gulp');
task('build:icons', copyIcons);
function copyIcons() {
const nodeSource = path.resolve('nodes', '**', '*.{png,svg}');
const nodeDestination = path.resolve('dist', 'nodes');
src(nodeSource).pipe(dest(nodeDestination));
const credSource = path.resolve('credentials', '**', '*.{png,svg}');
const credDestination = path.resolve('dist', 'credentials');
return src(credSource).pipe(dest(credDestination));
}

View File

View File

@@ -0,0 +1,18 @@
{
"node": "n8n-nodes-base.papra",
"nodeVersion": "1.0",
"codexVersion": "1.0",
"categories": ["Data & Storage"],
"resources": {
"credentialDocumentation": [
{
"url": "https://docs.papra.app/resources/api-endpoints/#authentication"
}
],
"primaryDocumentation": [
{
"url": "https://docs.papra.app/"
}
]
}
}

View File

@@ -0,0 +1,24 @@
import type { INodeTypeBaseDescription, IVersionedNodeType } from 'n8n-workflow';
import { VersionedNodeType } from 'n8n-workflow';
import { PapraV1 } from './v1/PapraV1.node';
export class Papra extends VersionedNodeType {
constructor() {
const baseDescription: INodeTypeBaseDescription = {
displayName: 'Papra',
name: 'papra',
icon: 'file:papra.svg',
group: ['input'],
description: 'Read, update, write and delete data from Papra',
defaultVersion: 1,
usableAsTool: true,
};
const nodeVersions: IVersionedNodeType['nodeVersions'] = {
1: new PapraV1(baseDescription),
};
super(nodeVersions, baseDescription);
}
}

View File

@@ -0,0 +1,31 @@
import type {
IExecuteFunctions,
INodeExecutionData,
INodeType,
INodeTypeBaseDescription,
INodeTypeDescription,
} from 'n8n-workflow';
import { router } from './actions/router';
import * as version from './actions/version';
import { listSearch } from './methods';
export class PapraV1 implements INodeType {
description: INodeTypeDescription;
constructor(baseDescription: INodeTypeBaseDescription) {
this.description = {
...baseDescription,
...version.description,
usableAsTool: true,
};
}
methods = {
listSearch,
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
return await router.call(this);
}
}

View File

@@ -0,0 +1,83 @@
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
import { Buffer } from 'node:buffer';
import FormData from 'form-data';
import { apiRequest } from '../../transport/index.js';
export const description: INodeProperties[] = [
{
displayName: 'Input Binary Field',
name: 'binary_property_name',
default: 'data',
displayOptions: {
show: {
resource: ['document'],
operation: ['create'],
},
},
hint: 'The name of the input field containing the file data to be processed',
required: true,
type: 'string',
},
{
displayName: 'Additional Fields',
name: 'additional_fields',
type: 'collection',
default: {},
displayOptions: {
show: {
resource: ['document'],
operation: ['create'],
},
},
placeholder: 'Add Field',
options: [
{
displayName: 'OCR Languages',
name: 'ocr_languages',
default: '',
description: 'The languages of the document, used for OCR content extraction',
type: 'string',
},
],
},
];
export async function execute(
this: IExecuteFunctions,
itemIndex: number,
): Promise<INodeExecutionData> {
const endpoint = `/documents`;
const formData = new FormData();
const binaryPropertyName = this.getNodeParameter('binary_property_name', itemIndex) as string;
const binaryData = this.helpers.assertBinaryData(itemIndex, binaryPropertyName);
const data = binaryData.id
? await this.helpers.getBinaryStream(binaryData.id)
: Buffer.from(binaryData.data, 'base64');
formData.append('file', data, {
filename: binaryData.fileName,
contentType: binaryData.mimeType,
});
const additionalFields = this.getNodeParameter('additional_fields', itemIndex) as any;
Object.entries({
ocrLanguages: additionalFields.ocr_languages,
})
.filter(([, value]) => value !== undefined && value !== '')
.forEach(([key, value]) => {
formData.append(key, value);
});
const response = (await apiRequest.call(
this,
itemIndex,
'POST',
endpoint,
undefined,
undefined,
{ headers: formData.getHeaders(), formData },
)) as any;
return { json: { results: [response] } };
}

View File

@@ -0,0 +1,77 @@
import type { INodeProperties } from 'n8n-workflow';
import * as create from './create.operation';
import * as get from './get.operation';
import * as get_activity from './get_activity.operation';
import * as get_file from './get_file.operation';
import * as list from './list.operation';
import * as remove from './remove.operation';
import * as update from './update.operation';
export {
create,
get,
get_activity,
get_file,
list,
remove,
update,
};
export const description: INodeProperties[] = [
{
displayName: 'Operation',
name: 'operation',
default: 'list',
displayOptions: {
show: { resource: ['document'] },
},
noDataExpression: true,
options: [
{
name: 'Create a document',
value: 'create',
action: 'Create a new document',
},
{
name: 'List documents',
value: 'list',
action: 'List all documents',
},
{
name: 'Update a document',
value: 'update',
action: 'Update a document',
},
{
name: 'Get a document',
value: 'get',
action: 'Get a document',
},
{
name: 'Get the document file',
value: 'get_file',
action: 'Get the file of the document',
},
{
name: 'Delete a document',
value: 'remove',
action: 'Delete a document',
},
{
name: 'Get the document activity log',
value: 'get_activity',
action: 'Get the activity log of a document',
},
],
type: 'options',
},
...create.description,
...list.description,
...update.description,
...get.description,
...get_file.description,
...get_activity.description,
...remove.description,
];

View File

@@ -0,0 +1,83 @@
import type {
IExecuteFunctions,
INodeExecutionData,
INodeParameterResourceLocator,
INodeProperties,
} from 'n8n-workflow';
import { apiRequest } from '../../transport';
export const description: INodeProperties[] = [
{
displayName: 'ID',
name: 'id',
default: { mode: 'list', value: '' },
displayOptions: {
show: {
resource: ['document'],
operation: ['get'],
},
},
modes: [
{
displayName: 'From List',
name: 'list',
placeholder: `Select a Document...`,
type: 'list',
typeOptions: {
searchListMethod: 'documentSearch',
searchFilterRequired: false,
searchable: true,
},
},
{
displayName: 'By ID',
name: 'id',
placeholder: `Enter Document ID...`,
type: 'string',
validation: [
{
type: 'regex',
properties: {
regex: '^[a-zA-Z0-9_]+$',
errorMessage: 'The ID must be valid',
},
},
],
},
{
displayName: 'By URL',
name: 'url',
placeholder: `Enter Document URL...`,
type: 'string',
validation: [
{
type: 'regex',
properties: {
regex: '^(?:http|https)://(?:.+?)/documents/([a-zA-Z0-9_]+)/?(?:\\?.*)?$',
errorMessage: 'The URL must be a valid Papra document URL (e.g. https://papra.example.com/organizations/org_xxx/documents/doc_xxx?tab=info)',
},
},
],
extractValue: {
type: 'regex',
regex: '^(?:http|https)://(?:.+?)/documents/([a-zA-Z0-9_]+)/?(?:\\?.*)?$',
},
},
],
placeholder: 'ID of the document',
required: true,
type: 'resourceLocator',
},
];
export async function execute(
this: IExecuteFunctions,
itemIndex: number,
): Promise<INodeExecutionData> {
const id = (this.getNodeParameter('id', itemIndex) as INodeParameterResourceLocator).value;
const endpoint = `/documents/${id}`;
const response = (await apiRequest.call(this, itemIndex, 'GET', endpoint)) as any;
return { json: { results: [response] } };
}

View File

@@ -0,0 +1,100 @@
import type {
IExecuteFunctions,
INodeExecutionData,
INodeParameterResourceLocator,
INodeProperties,
} from 'n8n-workflow';
import {
NodeOperationError,
} from 'n8n-workflow';
import { apiRequestPaginated } from '../../transport';
export const description: INodeProperties[] = [
{
displayName: 'ID',
name: 'id',
default: { mode: 'list', value: '' },
displayOptions: {
show: {
resource: ['document'],
operation: ['get_activity'],
},
},
modes: [
{
displayName: 'From List',
name: 'list',
placeholder: `Select a Document...`,
type: 'list',
typeOptions: {
searchListMethod: 'documentSearch',
searchFilterRequired: false,
searchable: true,
},
},
{
displayName: 'By ID',
name: 'id',
placeholder: `Enter Document ID...`,
type: 'string',
validation: [
{
type: 'regex',
properties: {
regex: '^[a-zA-Z0-9_]+$',
errorMessage: 'The ID must be valid',
},
},
],
},
{
displayName: 'By URL',
name: 'url',
placeholder: `Enter Document URL...`,
type: 'string',
validation: [
{
type: 'regex',
properties: {
regex: '^(?:http|https)://(?:.+?)/documents/([a-zA-Z0-9_]+)/?(?:\\?.*)?$',
errorMessage: 'The URL must be a valid Papra document URL (e.g. https://papra.example.com/organizations/org_xxx/documents/doc_xxx?tab=info)',
},
},
],
extractValue: {
type: 'regex',
regex: '^(?:http|https)://(?:.+?)/documents/([a-zA-Z0-9_]+)/?(?:\\?.*)?$',
},
},
],
placeholder: 'ID of the document',
required: true,
type: 'resourceLocator',
},
];
export async function execute(
this: IExecuteFunctions,
itemIndex: number,
): Promise<INodeExecutionData> {
const id = (this.getNodeParameter('id', itemIndex) as INodeParameterResourceLocator).value;
const endpoint = `/documents/${id}/activity`;
const responses = (await apiRequestPaginated.call(this, itemIndex, 'GET', endpoint)) as any[];
const statusCode = responses.reduce((acc, response) => acc + response.statusCode, 0) / responses.length;
if (statusCode !== 200) {
throw new NodeOperationError(
this.getNode(),
`The document activity you are requesting could not be found`,
{
description: JSON.stringify(
responses.map(response => response?.body?.details ?? response?.statusMessage),
),
},
);
}
return {
json: { results: responses.flatMap(response => response.body.activities) },
};
}

View File

@@ -0,0 +1,111 @@
import type {
IExecuteFunctions,
INodeExecutionData,
INodeParameterResourceLocator,
INodeProperties,
} from 'n8n-workflow';
import { Buffer } from 'node:buffer';
import { apiRequest } from '../../transport';
export const description: INodeProperties[] = [
{
displayName: 'ID',
name: 'id',
default: { mode: 'list', value: '' },
displayOptions: {
show: {
resource: ['document'],
operation: ['get_file'],
},
},
modes: [
{
displayName: 'From List',
name: 'list',
        placeholder: 'Select a Document...',
type: 'list',
typeOptions: {
searchListMethod: 'documentSearch',
searchFilterRequired: false,
searchable: true,
},
},
{
displayName: 'By ID',
name: 'id',
        placeholder: 'Enter Document ID...',
type: 'string',
validation: [
{
type: 'regex',
properties: {
regex: '^[a-zA-Z0-9_]+$',
              errorMessage: 'The ID must contain only letters, numbers, and underscores',
},
},
],
},
{
displayName: 'By URL',
name: 'url',
        placeholder: 'Enter Document URL...',
type: 'string',
validation: [
{
type: 'regex',
properties: {
regex: '^(?:http|https)://(?:.+?)/documents/([a-zA-Z0-9_]+)/?(?:\\?.*)?$',
errorMessage: 'The URL must be a valid Papra document URL (e.g. https://papra.example.com/organizations/org_xxx/documents/doc_xxx?tab=info)',
},
},
],
extractValue: {
type: 'regex',
regex: '^(?:http|https)://(?:.+?)/documents/([a-zA-Z0-9_]+)/?(?:\\?.*)?$',
},
},
],
placeholder: 'ID of the document',
required: true,
type: 'resourceLocator',
},
];
export async function execute(
this: IExecuteFunctions,
itemIndex: number,
): Promise<INodeExecutionData> {
const id = (this.getNodeParameter('id', itemIndex) as INodeParameterResourceLocator).value;
const endpoint = `/documents/${id}`;
const preview = (await apiRequest.call(
this,
itemIndex,
'GET',
`${endpoint}/file`,
undefined,
undefined,
{
json: false,
encoding: null,
resolveWithFullResponse: true,
},
)) as any;
  // Extract the filename from the Content-Disposition header, tolerating a
  // Python-style b'...' byte-string wrapper around the value; fall back to
  // "<id>.pdf" when the header is missing or does not match.
  const filename = preview.headers['content-disposition']
    ?.match(/filename="(?:b['"])?([^"]+)['"]?"/)?.[1]
    ?.replace(/^['"]|['"]$/g, '') ?? `${id}.pdf`;
const mimeType = preview.headers['content-type'];
return {
json: {},
binary: {
data: await this.helpers.prepareBinaryData(
Buffer.from(preview.body),
filename,
mimeType,
),
},
};
}
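The filename parse above stays deliberately forgiving; a sketch of a sturdier variant that also honours the RFC 5987 `filename*` form before the plain quoted form (the helper name is hypothetical, not part of this diff):

// Hypothetical helper: prefer the RFC 5987 `filename*=UTF-8''...` form,
// then the quoted `filename="..."` form, then the supplied fallback.
function filenameFromContentDisposition(header: string | undefined, fallback: string): string {
  if (!header) {
    return fallback;
  }
  const extended = header.match(/filename\*=(?:UTF-8'')?([^;]+)/i);
  if (extended) {
    return decodeURIComponent(extended[1].replace(/^["']|["']$/g, ''));
  }
  const plain = header.match(/filename="?(?:b['"])?([^";]+?)['"]?"?(?:;|$)/i);
  return plain ? plain[1] : fallback;
}

// Usage at the call site above:
// const filename = filenameFromContentDisposition(preview.headers['content-disposition'], `${id}.pdf`);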

View File

@@ -0,0 +1,37 @@
import type {
IExecuteFunctions,
INodeExecutionData,
INodeProperties,
} from 'n8n-workflow';
import {
NodeOperationError,
} from 'n8n-workflow';
import { apiRequestPaginated } from '../../transport';
export const description: INodeProperties[] = [];
export async function execute(
this: IExecuteFunctions,
itemIndex: number,
): Promise<INodeExecutionData> {
const endpoint = '/documents';
const responses = (await apiRequestPaginated.call(this, itemIndex, 'GET', endpoint)) as any[];
  // Check each paginated response individually rather than averaging the
  // status codes, which can mask mixed results.
  const failed = responses.filter(response => response.statusCode !== 200);
  if (failed.length > 0) {
    throw new NodeOperationError(
      this.getNode(),
      'The documents you requested could not be retrieved',
      {
        description: JSON.stringify(
          failed.map(response => response?.body?.error?.message ?? response?.error?.code),
        ),
      },
    );
  }
return {
json: { results: responses.flatMap(response => response.body.documents) },
};
}

View File

@@ -0,0 +1,82 @@
import type {
IExecuteFunctions,
INodeExecutionData,
INodeParameterResourceLocator,
INodeProperties,
} from 'n8n-workflow';
import { apiRequest } from '../../transport';
export const description: INodeProperties[] = [
{
displayName: 'ID',
name: 'id',
default: { mode: 'list', value: '' },
displayOptions: {
show: {
resource: ['document'],
operation: ['remove'],
},
},
modes: [
{
displayName: 'From List',
name: 'list',
        placeholder: 'Select a Document...',
type: 'list',
typeOptions: {
searchListMethod: 'documentSearch',
searchFilterRequired: false,
searchable: true,
},
},
{
displayName: 'By ID',
name: 'id',
        placeholder: 'Enter Document ID...',
type: 'string',
validation: [
{
type: 'regex',
properties: {
regex: '^[a-zA-Z0-9_]+$',
              errorMessage: 'The ID must contain only letters, numbers, and underscores',
},
},
],
},
{
displayName: 'By URL',
name: 'url',
        placeholder: 'Enter Document URL...',
type: 'string',
validation: [
{
type: 'regex',
properties: {
regex: '^(?:http|https)://(?:.+?)/documents/([a-zA-Z0-9_]+)/?(?:\\?.*)?$',
errorMessage: 'The URL must be a valid Papra document URL (e.g. https://papra.example.com/organizations/org_xxx/documents/doc_xxx?tab=info)',
},
},
],
extractValue: {
type: 'regex',
regex: '^(?:http|https)://(?:.+?)/documents/([a-zA-Z0-9_]+)/?(?:\\?.*)?$',
},
},
],
placeholder: 'ID of the document',
required: true,
type: 'resourceLocator',
},
];
export async function execute(
this: IExecuteFunctions,
itemIndex: number,
): Promise<INodeExecutionData> {
const id = (this.getNodeParameter('id', itemIndex) as INodeParameterResourceLocator).value;
const endpoint = `/documents/${id}`;
await apiRequest.call(this, itemIndex, 'DELETE', endpoint);
return { json: { results: [true] } };
}
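The `ID` resource locator above is duplicated verbatim across the get, get_activity, get_file, and remove operations, differing only in the `operation` display option. A sketch of a deduplicating factory — the `createDocumentLocator` name and its module are hypothetical:

import type { INodeProperties } from 'n8n-workflow';

// Hypothetical factory: builds the shared document resource locator,
// parameterised only by the operation it is displayed for.
export function createDocumentLocator(operation: string): INodeProperties {
  return {
    displayName: 'ID',
    name: 'id',
    default: { mode: 'list', value: '' },
    displayOptions: { show: { resource: ['document'], operation: [operation] } },
    modes: [
      // the three modes (From List, By ID, By URL) exactly as defined above
    ],
    placeholder: 'ID of the document',
    required: true,
    type: 'resourceLocator',
  };
}

// Each operation file would then reduce to:
// export const description: INodeProperties[] = [createDocumentLocator('remove')];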

Some files were not shown because too many files have changed in this diff.