feat: deterministic SQL export for PostgreSQL to MySQL/MSSQL (#1030)

* feat: deterministic SQL export for PostgreSQL to MySQL/MSSQL

* fix

* fix

---------

Co-authored-by: Guy Ben-Aharon <baguy3@gmail.com>
Author: Jonathan Fishner
Date: 2025-12-22 12:15:57 +02:00
Committed by: GitHub
Parent: d210319c3b
Commit: fb19a7ac2f
10 changed files with 3384 additions and 4 deletions

View File

@@ -18,10 +18,11 @@ import {
exportBaseSQL,
exportSQL,
} from '@/lib/data/sql-export/export-sql-script';
import { hasCrossDialectSupport } from '@/lib/data/sql-export/cross-dialect';
import { databaseTypeToLabelMap } from '@/lib/databases';
import { DatabaseType } from '@/lib/domain/database-type';
import { Annoyed, Sparkles } from 'lucide-react';
import React, { useCallback, useEffect, useRef } from 'react';
import { Annoyed, Sparkles, Blocks, Wand2 } from 'lucide-react';
import React, { useCallback, useEffect, useMemo, useRef } from 'react';
import { Trans, useTranslation } from 'react-i18next';
import type { BaseDialogProps } from '../common/base-dialog-props';
import type { Diagram } from '@/lib/domain/diagram';
@@ -49,8 +50,30 @@ export const ExportSQLDialog: React.FC<ExportSQLDialogProps> = ({
const [error, setError] = React.useState<boolean>(false);
const [isScriptLoading, setIsScriptLoading] =
React.useState<boolean>(false);
const [useAIExport, setUseAIExport] = React.useState<boolean>(false);
const abortControllerRef = useRef<AbortController | null>(null);
// Check if a deterministic export path is available
const hasDeterministicPath = useMemo(() => {
return (
targetDatabaseType === DatabaseType.GENERIC ||
currentDiagram.databaseType === targetDatabaseType ||
hasCrossDialectSupport(
currentDiagram.databaseType,
targetDatabaseType
)
);
}, [targetDatabaseType, currentDiagram.databaseType]);
// Show toggle only for cross-dialect exports where both options are available
const showExportModeToggle = useMemo(() => {
return (
hasDeterministicPath &&
currentDiagram.databaseType !== targetDatabaseType &&
targetDatabaseType !== DatabaseType.GENERIC
);
}, [hasDeterministicPath, currentDiagram.databaseType, targetDatabaseType]);
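// Illustrative cases (not part of the component): exporting a PostgreSQL diagram to
// MySQL, MariaDB, or SQL Server shows the toggle; exporting to GENERIC or back to
// PostgreSQL keeps the single deterministic path and hides it.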
const exportSQLScript = useCallback(async () => {
const filteredDiagram: Diagram = {
...currentDiagram,
@@ -120,7 +143,8 @@ export const ExportSQLDialog: React.FC<ExportSQLDialogProps> = ({
}),
};
if (targetDatabaseType === DatabaseType.GENERIC) {
// Use deterministic export if available and AI export is not selected
if (hasDeterministicPath && !useAIExport) {
return Promise.resolve(
exportBaseSQL({
diagram: filteredDiagram,
@@ -135,7 +159,13 @@ export const ExportSQLDialog: React.FC<ExportSQLDialogProps> = ({
signal: abortControllerRef.current?.signal,
});
}
}, [targetDatabaseType, currentDiagram, filter]);
}, [
targetDatabaseType,
currentDiagram,
filter,
hasDeterministicPath,
useAIExport,
]);
useEffect(() => {
if (!dialog.open) {
@@ -249,6 +279,36 @@ export const ExportSQLDialog: React.FC<ExportSQLDialogProps> = ({
],
})}
</DialogDescription>
{showExportModeToggle && (
<div className="flex items-center pt-2">
<div className="grid h-auto grid-cols-2 gap-1 rounded-xl border bg-background p-1">
<button
type="button"
className={`inline-flex items-center justify-center gap-1.5 rounded-lg px-3 py-1 text-xs transition-all ${
!useAIExport
? 'bg-secondary text-secondary-foreground shadow-sm'
: 'text-muted-foreground hover:bg-muted/50 hover:text-foreground'
}`}
onClick={() => setUseAIExport(false)}
>
<Blocks className="size-3" />
Deterministic
</button>
<button
type="button"
className={`inline-flex items-center justify-center gap-1.5 rounded-lg px-3 py-1 text-xs transition-all ${
useAIExport
? 'bg-secondary text-secondary-foreground shadow-sm'
: 'text-muted-foreground hover:bg-muted/50 hover:text-foreground'
}`}
onClick={() => setUseAIExport(true)}
>
<Wand2 className="size-3" />
AI
</button>
</div>
</div>
)}
</DialogHeader>
<DialogInternalContent>
<div className="flex flex-1 items-center justify-center">

View File

@@ -0,0 +1,849 @@
import { describe, it, expect } from 'vitest';
import { exportPostgreSQLToMySQL } from '../cross-dialect/postgresql/to-mysql';
import { exportPostgreSQLToMSSQL } from '../cross-dialect/postgresql/to-mssql';
import { exportBaseSQL } from '../export-sql-script';
import { DatabaseType } from '@/lib/domain/database-type';
import type { Diagram } from '@/lib/domain/diagram';
import type { DBTable } from '@/lib/domain/db-table';
import type { DBField } from '@/lib/domain/db-field';
import {
type DBCustomType,
DBCustomTypeKind,
} from '@/lib/domain/db-custom-type';
describe('Cross-Dialect Export Tests', () => {
let idCounter = 0;
const testId = () => `test-id-${++idCounter}`;
const testTime = Date.now();
const createField = (overrides: Partial<DBField>): DBField =>
({
id: testId(),
name: 'field',
type: { id: 'text', name: 'text' },
primaryKey: false,
nullable: true,
unique: false,
createdAt: testTime,
...overrides,
}) as DBField;
const createTable = (overrides: Partial<DBTable>): DBTable =>
({
id: testId(),
name: 'table',
fields: [],
indexes: [],
createdAt: testTime,
x: 0,
y: 0,
width: 200,
...overrides,
}) as DBTable;
const createDiagram = (overrides: Partial<Diagram>): Diagram =>
({
id: testId(),
name: 'diagram',
databaseType: DatabaseType.POSTGRESQL,
tables: [],
relationships: [],
createdAt: testTime,
updatedAt: testTime,
...overrides,
}) as Diagram;
describe('PostgreSQL to MySQL Export', () => {
describe('Type Conversions', () => {
it('should convert basic integer types', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'users',
fields: [
createField({
name: 'id',
type: { id: 'int', name: 'int' },
primaryKey: true,
nullable: false,
}),
createField({
name: 'count',
type: { id: 'bigint', name: 'bigint' },
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('`id` INT NOT NULL');
expect(result).toContain('`count` BIGINT');
});
it('should convert boolean to TINYINT(1)', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'flags',
fields: [
createField({
name: 'is_active',
type: { id: 'boolean', name: 'boolean' },
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('TINYINT(1)');
});
it('should convert UUID to CHAR(36) with comment', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'entities',
fields: [
createField({
name: 'external_id',
type: { id: 'uuid', name: 'uuid' },
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('CHAR(36)');
expect(result).toContain('-- Was: uuid');
});
it('should convert JSONB to JSON with inline comment', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'documents',
fields: [
createField({
name: 'data',
type: { id: 'jsonb', name: 'jsonb' },
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('JSON');
expect(result).toContain('-- Was: jsonb');
});
it('should convert array types to JSON', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'posts',
fields: [
createField({
name: 'tags',
type: { id: 'text[]', name: 'text[]' },
isArray: true,
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('JSON');
expect(result).toContain('PostgreSQL array');
});
it('should convert SERIAL to INT AUTO_INCREMENT', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'items',
fields: [
createField({
name: 'id',
type: { id: 'serial', name: 'serial' },
primaryKey: true,
nullable: false,
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('INT');
expect(result).toContain('AUTO_INCREMENT');
});
it('should convert nextval default to AUTO_INCREMENT', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'items',
fields: [
createField({
name: 'id',
type: { id: 'int', name: 'int' },
primaryKey: true,
nullable: false,
default:
"nextval('items_id_seq'::regclass)",
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('AUTO_INCREMENT');
expect(result).not.toContain('nextval');
});
it('should convert timestamptz to DATETIME with warning', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'events',
fields: [
createField({
name: 'occurred_at',
type: {
id: 'timestamptz',
name: 'timestamptz',
},
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('DATETIME');
expect(result).toContain('-- Was: timestamptz');
});
it('should convert inet to VARCHAR(45)', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'connections',
fields: [
createField({
name: 'ip_address',
type: { id: 'inet', name: 'inet' },
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('VARCHAR(45)');
});
});
describe('ENUM Types', () => {
it('should convert ENUM to VARCHAR with values comment', () => {
const customTypes: DBCustomType[] = [
{
id: testId(),
name: 'status_type',
kind: DBCustomTypeKind.enum,
values: ['pending', 'active', 'closed'],
},
];
const diagram = createDiagram({
customTypes,
tables: [
createTable({
name: 'tickets',
fields: [
createField({
name: 'status',
type: {
id: 'status_type',
name: 'status_type',
},
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('VARCHAR');
expect(result).toContain("'pending', 'active', 'closed'");
});
});
describe('Schema Handling', () => {
it('should convert PostgreSQL schema to MySQL database', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'users',
schema: 'app',
fields: [
createField({
name: 'id',
type: { id: 'int', name: 'int' },
primaryKey: true,
nullable: false,
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('CREATE DATABASE IF NOT EXISTS `app`');
expect(result).toContain('`app`.`users`');
});
});
describe('Default Values', () => {
it('should convert now() to CURRENT_TIMESTAMP', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'logs',
fields: [
createField({
name: 'created_at',
type: {
id: 'timestamp',
name: 'timestamp',
},
default: 'now()',
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('DEFAULT CURRENT_TIMESTAMP');
});
it('should convert gen_random_uuid() to UUID()', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'entities',
fields: [
createField({
name: 'id',
type: { id: 'uuid', name: 'uuid' },
default: 'gen_random_uuid()',
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('DEFAULT (UUID())');
});
});
describe('Warnings Header', () => {
it('should include conversion notes header', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'test',
fields: [
createField({
name: 'data',
type: { id: 'jsonb', name: 'jsonb' },
}),
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('-- PostgreSQL to MySQL conversion');
expect(result).toContain('-- Generated by ChartDB');
});
});
});
describe('PostgreSQL to SQL Server Export', () => {
describe('Type Conversions', () => {
it('should convert boolean to BIT', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'flags',
fields: [
createField({
name: 'is_active',
type: { id: 'boolean', name: 'boolean' },
}),
],
}),
],
});
const result = exportPostgreSQLToMSSQL({ diagram });
expect(result).toContain('BIT');
});
it('should convert UUID to UNIQUEIDENTIFIER', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'entities',
fields: [
createField({
name: 'id',
type: { id: 'uuid', name: 'uuid' },
}),
],
}),
],
});
const result = exportPostgreSQLToMSSQL({ diagram });
expect(result).toContain('UNIQUEIDENTIFIER');
});
it('should convert TEXT to NVARCHAR(MAX)', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'articles',
fields: [
createField({
name: 'content',
type: { id: 'text', name: 'text' },
}),
],
}),
],
});
const result = exportPostgreSQLToMSSQL({ diagram });
expect(result).toContain('NVARCHAR(MAX)');
});
it('should convert SERIAL to INT IDENTITY', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'items',
fields: [
createField({
name: 'id',
type: { id: 'serial', name: 'serial' },
primaryKey: true,
nullable: false,
}),
],
}),
],
});
const result = exportPostgreSQLToMSSQL({ diagram });
expect(result).toContain('INT');
expect(result).toContain('IDENTITY(1,1)');
});
it('should convert timestamptz to DATETIMEOFFSET', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'events',
fields: [
createField({
name: 'occurred_at',
type: {
id: 'timestamptz',
name: 'timestamptz',
},
}),
],
}),
],
});
const result = exportPostgreSQLToMSSQL({ diagram });
expect(result).toContain('DATETIMEOFFSET');
});
it('should convert JSON/JSONB to NVARCHAR(MAX)', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'documents',
fields: [
createField({
name: 'data',
type: { id: 'jsonb', name: 'jsonb' },
}),
],
}),
],
});
const result = exportPostgreSQLToMSSQL({ diagram });
expect(result).toContain('NVARCHAR(MAX)');
});
});
describe('Default Values', () => {
it('should convert now() to GETDATE()', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'logs',
fields: [
createField({
name: 'created_at',
type: {
id: 'timestamp',
name: 'timestamp',
},
default: 'now()',
}),
],
}),
],
});
const result = exportPostgreSQLToMSSQL({ diagram });
expect(result).toContain('DEFAULT GETDATE()');
});
it('should convert gen_random_uuid() to NEWID()', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'entities',
fields: [
createField({
name: 'id',
type: { id: 'uuid', name: 'uuid' },
default: 'gen_random_uuid()',
}),
],
}),
],
});
const result = exportPostgreSQLToMSSQL({ diagram });
expect(result).toContain('DEFAULT NEWID()');
});
it('should convert true/false to 1/0', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'settings',
fields: [
createField({
name: 'is_enabled',
type: { id: 'boolean', name: 'boolean' },
default: 'true',
}),
],
}),
],
});
const result = exportPostgreSQLToMSSQL({ diagram });
expect(result).toContain('DEFAULT 1');
});
});
describe('Schema Handling', () => {
it('should create SQL Server schema', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'users',
schema: 'app',
fields: [
createField({
name: 'id',
type: { id: 'int', name: 'int' },
primaryKey: true,
nullable: false,
}),
],
}),
],
});
const result = exportPostgreSQLToMSSQL({ diagram });
expect(result).toContain(
"SELECT * FROM sys.schemas WHERE name = 'app'"
);
expect(result).toContain('[app].[users]');
});
});
describe('Comments via Extended Properties', () => {
it('should add table comments as extended properties', () => {
const diagram = createDiagram({
tables: [
createTable({
name: 'users',
comments: 'User accounts table',
fields: [
createField({
name: 'id',
type: { id: 'int', name: 'int' },
primaryKey: true,
nullable: false,
}),
],
}),
],
});
const result = exportPostgreSQLToMSSQL({ diagram });
expect(result).toContain('sp_addextendedproperty');
expect(result).toContain('User accounts table');
});
});
});
describe('Export Routing via exportBaseSQL', () => {
it('should route PostgreSQL to MySQL through deterministic exporter', () => {
const diagram = createDiagram({
databaseType: DatabaseType.POSTGRESQL,
tables: [
createTable({
name: 'test',
fields: [
createField({
name: 'id',
type: { id: 'uuid', name: 'uuid' },
}),
],
}),
],
});
const result = exportBaseSQL({
diagram,
targetDatabaseType: DatabaseType.MYSQL,
});
// Should use deterministic export (CHAR(36) for UUID)
expect(result).toContain('CHAR(36)');
expect(result).toContain('-- PostgreSQL to MySQL conversion');
});
it('should route PostgreSQL to SQL Server through deterministic exporter', () => {
const diagram = createDiagram({
databaseType: DatabaseType.POSTGRESQL,
tables: [
createTable({
name: 'test',
fields: [
createField({
name: 'id',
type: { id: 'uuid', name: 'uuid' },
}),
],
}),
],
});
const result = exportBaseSQL({
diagram,
targetDatabaseType: DatabaseType.SQL_SERVER,
});
// Should use deterministic export (UNIQUEIDENTIFIER for UUID)
expect(result).toContain('UNIQUEIDENTIFIER');
expect(result).toContain('-- PostgreSQL to SQL Server conversion');
});
it('should route PostgreSQL to MariaDB through MySQL deterministic exporter', () => {
const diagram = createDiagram({
databaseType: DatabaseType.POSTGRESQL,
tables: [
createTable({
name: 'test',
fields: [
createField({
name: 'active',
type: { id: 'boolean', name: 'boolean' },
}),
],
}),
],
});
const result = exportBaseSQL({
diagram,
targetDatabaseType: DatabaseType.MARIADB,
});
// Should use MySQL-style conversion (TINYINT(1) for boolean)
expect(result).toContain('TINYINT(1)');
});
});
describe('Index Handling', () => {
it('should downgrade GIN index to BTREE for MySQL', () => {
const fieldId = testId();
const diagram = createDiagram({
tables: [
createTable({
name: 'documents',
fields: [
createField({
id: fieldId,
name: 'data',
type: { id: 'jsonb', name: 'jsonb' },
}),
],
indexes: [
{
id: testId(),
name: 'idx_data',
unique: false,
fieldIds: [fieldId],
createdAt: testTime,
type: 'gin',
},
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('CREATE INDEX');
expect(result).toContain('-- GIN index downgraded to BTREE');
});
it('should add prefix length for JSON indexes in MySQL', () => {
const fieldId = testId();
const diagram = createDiagram({
tables: [
createTable({
name: 'documents',
fields: [
createField({
id: fieldId,
name: 'data',
type: { id: 'jsonb', name: 'jsonb' },
}),
],
indexes: [
{
id: testId(),
name: 'idx_data',
unique: false,
fieldIds: [fieldId],
createdAt: testTime,
},
],
}),
],
});
const result = exportPostgreSQLToMySQL({ diagram });
// JSON columns in MySQL need prefix length for indexing
expect(result).toContain('(255)');
});
});
describe('Foreign Key Handling', () => {
it('should generate foreign keys with MySQL syntax', () => {
const sourceFieldId = testId();
const targetFieldId = testId();
const sourceTableId = testId();
const targetTableId = testId();
const diagram = createDiagram({
tables: [
createTable({
id: sourceTableId,
name: 'orders',
fields: [
createField({
id: sourceFieldId,
name: 'user_id',
type: { id: 'int', name: 'int' },
}),
],
}),
createTable({
id: targetTableId,
name: 'users',
fields: [
createField({
id: targetFieldId,
name: 'id',
type: { id: 'int', name: 'int' },
primaryKey: true,
}),
],
}),
],
relationships: [
{
id: testId(),
name: 'fk_orders_users',
sourceTableId,
targetTableId,
sourceFieldId,
targetFieldId,
sourceCardinality: 'many',
targetCardinality: 'one',
createdAt: testTime,
},
],
});
const result = exportPostgreSQLToMySQL({ diagram });
expect(result).toContain('ALTER TABLE');
expect(result).toContain('ADD CONSTRAINT');
expect(result).toContain('FOREIGN KEY');
expect(result).toContain('REFERENCES');
});
});
});

View File

@@ -0,0 +1,131 @@
/**
* Common utilities for cross-dialect SQL export.
* These utilities are shared across all cross-dialect exporters.
*/
import type { Diagram } from '@/lib/domain/diagram';
import type { DBTable } from '@/lib/domain/db-table';
export function isFunction(value: string): boolean {
// Common SQL functions
const functionPatterns = [
/^CURRENT_TIMESTAMP$/i,
/^NOW\(\)$/i,
/^GETDATE\(\)$/i,
/^CURRENT_DATE$/i,
/^CURRENT_TIME$/i,
/^UUID\(\)$/i,
/^NEWID\(\)$/i,
/^NEXT VALUE FOR/i,
/^IDENTITY\s*\(\d+,\s*\d+\)$/i,
];
return functionPatterns.some((pattern) => pattern.test(value.trim()));
}
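// For example (derived from the patterns above): isFunction('NOW()') and
// isFunction('CURRENT_TIMESTAMP') return true, while isFunction("'hello'") returns false.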
export function isKeyword(value: string): boolean {
// Common SQL keywords that can be used as default values
const keywords = [
'NULL',
'TRUE',
'FALSE',
'CURRENT_TIMESTAMP',
'CURRENT_DATE',
'CURRENT_TIME',
'CURRENT_USER',
'SESSION_USER',
'SYSTEM_USER',
];
return keywords.includes(value.trim().toUpperCase());
}
export function strHasQuotes(value: string): boolean {
return /^['"].*['"]$/.test(value.trim());
}
export function exportFieldComment(comment: string): string {
if (!comment) {
return '';
}
return comment
.split('\n')
.map((commentLine) => ` -- ${commentLine}\n`)
.join('');
}
export function escapeSQLComment(comment: string): string {
if (!comment) {
return '';
}
// Escape single quotes by doubling them
let escaped = comment.replace(/'/g, "''");
// Replace newlines with spaces to prevent breaking SQL syntax
// Some databases support multi-line comments with specific syntax,
// but for maximum compatibility, we'll replace newlines with spaces
escaped = escaped.replace(/[\r\n]+/g, ' ');
// Trim any excessive whitespace
escaped = escaped.replace(/\s+/g, ' ').trim();
return escaped;
}
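// For example (illustrative input): escapeSQLComment("User's\nprimary account") returns
// "User''s primary account": single quotes are doubled and the newline collapses to a space.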
export function formatTableComment(comment: string): string {
if (!comment) {
return '';
}
// Split by newlines and add -- to each line
return (
comment
.split('\n')
.map((line) => `-- ${line}`)
.join('\n') + '\n'
);
}
export function formatMSSQLTableComment(comment: string): string {
if (!comment) {
return '';
}
// For MSSQL, we use multi-line comment syntax
// Escape */ to prevent breaking the comment block
const escaped = comment.replace(/\*\//g, '* /');
return `/**\n${escaped}\n*/\n`;
}
export function getInlineFK(table: DBTable, diagram: Diagram): string {
if (!diagram.relationships) {
return '';
}
const fks = diagram.relationships
.filter((r) => r.sourceTableId === table.id)
.map((r) => {
const targetTable = diagram.tables?.find(
(t) => t.id === r.targetTableId
);
const sourceField = table.fields.find(
(f) => f.id === r.sourceFieldId
);
const targetField = targetTable?.fields.find(
(f) => f.id === r.targetFieldId
);
if (!targetTable || !sourceField || !targetField) {
return '';
}
const targetTableName = targetTable.schema
? `"${targetTable.schema}"."${targetTable.name}"`
: `"${targetTable.name}"`;
return ` FOREIGN KEY ("${sourceField.name}") REFERENCES ${targetTableName}("${targetField.name}")`;
})
.filter(Boolean);
return fks.join(',\n');
}
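// For example (illustrative diagram): an orders.user_id -> users.id relationship produces
//   FOREIGN KEY ("user_id") REFERENCES "users"("id")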

View File

@@ -0,0 +1,90 @@
/**
* Cross-dialect SQL export module.
* Provides deterministic conversion between different database dialects.
*/
import { DatabaseType } from '@/lib/domain/database-type';
// Re-export types
export type {
TypeMapping,
TypeMappingTable,
IndexTypeMapping,
IndexTypeMappingTable,
} from './types';
// Re-export PostgreSQL exporters
export { exportPostgreSQLToMySQL } from './postgresql/to-mysql';
export { exportPostgreSQLToMSSQL } from './postgresql/to-mssql';
// Re-export unsupported features detection
export {
detectUnsupportedFeatures,
formatWarningsHeader,
getFieldInlineComment,
getIndexInlineComment,
} from './unsupported-features';
export type {
UnsupportedFeature,
UnsupportedFeatureType,
} from './unsupported-features';
/**
* Supported cross-dialect conversion paths.
* Maps source database type to an array of supported target database types.
*/
const CROSS_DIALECT_SUPPORT: Partial<Record<DatabaseType, DatabaseType[]>> = {
[DatabaseType.POSTGRESQL]: [
DatabaseType.MYSQL,
DatabaseType.MARIADB,
DatabaseType.SQL_SERVER,
],
};
/**
* Check if deterministic cross-dialect export is supported from source to target database type.
*
* @param sourceDatabaseType - The source database type (diagram's original database)
* @param targetDatabaseType - The target database type for export
* @returns true if deterministic cross-dialect export is available, false otherwise
*
* @example
* ```ts
* hasCrossDialectSupport(DatabaseType.POSTGRESQL, DatabaseType.MYSQL) // true
* hasCrossDialectSupport(DatabaseType.POSTGRESQL, DatabaseType.SQL_SERVER) // true
* hasCrossDialectSupport(DatabaseType.MYSQL, DatabaseType.POSTGRESQL) // false (not yet implemented)
* ```
*/
export function hasCrossDialectSupport(
sourceDatabaseType: DatabaseType,
targetDatabaseType: DatabaseType
): boolean {
// Same database type doesn't need cross-dialect conversion
if (sourceDatabaseType === targetDatabaseType) {
return false;
}
// Generic target doesn't need cross-dialect conversion
if (targetDatabaseType === DatabaseType.GENERIC) {
return false;
}
const supportedTargets = CROSS_DIALECT_SUPPORT[sourceDatabaseType];
if (!supportedTargets) {
return false;
}
return supportedTargets.includes(targetDatabaseType);
}
/**
* Get all supported target database types for a given source database type.
*
* @param sourceDatabaseType - The source database type
* @returns Array of supported target database types, or empty array if none
*/
export function getSupportedTargetDialects(
sourceDatabaseType: DatabaseType
): DatabaseType[] {
return CROSS_DIALECT_SUPPORT[sourceDatabaseType] ?? [];
}
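// Illustrative usage (not part of this module), e.g. for gating the export UI on
// deterministic support. Values follow directly from CROSS_DIALECT_SUPPORT above:
//
//   hasCrossDialectSupport(DatabaseType.POSTGRESQL, DatabaseType.MARIADB);  // true
//   hasCrossDialectSupport(DatabaseType.POSTGRESQL, DatabaseType.GENERIC);  // false (no conversion needed)
//   getSupportedTargetDialects(DatabaseType.POSTGRESQL);
//   // [DatabaseType.MYSQL, DatabaseType.MARIADB, DatabaseType.SQL_SERVER]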

View File

@@ -0,0 +1,645 @@
/**
* Deterministic exporter for PostgreSQL diagrams to SQL Server DDL.
* Converts PostgreSQL-specific types and features to SQL Server equivalents,
* with comments for features that cannot be fully converted.
*/
import type { Diagram } from '@/lib/domain/diagram';
import type { DBTable } from '@/lib/domain/db-table';
import type { DBField } from '@/lib/domain/db-field';
import type { DBRelationship } from '@/lib/domain/db-relationship';
import type { DBCustomType } from '@/lib/domain/db-custom-type';
import {
exportFieldComment,
formatMSSQLTableComment,
isFunction,
isKeyword,
strHasQuotes,
} from '../common';
import {
postgresqlIndexTypeToSQLServer,
getTypeMapping,
getFallbackTypeMapping,
} from './type-mappings';
import {
detectUnsupportedFeatures,
formatWarningsHeader,
getIndexInlineComment,
} from '../unsupported-features';
import { DatabaseType } from '@/lib/domain/database-type';
/**
* Convert a PostgreSQL default value to SQL Server equivalent
*/
function convertPostgresDefaultToMSSQL(field: DBField): string {
if (!field.default) {
return '';
}
const defaultValue = field.default.trim();
const defaultLower = defaultValue.toLowerCase();
// Handle sequences (nextval) - these become IDENTITY, no default needed
if (defaultLower.includes('nextval')) {
return '';
}
// Handle PostgreSQL now() -> SQL Server GETDATE()
if (defaultLower === 'now()' || defaultLower === 'current_timestamp') {
return 'GETDATE()';
}
// Handle UUID generation functions
if (
defaultLower.includes('gen_random_uuid') ||
defaultLower.includes('uuid_generate')
) {
return 'NEWID()';
}
// Handle JSONB/JSON functions
if (
defaultLower.includes('json_build_object') ||
defaultLower.includes('jsonb_build_object')
) {
return "N'{}'";
}
if (
defaultLower.includes('json_build_array') ||
defaultLower.includes('jsonb_build_array')
) {
return "N'[]'";
}
// Handle empty array defaults
if (
defaultLower === "'{}'::text[]" ||
defaultLower.match(/'\{\}'::.*\[\]/)
) {
return "N'[]'";
}
// Handle array literals
if (defaultLower.startsWith('array[')) {
const content = defaultValue.match(/ARRAY\[(.*?)\]/i)?.[1] || '';
return `N'[${content}]'`;
}
// Handle PostgreSQL true/false -> SQL Server 1/0
if (defaultLower === 'true') {
return '1';
}
if (defaultLower === 'false') {
return '0';
}
// Strip PostgreSQL type casts
const withoutCast = defaultValue.split('::')[0].trim();
// Handle SQL Server specific syntax for wrapped defaults
if (withoutCast.match(/^\(\(.*\)\)$/)) {
return withoutCast.replace(/^\(\(|\)\)$/g, '');
}
// If it's a function call, try to map to SQL Server
if (isFunction(withoutCast)) {
return withoutCast;
}
// If it's a keyword, keep it
if (isKeyword(withoutCast)) {
return withoutCast;
}
// If already quoted, convert to N'' style
if (strHasQuotes(withoutCast)) {
// Convert single quotes to N'' style
if (withoutCast.startsWith("'") && withoutCast.endsWith("'")) {
return `N${withoutCast}`;
}
return withoutCast;
}
// If it's a number, keep it
if (/^-?\d+(\.\d+)?$/.test(withoutCast)) {
return withoutCast;
}
// For other cases, wrap in N''
return `N'${withoutCast.replace(/'/g, "''")}'`;
}
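// Sample conversions implied by the rules above (illustrative input values):
//   "now()"                              -> "GETDATE()"
//   "gen_random_uuid()"                  -> "NEWID()"
//   "nextval('items_id_seq'::regclass)"  -> ""           (emitted as IDENTITY instead)
//   "true"                               -> "1"
//   "'pending'::character varying"       -> "N'pending'"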
/**
* Check if a field type matches a custom enum or composite type
*/
function findCustomType(
fieldTypeName: string,
customTypes: DBCustomType[]
): DBCustomType | undefined {
const normalizedName = fieldTypeName.toLowerCase();
return customTypes.find((ct) => {
const ctName = ct.schema ? `${ct.schema}.${ct.name}` : ct.name;
return (
ctName.toLowerCase() === normalizedName ||
ct.name.toLowerCase() === normalizedName
);
});
}
/**
* Map a PostgreSQL type to SQL Server type with size/precision handling
*/
function mapPostgresTypeToMSSQL(
field: DBField,
customTypes: DBCustomType[]
): {
typeName: string;
inlineComment: string | null;
} {
const originalType = field.type.name.toLowerCase();
let inlineComment: string | null = null;
// Handle array types
if (field.isArray || originalType.endsWith('[]')) {
return {
typeName: 'NVARCHAR(MAX)',
inlineComment: `Was: ${field.type.name} (PostgreSQL array, stored as JSON)`,
};
}
// Check for custom types (ENUM or composite)
const customType = findCustomType(field.type.name, customTypes);
if (customType) {
if (customType.kind === 'enum') {
// ENUMs become NVARCHAR(255)
return {
typeName: 'NVARCHAR(255)',
inlineComment: null, // Inline comment handled separately via getEnumValuesComment
};
} else if (customType.kind === 'composite') {
// Composite types become NVARCHAR(MAX) as JSON
return {
typeName: 'NVARCHAR(MAX)',
inlineComment: `Was: ${field.type.name} (PostgreSQL composite type)`,
};
}
}
// Look up mapping
const mapping = getTypeMapping(originalType, 'sqlserver');
const effectiveMapping = mapping || getFallbackTypeMapping('sqlserver');
let typeName = effectiveMapping.targetType;
// Handle size/precision
if (field.characterMaximumLength) {
if (
typeName === 'VARCHAR' ||
typeName === 'NVARCHAR' ||
typeName === 'CHAR' ||
typeName === 'NCHAR' ||
typeName === 'VARBINARY'
) {
typeName = `${typeName}(${field.characterMaximumLength})`;
}
} else if (effectiveMapping.defaultLength) {
if (
typeName === 'VARCHAR' ||
typeName === 'NVARCHAR' ||
typeName === 'CHAR' ||
typeName === 'NCHAR'
) {
typeName = `${typeName}(${effectiveMapping.defaultLength})`;
}
}
if (field.precision !== undefined && field.scale !== undefined) {
if (
typeName === 'DECIMAL' ||
typeName === 'NUMERIC' ||
typeName === 'DATETIME2' ||
typeName === 'DATETIMEOFFSET'
) {
if (typeName === 'DATETIME2' || typeName === 'DATETIMEOFFSET') {
// For datetime types, only precision applies (fractional seconds)
if (field.precision !== null && field.precision <= 7) {
typeName = `${typeName}(${field.precision})`;
}
} else {
typeName = `${typeName}(${field.precision}, ${field.scale})`;
}
}
} else if (field.precision !== undefined) {
if (typeName === 'DECIMAL' || typeName === 'NUMERIC') {
typeName = `${typeName}(${field.precision})`;
}
} else if (
effectiveMapping.defaultPrecision &&
(typeName === 'DECIMAL' || typeName === 'NUMERIC')
) {
typeName = `${typeName}(${effectiveMapping.defaultPrecision}, ${effectiveMapping.defaultScale || 0})`;
}
// Set inline comment if conversion note exists
if (effectiveMapping.includeInlineComment) {
inlineComment = `Was: ${field.type.name}`;
}
return { typeName, inlineComment };
}
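// Examples, matching the accompanying tests:
//   uuid              -> UNIQUEIDENTIFIER
//   boolean           -> BIT
//   text              -> NVARCHAR(MAX)
//   timestamptz       -> DATETIMEOFFSET
//   text[] (isArray)  -> NVARCHAR(MAX)  (inline comment notes the PostgreSQL array, stored as JSON)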
/**
* Check if a field should have IDENTITY
*/
function isIdentity(field: DBField): boolean {
// Check increment flag
if (field.increment) {
return true;
}
// Check for serial types
const typeLower = field.type.name.toLowerCase();
if (
typeLower === 'serial' ||
typeLower === 'smallserial' ||
typeLower === 'bigserial'
) {
return true;
}
// Check for nextval in default
if (field.default?.toLowerCase().includes('nextval')) {
return true;
}
return false;
}
/**
* Build enum value comment for custom enum types
*/
function getEnumValuesComment(
fieldTypeName: string,
customTypes: DBCustomType[]
): string | null {
const enumType = customTypes.find((ct) => {
const ctName = ct.schema ? `${ct.schema}.${ct.name}` : ct.name;
return (
ctName.toLowerCase() === fieldTypeName.toLowerCase() ||
ct.name.toLowerCase() === fieldTypeName.toLowerCase()
);
});
if (enumType?.kind === 'enum' && enumType.values?.length) {
return `PostgreSQL ENUM: '${enumType.values.join("', '")}'`;
}
return null;
}
/**
* Main export function: PostgreSQL diagram to SQL Server DDL
*/
export function exportPostgreSQLToMSSQL({
diagram,
onlyRelationships = false,
}: {
diagram: Diagram;
onlyRelationships?: boolean;
}): string {
if (!diagram.tables || !diagram.relationships) {
return '';
}
const tables = diagram.tables;
const relationships = diagram.relationships;
const customTypes = diagram.customTypes || [];
// Detect unsupported features for warnings header
const unsupportedFeatures = detectUnsupportedFeatures(
diagram,
DatabaseType.SQL_SERVER
);
// Build output
let sqlScript = formatWarningsHeader(
unsupportedFeatures,
'PostgreSQL',
'SQL Server'
);
if (!onlyRelationships) {
// Create schemas if they don't exist
const schemas = new Set<string>();
tables.forEach((table) => {
if (table.schema) {
schemas.add(table.schema);
}
});
schemas.forEach((schema) => {
sqlScript += `IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '${schema}')\nBEGIN\n EXEC('CREATE SCHEMA [${schema}]');\nEND;\nGO\n`;
});
if (schemas.size > 0) {
sqlScript += '\n';
}
// Generate table creation SQL
sqlScript += tables
.map((table: DBTable) => {
// Skip views
if (table.isView) {
return '';
}
const tableName = table.schema
? `[${table.schema}].[${table.name}]`
: `[${table.name}]`;
// Get primary key fields
const primaryKeyFields = table.fields.filter(
(f) => f.primaryKey
);
return `${
table.comments
? formatMSSQLTableComment(table.comments)
: ''
}CREATE TABLE ${tableName} (\n${table.fields
.map((field: DBField) => {
const fieldName = `[${field.name}]`;
// Map type to SQL Server
const { typeName, inlineComment } =
mapPostgresTypeToMSSQL(field, customTypes);
// Check for enum type and get values
const enumComment = getEnumValuesComment(
field.type.name,
customTypes
);
// Combine inline comments
const fullInlineComment = enumComment || inlineComment;
const notNull = field.nullable ? '' : ' NOT NULL';
// Handle IDENTITY
const identity = isIdentity(field)
? ' IDENTITY(1,1)'
: '';
// Only add UNIQUE constraint if not primary key
const unique =
!field.primaryKey && field.unique ? ' UNIQUE' : '';
// Handle default value
const convertedDefault =
convertPostgresDefaultToMSSQL(field);
const defaultValue =
convertedDefault && !identity
? ` DEFAULT ${convertedDefault}`
: '';
// Build inline SQL comment for conversion notes
const sqlInlineComment = fullInlineComment
? ` -- ${fullInlineComment}`
: '';
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeName}${notNull}${identity}${unique}${defaultValue}${sqlInlineComment}`;
})
.join(',\n')}${
// Add PRIMARY KEY as table constraint
primaryKeyFields.length > 0
? `,\n ${(() => {
const pkIndex = table.indexes.find(
(idx) => idx.isPrimaryKey
);
return pkIndex?.name
? `CONSTRAINT [${pkIndex.name}] `
: '';
})()}PRIMARY KEY (${primaryKeyFields
.map((f) => `[${f.name}]`)
.join(', ')})`
: ''
}\n);\nGO${
// Add indexes
(() => {
const validIndexes = table.indexes
.map((index) => {
// Skip primary key indexes
if (index.isPrimaryKey) {
return '';
}
// Get the list of fields for this index
const indexFields = index.fieldIds
.map((fieldId) => {
const field = table.fields.find(
(f) => f.id === fieldId
);
return field ? field : null;
})
.filter(Boolean);
// Skip if matches primary key
if (
primaryKeyFields.length ===
indexFields.length &&
primaryKeyFields.every((pk) =>
indexFields.some(
(field) =>
field && field.id === pk.id
)
)
) {
return '';
}
// Get index type conversion
const indexType = (
index.type || 'btree'
).toLowerCase();
const indexTypeMapping =
postgresqlIndexTypeToSQLServer[indexType];
const indexInlineComment =
getIndexInlineComment(index, 'sqlserver');
const indexName = table.schema
? `[${table.schema}_${index.name}]`
: `[${index.name}]`;
const indexFieldNames = indexFields
.map((field) =>
field ? `[${field.name}]` : ''
)
.filter(Boolean);
// SQL Server allows at most 32 key columns per index
if (indexFieldNames.length > 32) {
console.warn(
`Warning: Index ${indexName} has ${indexFieldNames.length} columns. Truncating to 32.`
);
indexFieldNames.length = 32;
}
const commentStr = indexInlineComment
? ` -- ${indexInlineComment}`
: '';
return indexFieldNames.length > 0
? `CREATE ${index.unique ? 'UNIQUE ' : ''}${indexTypeMapping?.targetType === 'CLUSTERED' ? 'CLUSTERED ' : 'NONCLUSTERED '}INDEX ${indexName}\nON ${tableName} (${indexFieldNames.join(', ')});${commentStr}`
: '';
})
.filter(Boolean)
.sort((a, b) => a.localeCompare(b));
return validIndexes.length > 0
? `\n-- Indexes\n${validIndexes.join('\nGO\n')}\nGO`
: '';
})()
}`;
})
.filter(Boolean)
.join('\n');
// Add extended properties for table/column comments
const commentStatements: string[] = [];
for (const table of tables) {
if (table.isView) continue;
const schemaName = table.schema || 'dbo';
if (table.comments) {
commentStatements.push(
`EXEC sp_addextendedproperty @name=N'MS_Description', @value=N'${table.comments.replace(/'/g, "''")}', @level0type=N'SCHEMA', @level0name=N'${schemaName}', @level1type=N'TABLE', @level1name=N'${table.name}';`
);
}
for (const field of table.fields) {
if (field.comments) {
commentStatements.push(
`EXEC sp_addextendedproperty @name=N'MS_Description', @value=N'${field.comments.replace(/'/g, "''")}', @level0type=N'SCHEMA', @level0name=N'${schemaName}', @level1type=N'TABLE', @level1name=N'${table.name}', @level2type=N'COLUMN', @level2name=N'${field.name}';`
);
}
}
}
if (commentStatements.length > 0) {
sqlScript += '\n-- Table and column descriptions\n';
sqlScript += commentStatements.join('\nGO\n');
sqlScript += '\nGO\n';
}
}
// Generate foreign keys
if (relationships.length > 0) {
sqlScript += '\n-- Foreign key constraints\n';
// Process relationships and group by schema
const foreignKeys = relationships
.map((r: DBRelationship) => {
const sourceTable = tables.find(
(t) => t.id === r.sourceTableId
);
const targetTable = tables.find(
(t) => t.id === r.targetTableId
);
if (
!sourceTable ||
!targetTable ||
sourceTable.isView ||
targetTable.isView
) {
return null;
}
const sourceField = sourceTable.fields.find(
(f) => f.id === r.sourceFieldId
);
const targetField = targetTable.fields.find(
(f) => f.id === r.targetFieldId
);
if (!sourceField || !targetField) {
return null;
}
// Determine FK placement based on cardinality
let fkTable, fkField, refTable, refField;
if (
r.sourceCardinality === 'one' &&
r.targetCardinality === 'many'
) {
fkTable = targetTable;
fkField = targetField;
refTable = sourceTable;
refField = sourceField;
} else if (
r.sourceCardinality === 'many' &&
r.targetCardinality === 'one'
) {
fkTable = sourceTable;
fkField = sourceField;
refTable = targetTable;
refField = targetField;
} else if (
r.sourceCardinality === 'one' &&
r.targetCardinality === 'one'
) {
fkTable = sourceTable;
fkField = sourceField;
refTable = targetTable;
refField = targetField;
} else {
return null;
}
const fkTableName = fkTable.schema
? `[${fkTable.schema}].[${fkTable.name}]`
: `[${fkTable.name}]`;
const refTableName = refTable.schema
? `[${refTable.schema}].[${refTable.name}]`
: `[${refTable.name}]`;
return {
schema: fkTable.schema || 'dbo',
sql: `ALTER TABLE ${fkTableName} ADD CONSTRAINT [${r.name || `fk_${fkTable.name}_${fkField.name}`}] FOREIGN KEY([${fkField.name}]) REFERENCES ${refTableName}([${refField.name}]);`,
};
})
.filter(Boolean) as { schema: string; sql: string }[];
// Group by schema
const fksBySchema = foreignKeys.reduce(
(acc, fk) => {
if (!acc[fk.schema]) {
acc[fk.schema] = [];
}
acc[fk.schema].push(fk.sql);
return acc;
},
{} as Record<string, string[]>
);
// Sort schemas and output
const sortedSchemas = Object.keys(fksBySchema).sort();
const fkSql = sortedSchemas
.map((schema, index) => {
const schemaFks = fksBySchema[schema].join('\nGO\n');
if (index === 0) {
return `-- Schema: ${schema}\n${schemaFks}`;
} else {
return `\n-- Schema: ${schema}\n${schemaFks}`;
}
})
.join('\n');
sqlScript += fkSql;
sqlScript += '\nGO\n';
}
return sqlScript;
}

View File

@@ -0,0 +1,601 @@
/**
* Deterministic exporter for PostgreSQL diagrams to MySQL DDL.
* Converts PostgreSQL-specific types and features to MySQL equivalents,
* with comments for features that cannot be fully converted.
*/
import type { Diagram } from '@/lib/domain/diagram';
import type { DBTable } from '@/lib/domain/db-table';
import type { DBField } from '@/lib/domain/db-field';
import type { DBRelationship } from '@/lib/domain/db-relationship';
import type { DBCustomType } from '@/lib/domain/db-custom-type';
import {
exportFieldComment,
escapeSQLComment,
formatTableComment,
isFunction,
isKeyword,
strHasQuotes,
} from '../common';
import {
postgresqlIndexTypeToMySQL,
getTypeMapping,
getFallbackTypeMapping,
} from './type-mappings';
import {
detectUnsupportedFeatures,
formatWarningsHeader,
getIndexInlineComment,
} from '../unsupported-features';
import { DatabaseType } from '@/lib/domain/database-type';
/**
* Convert a PostgreSQL default value to MySQL equivalent
*/
function convertPostgresDefaultToMySQL(field: DBField): string {
if (!field.default) {
return '';
}
const defaultValue = field.default.trim();
const defaultLower = defaultValue.toLowerCase();
// Handle sequences (nextval) - these become AUTO_INCREMENT, no default needed
if (defaultLower.includes('nextval')) {
return '';
}
// Handle PostgreSQL now() -> MySQL CURRENT_TIMESTAMP
if (defaultLower === 'now()' || defaultLower === 'current_timestamp') {
return 'CURRENT_TIMESTAMP';
}
// Handle UUID generation functions
if (
defaultLower.includes('gen_random_uuid') ||
defaultLower.includes('uuid_generate')
) {
return '(UUID())';
}
// Handle JSONB/JSON functions
if (
defaultLower.includes('json_build_object') ||
defaultLower.includes('jsonb_build_object')
) {
return "'{}'";
}
if (
defaultLower.includes('json_build_array') ||
defaultLower.includes('jsonb_build_array')
) {
return "'[]'";
}
// Handle empty array defaults
if (
defaultLower === "'{}'::text[]" ||
defaultLower.match(/'\{\}'::.*\[\]/)
) {
return "'[]'";
}
// Handle array literals like ARRAY[1,2,3]
if (defaultLower.startsWith('array[')) {
const content = defaultValue.match(/ARRAY\[(.*?)\]/i)?.[1] || '';
return `'[${content}]'`;
}
// Strip PostgreSQL type casts
const withoutCast = defaultValue.split('::')[0].trim();
// If it's a function call, keep it (MySQL might support it)
if (isFunction(withoutCast)) {
return withoutCast;
}
// If it's a keyword, keep it
if (isKeyword(withoutCast)) {
return withoutCast;
}
// If already quoted, keep it
if (strHasQuotes(withoutCast)) {
return withoutCast;
}
// If it's a number, keep it
if (/^-?\d+(\.\d+)?$/.test(withoutCast)) {
return withoutCast;
}
// For other cases, add quotes
return `'${withoutCast.replace(/'/g, "''")}'`;
}
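// Sample conversions implied by the rules above (illustrative input values):
//   "now()"                              -> "CURRENT_TIMESTAMP"
//   "gen_random_uuid()"                  -> "(UUID())"
//   "nextval('items_id_seq'::regclass)"  -> ""          (emitted as AUTO_INCREMENT instead)
//   "'{}'::jsonb"                        -> "'{}'"
//   "'pending'::status_type"             -> "'pending'"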
/**
* Check if a field type matches a custom enum or composite type
*/
function findCustomType(
fieldTypeName: string,
customTypes: DBCustomType[]
): DBCustomType | undefined {
const normalizedName = fieldTypeName.toLowerCase();
return customTypes.find((ct) => {
const ctName = ct.schema ? `${ct.schema}.${ct.name}` : ct.name;
return (
ctName.toLowerCase() === normalizedName ||
ct.name.toLowerCase() === normalizedName
);
});
}
/**
* Map a PostgreSQL type to MySQL type with size/precision handling
*/
function mapPostgresTypeToMySQL(
field: DBField,
customTypes: DBCustomType[]
): {
typeName: string;
inlineComment: string | null;
} {
const originalType = field.type.name.toLowerCase();
let inlineComment: string | null = null;
// Handle array types
if (field.isArray || originalType.endsWith('[]')) {
return {
typeName: 'JSON',
inlineComment: `Was: ${field.type.name} (PostgreSQL array)`,
};
}
// Check for custom types (ENUM or composite)
const customType = findCustomType(field.type.name, customTypes);
if (customType) {
if (customType.kind === 'enum') {
// ENUMs become VARCHAR(255)
return {
typeName: 'VARCHAR(255)',
inlineComment: null, // Inline comment handled separately via getEnumValuesComment
};
} else if (customType.kind === 'composite') {
// Composite types become JSON
return {
typeName: 'JSON',
inlineComment: `Was: ${field.type.name} (PostgreSQL composite type)`,
};
}
}
// Look up mapping
const mapping = getTypeMapping(originalType, 'mysql');
const effectiveMapping = mapping || getFallbackTypeMapping('mysql');
let typeName = effectiveMapping.targetType;
// Handle size/precision
if (field.characterMaximumLength) {
if (
typeName === 'VARCHAR' ||
typeName === 'CHAR' ||
typeName === 'VARBINARY'
) {
typeName = `${typeName}(${field.characterMaximumLength})`;
}
} else if (effectiveMapping.defaultLength) {
if (typeName === 'VARCHAR' || typeName === 'CHAR') {
typeName = `${typeName}(${effectiveMapping.defaultLength})`;
}
}
if (field.precision !== undefined && field.scale !== undefined) {
if (typeName === 'DECIMAL' || typeName === 'NUMERIC') {
typeName = `${typeName}(${field.precision}, ${field.scale})`;
}
} else if (field.precision !== undefined) {
if (typeName === 'DECIMAL' || typeName === 'NUMERIC') {
typeName = `${typeName}(${field.precision})`;
}
} else if (
effectiveMapping.defaultPrecision &&
(typeName === 'DECIMAL' || typeName === 'NUMERIC')
) {
typeName = `${typeName}(${effectiveMapping.defaultPrecision}, ${effectiveMapping.defaultScale || 0})`;
}
// Set inline comment if conversion note exists
if (effectiveMapping.includeInlineComment) {
inlineComment = `Was: ${field.type.name}`;
}
return { typeName, inlineComment };
}
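// Examples, matching the mapping table and the accompanying tests:
//   uuid              -> CHAR(36)     (inline comment: "Was: uuid")
//   jsonb             -> JSON         (inline comment: "Was: jsonb")
//   boolean           -> TINYINT(1)
//   text[] (isArray)  -> JSON         (inline comment notes the PostgreSQL array)
//   varchar(100)      -> VARCHAR(100); varchar without a length falls back to VARCHAR(255)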
/**
* Check if a field should have AUTO_INCREMENT
*/
function isAutoIncrement(field: DBField): boolean {
// Check increment flag
if (field.increment) {
return true;
}
// Check for serial types
const typeLower = field.type.name.toLowerCase();
if (
typeLower === 'serial' ||
typeLower === 'smallserial' ||
typeLower === 'bigserial'
) {
return true;
}
// Check for nextval in default
if (field.default?.toLowerCase().includes('nextval')) {
return true;
}
return false;
}
/**
* Build enum value comment for custom enum types
*/
function getEnumValuesComment(
fieldTypeName: string,
customTypes: DBCustomType[]
): string | null {
// Find matching enum type
const enumType = customTypes.find((ct) => {
const ctName = ct.schema ? `${ct.schema}.${ct.name}` : ct.name;
return (
ctName.toLowerCase() === fieldTypeName.toLowerCase() ||
ct.name.toLowerCase() === fieldTypeName.toLowerCase()
);
});
if (enumType?.kind === 'enum' && enumType.values?.length) {
return `PostgreSQL ENUM: '${enumType.values.join("', '")}'`;
}
return null;
}
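// For example, a custom enum status_type with values ['pending', 'active', 'closed']
// yields: "PostgreSQL ENUM: 'pending', 'active', 'closed'".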
/**
* Main export function: PostgreSQL diagram to MySQL DDL
*/
export function exportPostgreSQLToMySQL({
diagram,
onlyRelationships = false,
}: {
diagram: Diagram;
onlyRelationships?: boolean;
}): string {
if (!diagram.tables || !diagram.relationships) {
return '';
}
const tables = diagram.tables;
const relationships = diagram.relationships;
const customTypes = diagram.customTypes || [];
// Detect unsupported features for warnings header
const unsupportedFeatures = detectUnsupportedFeatures(
diagram,
DatabaseType.MYSQL
);
// Build output
let sqlScript = formatWarningsHeader(
unsupportedFeatures,
'PostgreSQL',
'MySQL'
);
if (!onlyRelationships) {
// Create databases (schemas) if they don't exist
const schemas = new Set<string>();
tables.forEach((table) => {
if (table.schema) {
schemas.add(table.schema);
}
});
schemas.forEach((schema) => {
sqlScript += `CREATE DATABASE IF NOT EXISTS \`${schema}\`;\n`;
});
if (schemas.size > 0) {
sqlScript += '\n';
}
// Generate table creation SQL
sqlScript += tables
.map((table: DBTable) => {
// Skip views
if (table.isView) {
return '';
}
// Use schema prefix if available
const tableName = table.schema
? `\`${table.schema}\`.\`${table.name}\``
: `\`${table.name}\``;
// Get primary key fields
const primaryKeyFields = table.fields.filter(
(f) => f.primaryKey
);
return `${
table.comments ? formatTableComment(table.comments) : ''
}\nCREATE TABLE IF NOT EXISTS ${tableName} (\n${table.fields
.map((field: DBField) => {
const fieldName = `\`${field.name}\``;
// Map type to MySQL
const { typeName, inlineComment } =
mapPostgresTypeToMySQL(field, customTypes);
// Check for enum type and get values
const enumComment = getEnumValuesComment(
field.type.name,
customTypes
);
// Combine inline comments
const fullInlineComment = enumComment || inlineComment;
const notNull = field.nullable ? '' : ' NOT NULL';
// Handle auto_increment
const autoIncrement = isAutoIncrement(field)
? ' AUTO_INCREMENT'
: '';
// Only add UNIQUE constraint if the field is not part of the primary key
const unique =
!field.primaryKey && field.unique ? ' UNIQUE' : '';
// Handle default value
const convertedDefault =
convertPostgresDefaultToMySQL(field);
const defaultValue =
convertedDefault && !autoIncrement
? ` DEFAULT ${convertedDefault}`
: '';
// MySQL supports inline column comments
const comment = field.comments
? ` COMMENT '${escapeSQLComment(field.comments)}'`
: '';
// Build inline SQL comment for conversion notes
const sqlInlineComment = fullInlineComment
? ` -- ${fullInlineComment}`
: '';
return `${exportFieldComment(field.comments ?? '')} ${fieldName} ${typeName}${notNull}${autoIncrement}${unique}${defaultValue}${comment}${sqlInlineComment}`;
})
.join(',\n')}${
// Add PRIMARY KEY as table constraint
primaryKeyFields.length > 0
? `,\n ${(() => {
// Find PK index to get the constraint name
const pkIndex = table.indexes.find(
(idx) => idx.isPrimaryKey
);
return pkIndex?.name
? `CONSTRAINT \`${pkIndex.name}\` `
: '';
})()}PRIMARY KEY (${primaryKeyFields
.map((f) => `\`${f.name}\``)
.join(', ')})`
: ''
}\n)${
// MySQL supports table comments
table.comments
? ` COMMENT='${escapeSQLComment(table.comments)}'`
: ''
};${
// Add indexes
(() => {
const validIndexes = table.indexes
.map((index) => {
// Skip primary key indexes
if (index.isPrimaryKey) {
return '';
}
// Get the list of fields for this index
const indexFields = index.fieldIds
.map((fieldId) => {
const field = table.fields.find(
(f) => f.id === fieldId
);
return field ? field : null;
})
.filter(Boolean);
// Skip if this index exactly matches the primary key fields
if (
primaryKeyFields.length ===
indexFields.length &&
primaryKeyFields.every((pk) =>
indexFields.some(
(field) =>
field && field.id === pk.id
)
)
) {
return '';
}
// Get index type conversion info
const indexType = (
index.type || 'btree'
).toLowerCase();
const indexTypeMapping =
postgresqlIndexTypeToMySQL[indexType];
const indexInlineComment =
getIndexInlineComment(index, 'mysql');
// Create index name
const fieldNamesForIndex = indexFields
.map((field) => field?.name || '')
.join('_');
const uniqueIndicator = index.unique
? '_unique'
: '';
const indexName = `\`idx_${table.name}_${fieldNamesForIndex}${uniqueIndicator}\``;
// Get the properly quoted field names
const indexFieldNames = indexFields
.map((field) =>
field ? `\`${field.name}\`` : ''
)
.filter(Boolean);
// Check for text/blob fields that need prefix length
const indexFieldsWithPrefix =
indexFieldNames.map((name) => {
const field = indexFields.find(
(f) => `\`${f?.name}\`` === name
);
if (!field) return name;
const typeName =
field.type.name.toLowerCase();
// Check if it maps to TEXT, JSON, or BLOB in MySQL
const mapping = getTypeMapping(
typeName,
'mysql'
);
const targetType = (
mapping?.targetType || ''
).toUpperCase();
if (
targetType === 'TEXT' ||
targetType === 'LONGTEXT' ||
targetType === 'MEDIUMTEXT' ||
targetType === 'JSON' ||
targetType === 'BLOB' ||
targetType === 'LONGBLOB'
) {
return `${name}(255)`;
}
return name;
});
const indexTypeStr =
indexTypeMapping?.targetType &&
indexTypeMapping.targetType !== 'BTREE'
? ` USING ${indexTypeMapping.targetType}`
: '';
const commentStr = indexInlineComment
? ` -- ${indexInlineComment}`
: '';
return indexFieldNames.length > 0
? `CREATE ${index.unique ? 'UNIQUE ' : ''}INDEX ${indexName} ON ${tableName}${indexTypeStr} (${indexFieldsWithPrefix.join(', ')});${commentStr}`
: '';
})
.filter(Boolean)
.sort((a, b) => a.localeCompare(b));
return validIndexes.length > 0
? `\n-- Indexes\n${validIndexes.join('\n')}`
: '';
})()
}`;
})
.filter(Boolean)
.join('\n');
}
// Generate foreign keys
if (relationships.length > 0) {
sqlScript += '\n-- Foreign key constraints\n';
const foreignKeys = relationships
.map((r: DBRelationship) => {
const sourceTable = tables.find(
(t) => t.id === r.sourceTableId
);
const targetTable = tables.find(
(t) => t.id === r.targetTableId
);
if (
!sourceTable ||
!targetTable ||
sourceTable.isView ||
targetTable.isView
) {
return '';
}
const sourceField = sourceTable.fields.find(
(f) => f.id === r.sourceFieldId
);
const targetField = targetTable.fields.find(
(f) => f.id === r.targetFieldId
);
if (!sourceField || !targetField) {
return '';
}
// Determine which table should have the foreign key based on cardinality
let fkTable, fkField, refTable, refField;
if (
r.sourceCardinality === 'one' &&
r.targetCardinality === 'many'
) {
fkTable = targetTable;
fkField = targetField;
refTable = sourceTable;
refField = sourceField;
} else if (
r.sourceCardinality === 'many' &&
r.targetCardinality === 'one'
) {
fkTable = sourceTable;
fkField = sourceField;
refTable = targetTable;
refField = targetField;
} else if (
r.sourceCardinality === 'one' &&
r.targetCardinality === 'one'
) {
fkTable = sourceTable;
fkField = sourceField;
refTable = targetTable;
refField = targetField;
} else {
// Many-to-many relationships need a junction table, skip
return '';
}
const fkTableName = fkTable.schema
? `\`${fkTable.schema}\`.\`${fkTable.name}\``
: `\`${fkTable.name}\``;
const refTableName = refTable.schema
? `\`${refTable.schema}\`.\`${refTable.name}\``
: `\`${refTable.name}\``;
const constraintName = `\`fk_${fkTable.name}_${fkField.name}\``;
return `ALTER TABLE ${fkTableName} ADD CONSTRAINT ${constraintName} FOREIGN KEY(\`${fkField.name}\`) REFERENCES ${refTableName}(\`${refField.name}\`);`;
})
.filter(Boolean);
sqlScript += foreignKeys.join('\n');
}
return sqlScript;
}

View File

@@ -0,0 +1,576 @@
/**
* Type mappings for PostgreSQL as the source dialect.
* Maps PostgreSQL types to MySQL and SQL Server equivalents.
*/
import type {
TypeMapping,
TypeMappingTable,
IndexTypeMappingTable,
} from '../types';
/**
* PostgreSQL to MySQL type mappings
*/
export const postgresqlToMySQL: TypeMappingTable = {
// Integer types
int: { targetType: 'INT' },
int4: { targetType: 'INT' },
integer: { targetType: 'INT' },
smallint: { targetType: 'SMALLINT' },
int2: { targetType: 'SMALLINT' },
bigint: { targetType: 'BIGINT' },
int8: { targetType: 'BIGINT' },
// Serial types (auto-increment) - handled specially in exporter
serial: { targetType: 'INT' },
smallserial: { targetType: 'SMALLINT' },
bigserial: { targetType: 'BIGINT' },
// Floating point types
real: { targetType: 'FLOAT' },
float4: { targetType: 'FLOAT' },
'double precision': { targetType: 'DOUBLE' },
float8: { targetType: 'DOUBLE' },
float: { targetType: 'DOUBLE' },
// Decimal/Numeric types
decimal: { targetType: 'DECIMAL', defaultPrecision: 10, defaultScale: 2 },
numeric: { targetType: 'DECIMAL', defaultPrecision: 10, defaultScale: 2 },
money: {
targetType: 'DECIMAL',
defaultPrecision: 19,
defaultScale: 4,
conversionNote: 'PostgreSQL money type converted to DECIMAL(19,4)',
includeInlineComment: true,
},
// Character types
char: { targetType: 'CHAR', defaultLength: 1 },
character: { targetType: 'CHAR', defaultLength: 1 },
varchar: { targetType: 'VARCHAR', defaultLength: 255 },
'character varying': { targetType: 'VARCHAR', defaultLength: 255 },
text: { targetType: 'TEXT' },
name: { targetType: 'VARCHAR', defaultLength: 63 },
// Binary types
bytea: {
targetType: 'LONGBLOB',
conversionNote: 'PostgreSQL bytea converted to LONGBLOB',
includeInlineComment: true,
},
// Boolean type
boolean: { targetType: 'TINYINT(1)' },
bool: { targetType: 'TINYINT(1)' },
// Date/Time types
date: { targetType: 'DATE' },
time: { targetType: 'TIME' },
timetz: {
targetType: 'TIME',
conversionNote: 'Time zone information lost in conversion',
includeInlineComment: true,
},
'time with time zone': {
targetType: 'TIME',
conversionNote: 'Time zone information lost in conversion',
includeInlineComment: true,
},
'time without time zone': { targetType: 'TIME' },
timestamp: { targetType: 'DATETIME' },
timestamptz: {
targetType: 'DATETIME',
conversionNote: 'Time zone information lost in conversion',
includeInlineComment: true,
},
'timestamp with time zone': {
targetType: 'DATETIME',
conversionNote: 'Time zone information lost in conversion',
includeInlineComment: true,
},
'timestamp without time zone': { targetType: 'DATETIME' },
interval: {
targetType: 'VARCHAR',
defaultLength: 100,
conversionNote:
'PostgreSQL interval type has no MySQL equivalent, stored as string',
includeInlineComment: true,
},
// JSON types
json: { targetType: 'JSON' },
jsonb: {
targetType: 'JSON',
conversionNote:
'JSONB binary optimizations not available in MySQL JSON',
includeInlineComment: true,
},
// UUID type
uuid: {
targetType: 'CHAR',
defaultLength: 36,
conversionNote: 'UUID stored as CHAR(36)',
includeInlineComment: true,
},
// Network types
inet: {
targetType: 'VARCHAR',
defaultLength: 45,
conversionNote: 'PostgreSQL inet type converted to VARCHAR',
includeInlineComment: true,
},
cidr: {
targetType: 'VARCHAR',
defaultLength: 45,
conversionNote: 'PostgreSQL cidr type converted to VARCHAR',
includeInlineComment: true,
},
macaddr: {
targetType: 'VARCHAR',
defaultLength: 17,
conversionNote: 'PostgreSQL macaddr type converted to VARCHAR',
includeInlineComment: true,
},
macaddr8: {
targetType: 'VARCHAR',
defaultLength: 23,
conversionNote: 'PostgreSQL macaddr8 type converted to VARCHAR',
includeInlineComment: true,
},
// Bit string types
bit: { targetType: 'BIT', defaultLength: 1 },
varbit: { targetType: 'BIT', defaultLength: 64 },
'bit varying': { targetType: 'BIT', defaultLength: 64 },
// Geometric types (MySQL has partial support)
point: { targetType: 'POINT' },
line: {
targetType: 'LINESTRING',
conversionNote: 'PostgreSQL infinite line converted to LINESTRING',
includeInlineComment: true,
},
lseg: { targetType: 'LINESTRING' },
box: { targetType: 'POLYGON' },
path: { targetType: 'LINESTRING' },
polygon: { targetType: 'POLYGON' },
circle: {
targetType: 'POLYGON',
conversionNote: 'PostgreSQL circle approximated as POLYGON',
includeInlineComment: true,
},
geometry: { targetType: 'GEOMETRY' },
geography: { targetType: 'GEOMETRY' },
// Text search types (no MySQL equivalent)
tsvector: {
targetType: 'TEXT',
conversionNote:
'PostgreSQL full-text search type has no MySQL equivalent',
includeInlineComment: true,
},
tsquery: {
targetType: 'TEXT',
conversionNote:
'PostgreSQL full-text search type has no MySQL equivalent',
includeInlineComment: true,
},
// Range types (no MySQL equivalent)
int4range: {
targetType: 'JSON',
conversionNote: 'PostgreSQL range type stored as JSON [lower, upper]',
includeInlineComment: true,
},
int8range: {
targetType: 'JSON',
conversionNote: 'PostgreSQL range type stored as JSON [lower, upper]',
includeInlineComment: true,
},
numrange: {
targetType: 'JSON',
conversionNote: 'PostgreSQL range type stored as JSON [lower, upper]',
includeInlineComment: true,
},
tsrange: {
targetType: 'JSON',
conversionNote: 'PostgreSQL range type stored as JSON [lower, upper]',
includeInlineComment: true,
},
tstzrange: {
targetType: 'JSON',
conversionNote: 'PostgreSQL range type stored as JSON [lower, upper]',
includeInlineComment: true,
},
daterange: {
targetType: 'JSON',
conversionNote: 'PostgreSQL range type stored as JSON [lower, upper]',
includeInlineComment: true,
},
// OID and system types
oid: { targetType: 'INT UNSIGNED' },
regproc: { targetType: 'VARCHAR', defaultLength: 255 },
regprocedure: { targetType: 'VARCHAR', defaultLength: 255 },
regoper: { targetType: 'VARCHAR', defaultLength: 255 },
regoperator: { targetType: 'VARCHAR', defaultLength: 255 },
regclass: { targetType: 'VARCHAR', defaultLength: 255 },
regtype: { targetType: 'VARCHAR', defaultLength: 255 },
regrole: { targetType: 'VARCHAR', defaultLength: 255 },
regnamespace: { targetType: 'VARCHAR', defaultLength: 255 },
regconfig: { targetType: 'VARCHAR', defaultLength: 255 },
regdictionary: { targetType: 'VARCHAR', defaultLength: 255 },
// XML type
xml: {
targetType: 'TEXT',
conversionNote: 'PostgreSQL XML type converted to TEXT',
includeInlineComment: true,
},
// User-defined and array types (handled specially)
'user-defined': {
targetType: 'JSON',
conversionNote: 'PostgreSQL custom type converted to JSON',
includeInlineComment: true,
},
array: {
targetType: 'JSON',
conversionNote: 'PostgreSQL array type converted to JSON',
includeInlineComment: true,
},
// Enum type (handled specially, but fallback here)
enum: {
targetType: 'VARCHAR',
defaultLength: 255,
conversionNote: 'PostgreSQL ENUM converted to VARCHAR',
includeInlineComment: true,
},
};
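// A minimal reading guide (illustrative, not part of this commit): keys are
// lowercased PostgreSQL type names; values describe the MySQL rendering plus
// an optional warning used for the export header and inline comments.
//
//   postgresqlToMySQL['jsonb'];   // => { targetType: 'JSON', conversionNote: '...', includeInlineComment: true }
//   postgresqlToMySQL['varchar']; // => { targetType: 'VARCHAR', defaultLength: 255 }
//
// Whether defaultLength is rendered as VARCHAR(255) when the source column has
// no explicit length is up to the consuming exporter (assumption, see there).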
/**
* PostgreSQL to SQL Server type mappings
*/
export const postgresqlToSQLServer: TypeMappingTable = {
// Integer types
int: { targetType: 'INT' },
int4: { targetType: 'INT' },
integer: { targetType: 'INT' },
smallint: { targetType: 'SMALLINT' },
int2: { targetType: 'SMALLINT' },
bigint: { targetType: 'BIGINT' },
int8: { targetType: 'BIGINT' },
// Serial types - handled specially with IDENTITY
serial: { targetType: 'INT' },
smallserial: { targetType: 'SMALLINT' },
bigserial: { targetType: 'BIGINT' },
// Floating point types
real: { targetType: 'REAL' },
float4: { targetType: 'REAL' },
'double precision': { targetType: 'FLOAT' },
float8: { targetType: 'FLOAT' },
float: { targetType: 'FLOAT' },
// Decimal/Numeric types
decimal: { targetType: 'DECIMAL', defaultPrecision: 18, defaultScale: 2 },
numeric: { targetType: 'NUMERIC', defaultPrecision: 18, defaultScale: 2 },
money: { targetType: 'MONEY' },
// Character types
char: { targetType: 'CHAR', defaultLength: 1 },
character: { targetType: 'CHAR', defaultLength: 1 },
varchar: { targetType: 'VARCHAR', defaultLength: 255 },
'character varying': { targetType: 'VARCHAR', defaultLength: 255 },
text: { targetType: 'NVARCHAR(MAX)' },
name: { targetType: 'NVARCHAR', defaultLength: 128 },
// Binary types
bytea: { targetType: 'VARBINARY(MAX)' },
// Boolean type
boolean: { targetType: 'BIT' },
bool: { targetType: 'BIT' },
// Date/Time types
date: { targetType: 'DATE' },
time: { targetType: 'TIME' },
timetz: {
targetType: 'TIME',
conversionNote: 'Time zone offset not preserved',
includeInlineComment: true,
},
'time with time zone': {
targetType: 'TIME',
conversionNote: 'Time zone offset not preserved',
includeInlineComment: true,
},
'time without time zone': { targetType: 'TIME' },
timestamp: { targetType: 'DATETIME2' },
timestamptz: { targetType: 'DATETIMEOFFSET' },
'timestamp with time zone': { targetType: 'DATETIMEOFFSET' },
'timestamp without time zone': { targetType: 'DATETIME2' },
interval: {
targetType: 'NVARCHAR',
defaultLength: 100,
conversionNote:
'PostgreSQL interval type has no SQL Server equivalent, stored as string',
includeInlineComment: true,
},
// JSON types
json: { targetType: 'NVARCHAR(MAX)' },
jsonb: {
targetType: 'NVARCHAR(MAX)',
conversionNote:
'JSON stored as NVARCHAR(MAX). Use ISJSON() for validation, JSON functions for querying.',
includeInlineComment: true,
},
// UUID type
uuid: { targetType: 'UNIQUEIDENTIFIER' },
// Network types
inet: {
targetType: 'NVARCHAR',
defaultLength: 45,
conversionNote: 'PostgreSQL inet type converted to NVARCHAR',
includeInlineComment: true,
},
cidr: {
targetType: 'NVARCHAR',
defaultLength: 45,
conversionNote: 'PostgreSQL cidr type converted to NVARCHAR',
includeInlineComment: true,
},
macaddr: {
targetType: 'NVARCHAR',
defaultLength: 17,
conversionNote: 'PostgreSQL macaddr type converted to NVARCHAR',
includeInlineComment: true,
},
macaddr8: {
targetType: 'NVARCHAR',
defaultLength: 23,
conversionNote: 'PostgreSQL macaddr8 type converted to NVARCHAR',
includeInlineComment: true,
},
// Bit string types
bit: { targetType: 'BIT' },
varbit: {
targetType: 'VARBINARY',
defaultLength: 64,
conversionNote: 'Variable bit string converted to VARBINARY',
includeInlineComment: true,
},
'bit varying': {
targetType: 'VARBINARY',
defaultLength: 64,
conversionNote: 'Variable bit string converted to VARBINARY',
includeInlineComment: true,
},
// Geometric types
point: { targetType: 'GEOMETRY' },
line: { targetType: 'GEOMETRY' },
lseg: { targetType: 'GEOMETRY' },
box: { targetType: 'GEOMETRY' },
path: { targetType: 'GEOMETRY' },
polygon: { targetType: 'GEOMETRY' },
circle: {
targetType: 'GEOMETRY',
conversionNote: 'Circle represented as geometry point with radius',
includeInlineComment: true,
},
geometry: { targetType: 'GEOMETRY' },
geography: { targetType: 'GEOGRAPHY' },
// Text search types (no direct equivalent)
tsvector: {
targetType: 'NVARCHAR(MAX)',
conversionNote:
'PostgreSQL full-text search. Use SQL Server Full-Text Search instead.',
includeInlineComment: true,
},
tsquery: {
targetType: 'NVARCHAR(MAX)',
conversionNote:
'PostgreSQL full-text search. Use SQL Server Full-Text Search instead.',
includeInlineComment: true,
},
// Range types (no SQL Server equivalent)
int4range: {
targetType: 'NVARCHAR(MAX)',
conversionNote:
'PostgreSQL range type. Consider using two columns for lower/upper bounds.',
includeInlineComment: true,
},
int8range: {
targetType: 'NVARCHAR(MAX)',
conversionNote:
'PostgreSQL range type. Consider using two columns for lower/upper bounds.',
includeInlineComment: true,
},
numrange: {
targetType: 'NVARCHAR(MAX)',
conversionNote:
'PostgreSQL range type. Consider using two columns for lower/upper bounds.',
includeInlineComment: true,
},
tsrange: {
targetType: 'NVARCHAR(MAX)',
conversionNote:
'PostgreSQL range type. Consider using two columns for lower/upper bounds.',
includeInlineComment: true,
},
tstzrange: {
targetType: 'NVARCHAR(MAX)',
conversionNote:
'PostgreSQL range type. Consider using two columns for lower/upper bounds.',
includeInlineComment: true,
},
daterange: {
targetType: 'NVARCHAR(MAX)',
conversionNote:
'PostgreSQL range type. Consider using two columns for lower/upper bounds.',
includeInlineComment: true,
},
// OID and system types
oid: { targetType: 'INT' },
regproc: { targetType: 'NVARCHAR', defaultLength: 255 },
regprocedure: { targetType: 'NVARCHAR', defaultLength: 255 },
regoper: { targetType: 'NVARCHAR', defaultLength: 255 },
regoperator: { targetType: 'NVARCHAR', defaultLength: 255 },
regclass: { targetType: 'NVARCHAR', defaultLength: 255 },
regtype: { targetType: 'NVARCHAR', defaultLength: 255 },
regrole: { targetType: 'NVARCHAR', defaultLength: 255 },
regnamespace: { targetType: 'NVARCHAR', defaultLength: 255 },
regconfig: { targetType: 'NVARCHAR', defaultLength: 255 },
regdictionary: { targetType: 'NVARCHAR', defaultLength: 255 },
// XML type
xml: { targetType: 'XML' },
// User-defined and array types
'user-defined': {
targetType: 'NVARCHAR(MAX)',
conversionNote: 'PostgreSQL custom type converted to NVARCHAR(MAX)',
includeInlineComment: true,
},
array: {
targetType: 'NVARCHAR(MAX)',
conversionNote:
'PostgreSQL array converted to NVARCHAR(MAX) as JSON array',
includeInlineComment: true,
},
// Enum type (handled specially)
enum: {
targetType: 'NVARCHAR',
defaultLength: 255,
conversionNote: 'PostgreSQL ENUM converted to NVARCHAR',
includeInlineComment: true,
},
};
/**
* Index type mappings from PostgreSQL to MySQL
*/
export const postgresqlIndexTypeToMySQL: IndexTypeMappingTable = {
btree: { targetType: 'BTREE' },
hash: { targetType: 'HASH' },
gin: {
targetType: 'BTREE',
note: 'GIN index downgraded to BTREE (MySQL does not support GIN)',
},
gist: {
targetType: 'BTREE',
note: 'GiST index downgraded to BTREE (MySQL does not support GiST)',
},
spgist: {
targetType: 'BTREE',
note: 'SP-GiST index downgraded to BTREE (MySQL does not support SP-GiST)',
},
brin: {
targetType: 'BTREE',
note: 'BRIN index downgraded to BTREE (MySQL does not support BRIN)',
},
};
/**
* Index type mappings from PostgreSQL to SQL Server
*/
export const postgresqlIndexTypeToSQLServer: IndexTypeMappingTable = {
btree: { targetType: 'NONCLUSTERED' },
hash: {
targetType: 'NONCLUSTERED',
note: 'Hash index converted to NONCLUSTERED',
},
gin: {
targetType: 'NONCLUSTERED',
note: 'GIN index downgraded to NONCLUSTERED. Consider using Full-Text Index.',
},
gist: {
targetType: 'SPATIAL',
note: 'GiST index mapped to SPATIAL; SPATIAL indexes apply only to geometry/geography columns, use NONCLUSTERED otherwise',
},
spgist: {
targetType: 'NONCLUSTERED',
note: 'SP-GiST index converted to NONCLUSTERED',
},
brin: {
targetType: 'NONCLUSTERED',
note: 'BRIN index converted to NONCLUSTERED',
},
clustered: { targetType: 'CLUSTERED' },
nonclustered: { targetType: 'NONCLUSTERED' },
};
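// Illustrative only (not part of this commit): a PostgreSQL GIN index resolves
// differently per target dialect.
//
//   postgresqlIndexTypeToMySQL['gin'];
//   // => { targetType: 'BTREE', note: 'GIN index downgraded to BTREE (MySQL does not support GIN)' }
//
//   postgresqlIndexTypeToSQLServer['gin'];
//   // => { targetType: 'NONCLUSTERED', note: 'GIN index downgraded to NONCLUSTERED. Consider using Full-Text Index.' }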
/**
* Get the type mapping for a PostgreSQL type to a target dialect
*/
export function getTypeMapping(
postgresType: string,
targetDialect: 'mysql' | 'sqlserver'
): TypeMapping | undefined {
const normalizedType = postgresType.toLowerCase().trim();
// Check for array types
if (normalizedType.endsWith('[]')) {
return targetDialect === 'mysql'
? postgresqlToMySQL['array']
: postgresqlToSQLServer['array'];
}
const mappingTable =
targetDialect === 'mysql' ? postgresqlToMySQL : postgresqlToSQLServer;
return mappingTable[normalizedType];
}
/**
* Get fallback type mapping when no explicit mapping exists
*/
export function getFallbackTypeMapping(
targetDialect: 'mysql' | 'sqlserver'
): TypeMapping {
return targetDialect === 'mysql'
? {
targetType: 'TEXT',
conversionNote: 'Unknown PostgreSQL type converted to TEXT',
includeInlineComment: true,
}
: {
targetType: 'NVARCHAR(MAX)',
conversionNote:
'Unknown PostgreSQL type converted to NVARCHAR(MAX)',
includeInlineComment: true,
};
}
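// --- Usage sketch (illustrative only, not part of this commit) ---
// Typical lookup pattern: try the explicit mapping, then fall back. The helper
// name below is hypothetical, not an export of this module.
const resolvePostgresType = (
    pgType: string,
    targetDialect: 'mysql' | 'sqlserver'
): TypeMapping =>
    getTypeMapping(pgType, targetDialect) ?? getFallbackTypeMapping(targetDialect);

// resolvePostgresType('text[]', 'mysql')      => array entry (JSON + note)
// resolvePostgresType('hstore', 'sqlserver')  => NVARCHAR(MAX) fallback ('hstore' has no entry above)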

View File

@@ -0,0 +1,42 @@
/**
* Shared type definitions for cross-dialect SQL export.
* These types are used across all source→target dialect mappings.
*/
/**
* Represents a type mapping from a source database type to a target type.
*/
export interface TypeMapping {
/** The target database type name */
targetType: string;
/** Optional comment/warning about the conversion */
conversionNote?: string;
/** Whether the original type info should be included as inline comment */
includeInlineComment?: boolean;
/** For types that need length specification */
defaultLength?: number;
/** For types that need precision */
defaultPrecision?: number;
/** For types that need scale */
defaultScale?: number;
}
/**
* A table of type mappings keyed by source type name.
*/
export type TypeMappingTable = Record<string, TypeMapping>;
/**
* Represents an index type mapping from source to target database.
*/
export interface IndexTypeMapping {
/** The target index type name */
targetType: string;
/** Optional note about the conversion */
note?: string;
}
/**
* A table of index type mappings keyed by source index type.
*/
export type IndexTypeMappingTable = Record<string, IndexTypeMapping>;
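// Illustrative only (not part of this commit): a typical numeric entry combines
// the target type with precision/scale defaults.
//
//   const sample: TypeMapping = {
//       targetType: 'DECIMAL',
//       defaultPrecision: 10,
//       defaultScale: 2,
//       conversionNote: 'Precision/scale defaulted where the source column had none',
//       includeInlineComment: true,
//   };
//
// How the exporter renders this (e.g. DECIMAL(10,2)) is an assumption about the
// consuming code; the sample values are hypothetical.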

View File

@@ -0,0 +1,368 @@
/**
* Detects PostgreSQL features that cannot be fully converted to target dialects.
* Used to generate warning comments in cross-dialect exports.
*/
import type { Diagram } from '@/lib/domain/diagram';
import type { DBTable } from '@/lib/domain/db-table';
import type { DBField } from '@/lib/domain/db-field';
import type { DBIndex } from '@/lib/domain/db-index';
import type { DBCustomType } from '@/lib/domain/db-custom-type';
import { DatabaseType } from '@/lib/domain/database-type';
import {
getTypeMapping,
postgresqlIndexTypeToMySQL,
postgresqlIndexTypeToSQLServer,
} from './postgresql/type-mappings';
export type UnsupportedFeatureType =
| 'type'
| 'index'
| 'constraint'
| 'default'
| 'custom_type'
| 'array'
| 'schema';
export interface UnsupportedFeature {
type: UnsupportedFeatureType;
tableName?: string;
objectName: string;
feature: string;
recommendation: string;
}
/**
* Detect all unsupported PostgreSQL features when converting to a target dialect
*/
export function detectUnsupportedFeatures(
diagram: Diagram,
targetDialect: DatabaseType
): UnsupportedFeature[] {
const features: UnsupportedFeature[] = [];
const dialectKey =
targetDialect === DatabaseType.SQL_SERVER ? 'sqlserver' : 'mysql';
// Check custom types (ENUMs and composites)
if (diagram.customTypes && diagram.customTypes.length > 0) {
features.push(
...detectCustomTypeIssues(diagram.customTypes, dialectKey)
);
}
// Check each table
if (diagram.tables) {
for (const table of diagram.tables) {
if (table.isView) continue;
// Check fields
features.push(...detectFieldIssues(table, dialectKey));
// Check indexes
features.push(...detectIndexIssues(table, dialectKey));
}
}
return features;
}
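// --- Usage sketch (illustrative only, not part of this commit) ---
// Run once per export and feed the result into formatWarningsHeader (below).
// `diagram` is assumed to be the already-filtered diagram being exported.
//
//   const features = detectUnsupportedFeatures(diagram, DatabaseType.MYSQL);
//   const header = formatWarningsHeader(features, 'PostgreSQL', 'MySQL');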
/**
* Detect issues with custom types (ENUMs, composites)
*/
function detectCustomTypeIssues(
customTypes: DBCustomType[],
dialectKey: 'mysql' | 'sqlserver'
): UnsupportedFeature[] {
const features: UnsupportedFeature[] = [];
for (const customType of customTypes) {
const typeName = customType.schema
? `${customType.schema}.${customType.name}`
: customType.name;
if (customType.kind === 'enum') {
const values = customType.values?.join("', '") || '';
features.push({
type: 'custom_type',
objectName: typeName,
feature: `ENUM type with values: '${values}'`,
recommendation:
dialectKey === 'mysql'
? `Converted to VARCHAR(255). Consider using MySQL ENUM or CHECK constraint.`
: `Converted to NVARCHAR(255). Consider using CHECK constraint.`,
});
} else if (customType.kind === 'composite') {
const fields =
customType.fields?.map((f) => `${f.field}: ${f.type}`) || [];
features.push({
type: 'custom_type',
objectName: typeName,
feature: `Composite type with fields: ${fields.join(', ')}`,
recommendation:
dialectKey === 'mysql'
? `Converted to JSON. Consider restructuring as separate columns or JSON.`
: `Converted to NVARCHAR(MAX) as JSON. Consider restructuring as separate columns.`,
});
}
}
return features;
}
/**
* Detect issues with field types and defaults
*/
function detectFieldIssues(
table: DBTable,
dialectKey: 'mysql' | 'sqlserver'
): UnsupportedFeature[] {
const features: UnsupportedFeature[] = [];
const tableName = table.schema
? `${table.schema}.${table.name}`
: table.name;
for (const field of table.fields) {
const typeName = field.type.name.toLowerCase();
// Check for array types
if (field.isArray || typeName.endsWith('[]')) {
features.push({
type: 'array',
tableName,
objectName: field.name,
feature: `Array type: ${typeName}`,
recommendation:
dialectKey === 'mysql'
? `Converted to JSON. Use JSON_ARRAY() for inserts.`
: `Converted to NVARCHAR(MAX) as JSON array.`,
});
}
// Check type mapping for conversion notes
const mapping = getTypeMapping(typeName, dialectKey);
if (mapping?.conversionNote) {
features.push({
type: 'type',
tableName,
objectName: field.name,
feature: `Type: ${typeName}`,
recommendation: mapping.conversionNote,
});
}
// Check for PostgreSQL-specific defaults
if (field.default) {
const defaultLower = field.default.toLowerCase();
// Sequences
if (defaultLower.includes('nextval')) {
const match = field.default.match(/nextval\('([^']+)'/);
const seqName = match ? match[1] : 'unknown';
features.push({
type: 'default',
tableName,
objectName: field.name,
feature: `Sequence: ${seqName}`,
recommendation:
dialectKey === 'mysql'
? `Converted to AUTO_INCREMENT.`
: `Converted to IDENTITY(1,1).`,
});
}
// PostgreSQL-specific functions
if (
defaultLower.includes('gen_random_uuid') ||
defaultLower.includes('uuid_generate')
) {
features.push({
type: 'default',
tableName,
objectName: field.name,
feature: `UUID generation function`,
recommendation:
dialectKey === 'mysql'
? `Use UUID() function in MySQL.`
: `Use NEWID() function in SQL Server.`,
});
}
// Array constructors
if (
defaultLower.includes('array[') ||
defaultLower.includes("'{}")
) {
features.push({
type: 'default',
tableName,
objectName: field.name,
feature: `Array default value`,
recommendation:
dialectKey === 'mysql'
? `Converted to JSON array literal.`
: `Converted to JSON array string.`,
});
}
}
}
return features;
}
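// Illustrative only (not part of this commit): a column whose default is
//   nextval('users_id_seq'::regclass)
// yields a 'default' entry with feature "Sequence: users_id_seq" and a
// recommendation of AUTO_INCREMENT (MySQL) or IDENTITY(1,1) (SQL Server).
// The sequence name is a made-up example.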
/**
* Detect issues with index types
*/
function detectIndexIssues(
table: DBTable,
dialectKey: 'mysql' | 'sqlserver'
): UnsupportedFeature[] {
const features: UnsupportedFeature[] = [];
const tableName = table.schema
? `${table.schema}.${table.name}`
: table.name;
const indexTypeMap =
dialectKey === 'mysql'
? postgresqlIndexTypeToMySQL
: postgresqlIndexTypeToSQLServer;
for (const index of table.indexes) {
if (index.isPrimaryKey) continue;
const indexType = (index.type || 'btree').toLowerCase();
const mapping = indexTypeMap[indexType];
if (mapping?.note) {
features.push({
type: 'index',
tableName,
objectName: index.name,
feature: `${indexType.toUpperCase()} index`,
recommendation: mapping.note,
});
}
}
return features;
}
/**
* Format unsupported features as a warning comment block for SQL output
*/
export function formatWarningsHeader(
features: UnsupportedFeature[],
sourceDialect: string,
targetDialect: string
): string {
if (features.length === 0) {
return `-- ${sourceDialect} to ${targetDialect} conversion\n-- Generated by ChartDB\n`;
}
let header = `-- ${sourceDialect} to ${targetDialect} conversion\n`;
header += `-- Generated by ChartDB\n`;
header += `--\n`;
header += `-- CONVERSION NOTES (${features.length} items):\n`;
// Group by type
const grouped = groupFeaturesByType(features);
for (const [type, items] of Object.entries(grouped)) {
header += `--\n`;
header += `-- ${formatTypeLabel(type as UnsupportedFeatureType)}:\n`;
for (const item of items) {
const location = item.tableName
? `${item.tableName}.${item.objectName}`
: item.objectName;
header += `-- - ${location}: ${item.feature}\n`;
}
}
header += `--\n\n`;
return header;
}
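// Illustrative only (not part of this commit): with two detected items the
// generated header looks roughly like this (names are invented examples):
//
//   -- PostgreSQL to MySQL conversion
//   -- Generated by ChartDB
//   --
//   -- CONVERSION NOTES (2 items):
//   --
//   -- Array Fields:
//   -- - public.users.tags: Array type: text[]
//   --
//   -- Type Conversions:
//   -- - public.users.created: Type: timestamptz
//   --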
/**
* Group features by their type for organized output
*/
function groupFeaturesByType(
features: UnsupportedFeature[]
): Record<string, UnsupportedFeature[]> {
const grouped: Record<string, UnsupportedFeature[]> = {};
for (const feature of features) {
if (!grouped[feature.type]) {
grouped[feature.type] = [];
}
grouped[feature.type].push(feature);
}
return grouped;
}
/**
* Format type label for display
*/
function formatTypeLabel(type: UnsupportedFeatureType): string {
switch (type) {
case 'custom_type':
return 'Custom Types (ENUM/Composite)';
case 'array':
return 'Array Fields';
case 'type':
return 'Type Conversions';
case 'index':
return 'Index Type Changes';
case 'default':
return 'Default Value Conversions';
case 'constraint':
return 'Constraint Changes';
case 'schema':
return 'Schema Changes';
default:
return type;
}
}
/**
* Get inline comment for a specific field conversion
*/
export function getFieldInlineComment(
field: DBField,
dialectKey: 'mysql' | 'sqlserver'
): string | null {
const typeName = field.type.name.toLowerCase();
// Array types
if (field.isArray || typeName.endsWith('[]')) {
return `Was: ${field.type.name} (PostgreSQL array)`;
}
// Check type mapping
const mapping = getTypeMapping(typeName, dialectKey);
if (mapping?.includeInlineComment && mapping.conversionNote) {
return `Was: ${field.type.name}`;
}
return null;
}
/**
* Get inline comment for an index conversion
*/
export function getIndexInlineComment(
index: DBIndex,
dialectKey: 'mysql' | 'sqlserver'
): string | null {
const indexType = (index.type || 'btree').toLowerCase();
const indexTypeMap =
dialectKey === 'mysql'
? postgresqlIndexTypeToMySQL
: postgresqlIndexTypeToSQLServer;
const mapping = indexTypeMap[indexType];
if (mapping?.note) {
return mapping.note;
}
return null;
}
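// --- Usage sketch (illustrative only, not part of this commit) ---
// The two helpers above are intended to be appended as trailing SQL comments
// while column/index lines are emitted; `line` and `emit` are hypothetical.
//
//   const note = getFieldInlineComment(field, 'mysql');
//   emit(note ? `${line} -- ${note}` : line);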

View File

@@ -8,6 +8,10 @@ import { exportMSSQL } from './export-per-type/mssql';
import { exportPostgreSQL } from './export-per-type/postgresql';
import { exportSQLite } from './export-per-type/sqlite';
import { exportMySQL } from './export-per-type/mysql';
import {
exportPostgreSQLToMySQL,
exportPostgreSQLToMSSQL,
} from './cross-dialect';
import { escapeSQLComment } from './export-per-type/common';
import {
databaseTypesWithCommentSupport,
@@ -158,6 +162,20 @@ export const exportBaseSQL = ({
}
}
// Deterministic cross-dialect exports (PostgreSQL to MySQL/SQL Server)
// These do not use LLM and provide consistent, predictable output
if (!isDBMLFlow && diagram.databaseType === DatabaseType.POSTGRESQL) {
if (
targetDatabaseType === DatabaseType.MYSQL ||
targetDatabaseType === DatabaseType.MARIADB
) {
return exportPostgreSQLToMySQL({ diagram, onlyRelationships });
}
if (targetDatabaseType === DatabaseType.SQL_SERVER) {
return exportPostgreSQLToMSSQL({ diagram, onlyRelationships });
}
}
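// Illustrative only (not part of this commit): with the block above in place,
//   exportBaseSQL({ diagram: pgDiagram, targetDatabaseType: DatabaseType.MARIADB })
// returns the deterministic exportPostgreSQLToMySQL() output (MariaDB shares
// the MySQL path); other combinations, and DBML flows, continue to the generic
// logic below.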
// Filter out the tables that are views
const nonViewTables = tables.filter((table) => !table.isView);