mirror of
https://github.com/chartdb/chartdb.git
synced 2026-05-01 15:19:41 -05:00
feat: add import logic based on the JSON input
This commit is contained in:
committed by
Jonathan Fishner
parent
f8f32385c9
commit
939ac2295f
@@ -1,4 +1,3 @@
|
||||
|
||||
<h1 align="center">
|
||||
<a href="https://chartdb.io"><img src="https://github.com/chartdb/chartdb/blob/main/src/assets/logo.png?raw=true" width="400" height="70" alt="ChartDB"></a>
|
||||
<br>
|
||||
@@ -34,7 +33,7 @@
|
||||
**🎉 ChartDB**:
|
||||
ChartDB is a powerful and intuitive database diagramming editor that runs as a SaaS application via a website.
|
||||
Effortlessly visualize and design your database schemas with a single "**Smart Query**" that fetches your entire database structure.
|
||||
Customize your diagrams, export SQL scripts, and explore a full suite of features—all without the need to create an account.
|
||||
Customize your diagrams, export SQL scripts, and explore a full suite of features—all without creating an account.
|
||||
Dive into a seamless database design experience and discover the complete feature set here.
|
||||
|
||||
## Status
|
||||
@@ -52,17 +51,17 @@ ChartDB is currently in Public Beta. Star and watch this repository to get notif
|
||||
|
||||
### Supported Databases
|
||||
|
||||
- ✅ PostgreSQL
|
||||
- ✅ MySQL
|
||||
- ✅ SQL Server
|
||||
- ✅ MariaDB
|
||||
- ✅ SQLite
|
||||
- ✅ PostgreSQL
|
||||
- ✅ MySQL
|
||||
- ✅ SQL Server
|
||||
- ✅ MariaDB
|
||||
- ✅ SQLite
|
||||
|
||||
## 💚 Community & Support
|
||||
|
||||
- [Slack](https://join.slack.com/t/chartdb/shared_invite/zt-2ourrlh5e-mKIHCRML3_~m_gHjD5EcUg) (For live discussion with the community and the ChartDB team)
|
||||
- [GitHub Issues](https://github.com/chartdb/chartdb/issues) (For any bugs and errors you encounter using ChartDB)
|
||||
- [Twitter](https://x.com/chartdb_io) (Get news fast)
|
||||
- [Slack](https://join.slack.com/t/chartdb/shared_invite/zt-2ourrlh5e-mKIHCRML3_~m_gHjD5EcUg) (For live discussion with the community and the ChartDB team)
|
||||
- [GitHub Issues](https://github.com/chartdb/chartdb/issues) (For any bugs and errors you encounter using ChartDB)
|
||||
- [Twitter](https://x.com/chartdb_io) (Get news fast)
|
||||
|
||||
## Contributing
|
||||
|
||||
|
||||
@@ -66,11 +66,33 @@ export const StorageProvider: React.FC<React.PropsWithChildren> = ({
|
||||
}: {
|
||||
diagram: Diagram;
|
||||
}) => {
|
||||
await db.diagrams.add({
|
||||
// Extract tables and relationships from the diagram
|
||||
const { tables, relationships } = diagram;
|
||||
|
||||
// Save the diagram without the tables and relationships
|
||||
const diagramId = await db.diagrams.add({
|
||||
id: diagram.id,
|
||||
name: diagram.name,
|
||||
databaseType: diagram.databaseType,
|
||||
});
|
||||
|
||||
// Save each table associated with this diagram
|
||||
if (tables && tables.length > 0) {
|
||||
await Promise.all(
|
||||
tables.map(async (table) => {
|
||||
await addTable({ diagramId, table });
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
// Save each relationship associated with this diagram
|
||||
if (relationships && relationships.length > 0) {
|
||||
await Promise.all(
|
||||
relationships.map(async (relationship) => {
|
||||
await addRelationship({ diagramId, relationship });
|
||||
})
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const listDiagrams: StorageContext['listDiagrams'] = async (): Promise<
|
||||
@@ -169,7 +191,7 @@ export const StorageProvider: React.FC<React.PropsWithChildren> = ({
|
||||
return await db.db_tables
|
||||
.where('diagramId')
|
||||
.equals(diagramId)
|
||||
.toArray();
|
||||
.sortBy('name'); // Sort by the 'name' field
|
||||
};
|
||||
|
||||
const addRelationship: StorageContext['addRelationship'] = async ({
|
||||
|
||||
@@ -16,11 +16,11 @@ import { getDatabaseLogo } from '@/lib/databases';
|
||||
import { CodeSnippet } from '@/components/code-snippet/code-snippet';
|
||||
import { Textarea } from '@/components/textarea/textarea';
|
||||
import { useStorage } from '@/hooks/use-storage';
|
||||
import { Diagram } from '@/lib/domain/diagram';
|
||||
import { generateId } from '@/lib/utils';
|
||||
import { loadFromDatabaseMetadata } from '@/lib/domain/diagram';
|
||||
import { useCreateDiagramDialog } from '@/hooks/use-create-diagram-dialog';
|
||||
import { useNavigate } from 'react-router-dom';
|
||||
import { useConfig } from '@/hooks/use-config';
|
||||
import { DatabaseMetadata } from '@/lib/import-script-types/database-metadata';
|
||||
|
||||
enum CreateDiagramDialogStep {
|
||||
SELECT_DATABASE = 'SELECT_DATABASE',
|
||||
@@ -56,24 +56,25 @@ export const CreateDiagramDialog: React.FC<CreateDiagramDialogProps> = ({
|
||||
}, [listDiagrams, setDiagramNumber]);
|
||||
|
||||
const createNewDiagram = useCallback(async () => {
|
||||
const diagram: Diagram = {
|
||||
id: generateId(),
|
||||
name: `Diagram ${diagramNumber}`,
|
||||
console.log('scriptResult: ', scriptResult);
|
||||
const databaseMetadata: DatabaseMetadata = JSON.parse(scriptResult);
|
||||
const diagram = loadFromDatabaseMetadata(
|
||||
databaseType,
|
||||
tables: [],
|
||||
relationships: [],
|
||||
};
|
||||
databaseMetadata
|
||||
);
|
||||
console.log('diagram: ', diagram);
|
||||
|
||||
await addDiagram({ diagram });
|
||||
await updateConfig({ defaultDiagramId: diagram.id });
|
||||
closeCreateDiagramDialog();
|
||||
navigate(`/diagrams/${diagram.id}`);
|
||||
}, [
|
||||
databaseType,
|
||||
diagramNumber,
|
||||
addDiagram,
|
||||
closeCreateDiagramDialog,
|
||||
navigate,
|
||||
updateConfig,
|
||||
scriptResult,
|
||||
]);
|
||||
|
||||
const renderDatabaseOption = useCallback((type: DatabaseType) => {
|
||||
|
||||
@@ -42,6 +42,7 @@ export const postgresDataTypes = [
|
||||
// Character Types
|
||||
'char',
|
||||
'varchar',
|
||||
'character varying',
|
||||
'text',
|
||||
|
||||
// Binary Data Types
|
||||
@@ -119,6 +120,9 @@ export const postgresDataTypes = [
|
||||
'regnamespace',
|
||||
'regconfig',
|
||||
'regdictionary',
|
||||
|
||||
// User Defined types
|
||||
'user-defined',
|
||||
] as const;
|
||||
export const mysqlDataTypes = [
|
||||
// Numeric Types
|
||||
|
||||
@@ -26,14 +26,23 @@ const rawPostgresQuery = '';
|
||||
// contype = 'f'
|
||||
// AND connamespace = 'public'::regnamespace) as x
|
||||
// ), pk_info AS (
|
||||
// SELECT 'public' as schema_name,
|
||||
// SELECT array_to_string(array_agg(CONCAT('{"schema":"', schema_name, '"',
|
||||
// ',"table":"', pk_table, '"',
|
||||
// ',"column":"', pk_column, '"',
|
||||
// ',"pk_def":"', pk_def,
|
||||
// '"}')), ',') as pk_metadata
|
||||
// FROM (
|
||||
// SELECT
|
||||
// 'public' as schema_name,
|
||||
// conrelid::regclass::text AS pk_table,
|
||||
// array_length(string_to_array(substring(pg_get_constraintdef(oid) FROM '\((.*?)\)'), ','), 1) AS field_count,
|
||||
// regexp_split_to_table(substring(pg_get_constraintdef(oid) FROM '\((.*)\)'),',') AS pk_column,
|
||||
// unnest(string_to_array(substring(pg_get_constraintdef(oid) FROM '\((.*?)\)'), ',')) AS pk_column,
|
||||
// pg_get_constraintdef(oid) as pk_def
|
||||
// FROM pg_constraint c
|
||||
// where connamespace = 'public'::regnamespace
|
||||
// AND c.contype = 'p'
|
||||
// FROM
|
||||
// pg_constraint
|
||||
// WHERE
|
||||
// contype = 'p'
|
||||
// AND connamespace = 'public'::regnamespace
|
||||
// ) as y
|
||||
// ),
|
||||
// indexes_cols as (
|
||||
// select tnsp.nspname as schema_name,
|
||||
@@ -63,14 +72,11 @@ const rawPostgresQuery = '';
|
||||
// select array_to_string(array_agg(CONCAT('{"schema":"', cols.table_schema,
|
||||
// '","table":"', cols.table_name,
|
||||
// '","name":"', cols.column_name,
|
||||
// '","type":"', replace(cols.data_type, '"', ''),
|
||||
// '","type":"', LOWER(replace(cols.data_type, '"', '')),
|
||||
// '","ordinal_position":"', cols.ordinal_position,
|
||||
// '","nullable":', case when (cols.IS_NULLABLE = 'YES') then 'true' else 'false' end,
|
||||
// ',"is_pk":', case when (pk_column is not null) then 'true' else 'false' end,
|
||||
// ',"collation":"', coalesce(cols.COLLATION_NAME, ''), '"}')), ',') as cols_metadata
|
||||
// from information_schema.columns cols
|
||||
// left join pk_info as pk
|
||||
// on pk.schema_name = cols.table_schema and pk_table = cols.table_name and pk_column = cols.column_name
|
||||
// where cols.table_schema not in ('information_schema', 'pg_catalog')
|
||||
// ), indexes_metadata as (
|
||||
// select array_to_string(array_agg(CONCAT('{"schema":"', schema_name,
|
||||
@@ -107,12 +113,13 @@ const rawPostgresQuery = '';
|
||||
// where views.schemaname not in ('information_schema', 'pg_catalog')
|
||||
// )
|
||||
// select CONCAT('{ "fk_info": [', coalesce(fk_metadata, ''),
|
||||
// '], "pk_info": [', COALESCE(pk_metadata, ''),
|
||||
// '], "columns": [', coalesce(cols_metadata, ''),
|
||||
// '], "indexes": [', coalesce(indexes_metadata, ''),
|
||||
// '], "tables":[', coalesce(tbls_metadata, ''),
|
||||
// '], "views":[', coalesce(views_metadata, ''),
|
||||
// '], "server_name": "', '', '", "version": "', '',
|
||||
// '"}') as " "
|
||||
// from cols,indexes_metadata, tbls, config, views, fk_info;
|
||||
// from fk_info, pk_info, cols, indexes_metadata, tbls, config, views;
|
||||
// `;
|
||||
export const postgresQuery = minimizeQuery(rawPostgresQuery);
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
import { ForeignKeyInfo } from '../import-script-types/foreign-key-info';
|
||||
import { DBTable } from './db-table';
|
||||
import { generateId } from '@/lib/utils';
|
||||
|
||||
export interface DBRelationship {
|
||||
id: string;
|
||||
name: string;
|
||||
@@ -10,3 +14,38 @@ export interface DBRelationship {
|
||||
}
|
||||
|
||||
export type RelationshipType = 'one_to_one' | 'one_to_many' | 'many_to_one';
|
||||
|
||||
export const createRelationships = (
|
||||
fkInfo: ForeignKeyInfo[],
|
||||
tables: DBTable[]
|
||||
): DBRelationship[] => {
|
||||
return fkInfo
|
||||
.map((fk: ForeignKeyInfo) => {
|
||||
const sourceTable = tables.find((table) => table.name === fk.table);
|
||||
const targetTable = tables.find(
|
||||
(table) => table.name === fk.reference_table
|
||||
);
|
||||
const sourceField = sourceTable?.fields.find(
|
||||
(field) => field.name === fk.column
|
||||
);
|
||||
const targetField = targetTable?.fields.find(
|
||||
(field) => field.name === fk.reference_column
|
||||
);
|
||||
|
||||
if (sourceTable && targetTable && sourceField && targetField) {
|
||||
return {
|
||||
id: generateId(),
|
||||
name: fk.foreign_key_name,
|
||||
sourceTableId: sourceTable.id,
|
||||
targetTableId: targetTable.id,
|
||||
sourceFieldId: sourceField.id,
|
||||
targetFieldId: targetField.id,
|
||||
type: 'many_to_one', // This could be adjusted based on your logic
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
})
|
||||
.filter((rel) => rel !== null) as DBRelationship[];
|
||||
};
|
||||
|
||||
@@ -1,5 +1,11 @@
|
||||
import { DBIndex } from './db-index';
|
||||
import { DBField } from './db-field';
|
||||
import { TableInfo } from '../import-script-types/table-info';
|
||||
import { ColumnInfo } from '../import-script-types/column-info';
|
||||
import { IndexInfo } from '../import-script-types/index-info';
|
||||
import { generateId, randomHSLA } from '@/lib/utils';
|
||||
import { DBRelationship } from './db-relationship';
|
||||
import { PrimaryKeyInfo } from '../import-script-types/primary-key-info';
|
||||
|
||||
export interface DBTable {
|
||||
id: string;
|
||||
@@ -11,3 +17,159 @@ export interface DBTable {
|
||||
color: string;
|
||||
createdAt: number;
|
||||
}
|
||||
|
||||
export const createTables = (
|
||||
tableInfos: TableInfo[],
|
||||
columns: ColumnInfo[],
|
||||
indexes: IndexInfo[],
|
||||
primaryKeys: PrimaryKeyInfo[]
|
||||
): DBTable[] => {
|
||||
return tableInfos.map((tableInfo: TableInfo) => {
|
||||
// Filter, make unique, and sort columns based on ordinal_position
|
||||
const uniqueColumns = new Map<string, ColumnInfo>();
|
||||
columns
|
||||
.filter((col) => col.table === tableInfo.table)
|
||||
.forEach((col) => {
|
||||
if (!uniqueColumns.has(col.name)) {
|
||||
uniqueColumns.set(col.name, col);
|
||||
}
|
||||
});
|
||||
|
||||
const sortedColumns = Array.from(uniqueColumns.values()).sort(
|
||||
(a, b) => {
|
||||
return a.ordinal_position - b.ordinal_position;
|
||||
}
|
||||
);
|
||||
|
||||
const tablePrimaryKeys = primaryKeys.filter(
|
||||
(pk) => pk.table === tableInfo.table
|
||||
);
|
||||
|
||||
const tableIndexes = indexes.filter(
|
||||
(idx) => idx.table === tableInfo.table
|
||||
);
|
||||
|
||||
const fields: DBField[] = sortedColumns.map((col: ColumnInfo) => ({
|
||||
id: generateId(),
|
||||
name: col.name,
|
||||
type: col.type as any,
|
||||
primaryKey: tablePrimaryKeys.some((pk) => pk.column === col.name),
|
||||
unique: tableIndexes.some(
|
||||
(idx) => idx.column === col.name && idx.unique
|
||||
),
|
||||
nullable: col.nullable,
|
||||
createdAt: Date.now(),
|
||||
}));
|
||||
|
||||
const dbIndexes: DBIndex[] = tableIndexes.map((idx: IndexInfo) => ({
|
||||
id: generateId(),
|
||||
name: idx.name,
|
||||
unique: idx.unique,
|
||||
fieldIds: fields
|
||||
.filter((field) => field.name === idx.column)
|
||||
.map((field) => field.id),
|
||||
createdAt: Date.now(),
|
||||
}));
|
||||
|
||||
// Initial random positions; these will be adjusted later
|
||||
return {
|
||||
id: generateId(),
|
||||
name: tableInfo.table,
|
||||
x: Math.random() * 1000, // Placeholder X
|
||||
y: Math.random() * 800, // Placeholder Y
|
||||
fields,
|
||||
indexes: dbIndexes,
|
||||
color: randomHSLA(),
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
/**
 * Lays tables out on a grid, mutating each table's x/y IN PLACE and
 * returning the same array.
 *
 * Strategy: tables are visited in descending connectivity (number of
 * relationships touching them); when a table is placed, its directly
 * related tables are placed next (depth-first), so connected clusters
 * end up near each other. Rows wrap after maxTablesPerRow tables.
 *
 * @param tables        tables to position (mutated)
 * @param relationships relationship graph used to group related tables
 * @returns the same `tables` array, with x/y assigned
 */
export const adjustTablePositions = (
    tables: DBTable[],
    relationships: DBRelationship[]
): DBTable[] => {
    // Grid geometry (pixels).
    const tableWidth = 200;
    const tableHeight = 300; // Approximate height of each table, adjust as needed
    const gapX = 55;
    const gapY = 42;
    const maxTablesPerRow = 6; // Maximum number of tables per row
    const startX = 100;
    const startY = 100;

    // Mutable layout cursor shared by all positionTable calls; placement
    // order therefore determines final coordinates.
    let currentX = startX;
    let currentY = startY;
    let tablesInCurrentRow = 0;

    // Step 1: Identify the most connected table and sort the tables by their connectivity
    const tableConnections = new Map<string, number>();
    relationships.forEach((rel) => {
        tableConnections.set(
            rel.sourceTableId,
            (tableConnections.get(rel.sourceTableId) || 0) + 1
        );
        tableConnections.set(
            rel.targetTableId,
            (tableConnections.get(rel.targetTableId) || 0) + 1
        );
    });

    // Table ids sorted most-connected first. Tables with no relationships
    // do not appear here; they are picked up in Step 3.
    const sortedTableIds = [...tableConnections.entries()]
        .sort((a, b) => b[1] - a[1])
        .map((entry) => entry[0]);

    const positionedTables = new Set<string>();

    // Places one table at the cursor, advances the cursor, then recurses
    // into its neighbors. No-op for unknown or already-placed ids, which
    // also terminates cycles in the relationship graph.
    const positionTable = (tableId: string) => {
        const table = tables.find((t) => t.id === tableId);
        if (!table || positionedTables.has(tableId)) {
            return;
        }

        // Set the X and Y positions
        table.x = currentX;
        table.y = currentY;
        positionedTables.add(tableId);

        // Update position for the next table
        tablesInCurrentRow++;
        if (tablesInCurrentRow >= maxTablesPerRow) {
            tablesInCurrentRow = 0;
            currentX = startX;
            currentY += tableHeight + gapY;
        } else {
            currentX += tableWidth + gapX;
        }

        // Position connected tables recursively
        const connectedTables = relationships
            .filter(
                (rel) =>
                    rel.sourceTableId === tableId ||
                    rel.targetTableId === tableId
            )
            .map((rel) =>
                rel.sourceTableId === tableId
                    ? rel.targetTableId
                    : rel.sourceTableId
            );

        connectedTables.forEach(positionTable);
    };

    // Step 2: Start positioning with the most connected tables first
    sortedTableIds.forEach((tableId) => {
        if (!positionedTables.has(tableId)) {
            positionTable(tableId);
        }
    });

    // Step 3: Handle any remaining unpositioned tables
    tables.forEach((table) => {
        if (!positionedTables.has(table.id)) {
            positionTable(table.id);
        }
    });

    return tables;
};
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import { DatabaseMetadata } from '../import-script-types/database-metadata';
|
||||
import { DatabaseType } from './database-type';
|
||||
import { DBRelationship } from './db-relationship';
|
||||
import { DBTable } from './db-table';
|
||||
|
||||
import { DBRelationship, createRelationships } from './db-relationship';
|
||||
import { DBTable, adjustTablePositions, createTables } from './db-table';
|
||||
import { generateId } from '@/lib/utils';
|
||||
export interface Diagram {
|
||||
id: string;
|
||||
name: string;
|
||||
@@ -9,3 +10,37 @@ export interface Diagram {
|
||||
tables?: DBTable[];
|
||||
relationships?: DBRelationship[];
|
||||
}
|
||||
|
||||
export const loadFromDatabaseMetadata = (
|
||||
databaseType: DatabaseType,
|
||||
dm: DatabaseMetadata
|
||||
): Diagram => {
|
||||
const {
|
||||
fk_info: foreignKeys,
|
||||
pk_info: PrimaryKeyInfo,
|
||||
tables: tableInfos,
|
||||
columns,
|
||||
indexes,
|
||||
} = dm;
|
||||
|
||||
// First pass: Create tables without final positions
|
||||
const tables = createTables(tableInfos, columns, indexes, PrimaryKeyInfo);
|
||||
|
||||
// First pass: Create relationships
|
||||
const relationships = createRelationships(foreignKeys, tables);
|
||||
|
||||
// Second pass: Adjust table positions based on relationships
|
||||
const adjustedTables = adjustTablePositions(tables, relationships);
|
||||
|
||||
const sortedTables = adjustedTables.sort((a, b) =>
|
||||
a.name.localeCompare(b.name)
|
||||
);
|
||||
|
||||
return {
|
||||
id: generateId(),
|
||||
name: dm.server_name || 'Unnamed Diagram',
|
||||
databaseType: databaseType || DatabaseType.GENERIC,
|
||||
tables: sortedTables,
|
||||
relationships,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
/**
 * One column row emitted by the database-metadata import script.
 * Field names mirror the JSON keys produced by the SQL queries,
 * hence the snake_case.
 */
export interface ColumnInfo {
    schema: string;
    table: string;
    name: string;
    // Raw database type name (e.g. 'varchar') — presumably lowercased by
    // the import query; TODO confirm across dialects.
    type: string;
    // 1-based column order within the table; used for sorting fields.
    ordinal_position: number;
    nullable: boolean;
    collation: string;
}
|
||||
@@ -0,0 +1,25 @@
|
||||
import { ForeignKeyInfo } from './foreign-key-info';
|
||||
import { PrimaryKeyInfo } from './primary-key-info';
|
||||
import { ColumnInfo } from './column-info';
|
||||
import { IndexInfo } from './index-info';
|
||||
import { TableInfo } from './table-info';
|
||||
import { ViewInfo } from './view-info';
|
||||
/**
 * Root object produced by the database import script and parsed by
 * loadDatabaseMetadata. Keys mirror the JSON emitted by the SQL
 * metadata queries (fk_info, pk_info, columns, indexes, tables, views).
 */
export interface DatabaseMetadata {
    fk_info: ForeignKeyInfo[];
    pk_info: PrimaryKeyInfo[];
    columns: ColumnInfo[];
    indexes: IndexInfo[];
    tables: TableInfo[];
    views: ViewInfo[];
    // Server identity as reported by the source database; may be empty.
    server_name: string;
    version: string;
}
|
||||
|
||||
export function loadDatabaseMetadata(jsonString: string): DatabaseMetadata {
|
||||
try {
|
||||
const parsedData: DatabaseMetadata = JSON.parse(jsonString);
|
||||
return parsedData;
|
||||
} catch (parseError) {
|
||||
throw new Error(`Error parsing JSON data: ${parseError}`);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
/**
 * One foreign-key row emitted by the import script; resolved into a
 * DBRelationship by createRelationships.
 */
export interface ForeignKeyInfo {
    schema: string;
    // Referencing (child) table and column.
    table: string;
    column: string;
    foreign_key_name: string;
    // Referenced (parent) table and column.
    reference_table: string;
    reference_column: string;
    // Full constraint definition text as reported by the database.
    fk_def: string;
}
|
||||
@@ -0,0 +1,12 @@
|
||||
/**
 * One index row emitted by the import script. Each row describes a single
 * indexed column — presumably multi-column indexes arrive as multiple rows
 * sharing the same name; TODO confirm against the SQL queries.
 */
export interface IndexInfo {
    schema: string;
    table: string;
    name: string;
    column: string;
    // Index access method as reported by the database (e.g. btree).
    index_type: string;
    cardinality: number;
    size: number;
    unique: boolean;
    is_partial_index: boolean;
    // Sort direction of the indexed column — assumed 'asc'/'desc'; verify.
    direction: string;
}
|
||||
@@ -0,0 +1,6 @@
|
||||
/**
 * One primary-key column row emitted by the import script; composite keys
 * presumably appear as one row per column — verify against the SQL query.
 */
export interface PrimaryKeyInfo {
    schema: string;
    table: string;
    column: string;
    // Full constraint definition text as reported by the database.
    pk_def: string;
}
|
||||
@@ -0,0 +1,8 @@
|
||||
/**
 * One table row emitted by the import script; turned into a DBTable by
 * createTables.
 */
export interface TableInfo {
    schema: string;
    table: string;
    // Row count as reported by the database (may be an estimate).
    rows: number;
    type: string;
    // Storage engine — meaningful for MySQL/MariaDB; may be empty elsewhere.
    engine: string;
    collation: string;
}
|
||||
@@ -0,0 +1,4 @@
|
||||
/** One database view row emitted by the import script. */
export interface ViewInfo {
    schema: string;
    view_name: string;
}
|
||||
Reference in New Issue
Block a user