mirror of https://github.com/DRYTRIX/TimeTracker.git (synced 2026-01-21 20:09:57 -06:00)
@@ -274,12 +274,21 @@ def create_app(config=None):
         app.jinja_loader = ChoiceLoader([app.jinja_loader, FileSystemLoader(extra_templates_path)])
 
     # Prefer Postgres if POSTGRES_* envs are present but URL points to SQLite
+    # BUT only if DATABASE_URL was not explicitly set to SQLite
     current_url = app.config.get("SQLALCHEMY_DATABASE_URI", "")
+    explicit_database_url = os.getenv("DATABASE_URL", "")
+
+    # Only auto-switch to PostgreSQL if:
+    # 1. Not in testing mode
+    # 2. Current URL is SQLite
+    # 3. POSTGRES_* env vars are present
+    # 4. DATABASE_URL was NOT explicitly set to SQLite (respect user's explicit choice)
     if (
         not app.config.get("TESTING")
         and isinstance(current_url, str)
         and current_url.startswith("sqlite")
         and (os.getenv("POSTGRES_DB") or os.getenv("POSTGRES_USER") or os.getenv("POSTGRES_PASSWORD"))
+        and not (explicit_database_url and explicit_database_url.startswith("sqlite"))
    ):
         pg_user = os.getenv("POSTGRES_USER", "timetracker")
         pg_pass = os.getenv("POSTGRES_PASSWORD", "timetracker")
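When this condition fires, the factory switches the app over to a PostgreSQL URL built from the POSTGRES_* variables. A minimal sketch of that assembly, assuming the usual psycopg2 driver string (only pg_user and pg_pass appear in the hunk; the host, port, and db defaults here are assumptions):

    import os

    def build_postgres_url():
        pg_user = os.getenv("POSTGRES_USER", "timetracker")
        pg_pass = os.getenv("POSTGRES_PASSWORD", "timetracker")
        pg_host = os.getenv("POSTGRES_HOST", "db")       # assumption
        pg_port = os.getenv("POSTGRES_PORT", "5432")     # assumption
        pg_db = os.getenv("POSTGRES_DB", "timetracker")  # assumption
        return f"postgresql+psycopg2://{pg_user}:{pg_pass}@{pg_host}:{pg_port}/{pg_db}"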
@@ -394,24 +394,35 @@ class Settings(db.Model):
             if settings:
                 return settings
         except Exception as e:
-            # Handle case where columns don't exist yet (migration not run)
-            # Check if it's a column error - if so, it's expected during migrations
+            # Handle case where table or columns don't exist yet (migration not run)
+            # Check if it's a table/column error - if so, it's expected during migrations
             error_str = str(e)
-            is_column_error = (
-                "UndefinedColumn" in error_str
-                or "does not exist" in error_str.lower()
-                or "no such column" in error_str.lower()
+            # Also check the underlying exception if it's a SQLAlchemy exception
+            underlying_error = ""
+            if hasattr(e, 'orig'):
+                underlying_error = str(e.orig)
+            elif hasattr(e, '__cause__') and e.__cause__:
+                underlying_error = str(e.__cause__)
+
+            combined_error = f"{error_str} {underlying_error}".lower()
+            is_schema_error = (
+                "undefinedcolumn" in combined_error
+                or "does not exist" in combined_error
+                or "no such column" in combined_error
+                or "no such table" in combined_error
+                or ("relation" in combined_error and "does not exist" in combined_error)
+                or "operationalerror" in combined_error and ("no such table" in combined_error or "does not exist" in combined_error)
             )
 
             import logging
 
             logger = logging.getLogger(__name__)
 
-            if is_column_error:
+            if is_schema_error:
                 # This is expected during migrations when schema is incomplete
                 # Only log at debug level to avoid cluttering logs
                 logger.debug(
-                    f"Settings table schema incomplete (migration may be pending): {error_str.split('LINE')[0] if 'LINE' in error_str else error_str}"
+                    f"Settings table not available (migration may be pending): {error_str.split('LINE')[0] if 'LINE' in error_str else error_str}"
                 )
             else:
                 # Other errors should be logged as warnings
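Pulled out of the method, the new classification amounts to a small predicate. A standalone sketch (the function name and the DBAPI-unwrapping shortcut are illustrative, not code from the repo):

    def is_schema_error(exc: Exception) -> bool:
        # Fold the wrapped DBAPI error into the text, as the hunk does via e.orig
        underlying = getattr(exc, 'orig', None) or exc.__cause__ or ""
        combined = f"{exc} {underlying}".lower()
        return (
            "undefinedcolumn" in combined        # PostgreSQL: missing column
            or "does not exist" in combined      # PostgreSQL: missing relation
            or "no such column" in combined      # SQLite: missing column
            or "no such table" in combined       # SQLite: missing table
        )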
@@ -395,6 +395,46 @@ except:
     local current_revision=$(flask db current 2>/dev/null | tr -d '\n' || echo "none")
     log "Current migration revision: $current_revision"
 
+    # If we have alembic_version but no tables, check what revision is stored
+    if [[ "$current_revision" != "none" ]] && [[ -n "$current_revision" ]]; then
+        log "Database has alembic_version with revision: $current_revision"
+        # Check if we actually have tables
+        if [[ "$db_url" == sqlite://* ]]; then
+            local db_file="${db_url#sqlite://}"
+            if [[ -f "$db_file" ]]; then
+                local table_count=$(python -c "
+import sqlite3
+try:
+    conn = sqlite3.connect('$db_file')
+    cursor = conn.cursor()
+    cursor.execute('SELECT name FROM sqlite_master WHERE type=\"table\" AND name != \"sqlite_sequence\" AND name != \"alembic_version\"')
+    tables = cursor.fetchall()
+    conn.close()
+    print(len(tables))
+except:
+    print(0)
+" 2>/dev/null || echo "0")
+                if [[ "$table_count" -eq 0 ]]; then
+                    log "⚠ Database has alembic_version but no application tables"
+                    log "This indicates a failed migration. Clearing alembic_version to start fresh..."
+                    python -c "
+import sqlite3
+try:
+    conn = sqlite3.connect('$db_file')
+    cursor = conn.cursor()
+    cursor.execute('DROP TABLE IF EXISTS alembic_version')
+    conn.commit()
+    conn.close()
+    print('Cleared stale alembic_version table')
+except Exception as e:
+    print(f'Error: {e}')
+" 2>/dev/null || true
+                    current_revision="none"
+                fi
+            fi
+        fi
+    fi
+
     # Check if database has any tables (to determine if it's truly fresh)
     local has_tables=false
     if [[ "$db_url" == sqlite://* ]]; then
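The inline python -c snippet is easier to read as a helper; the same check as a standalone sketch (the file path and function name are illustrative):

    import sqlite3

    def has_application_tables(db_file: str) -> bool:
        # Count user tables, ignoring SQLite bookkeeping and the Alembic marker
        conn = sqlite3.connect(db_file)
        try:
            rows = conn.execute(
                "SELECT name FROM sqlite_master WHERE type='table' "
                "AND name NOT IN ('sqlite_sequence', 'alembic_version')"
            ).fetchall()
            return len(rows) > 0
        finally:
            conn.close()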
@@ -460,20 +500,95 @@ except:
     # Apply any pending migrations
     log "Applying pending migrations..."
 
     # Show migration output for debugging - capture both stdout and stderr
+    # Ensure we're in the right directory and FLASK_APP is set
+    cd /app
+    export FLASK_APP=${FLASK_APP:-app.py}
+
+    # Run migration with proper error capture
+    # Store output in a file and also show it in real-time for debugging
     MIGRATION_OUTPUT=$(mktemp)
-    if ! flask db upgrade > "$MIGRATION_OUTPUT" 2>&1; then
-        log "✗ Migration application failed"
-        log "Error details:"
-        cat "$MIGRATION_OUTPUT"
+
+    set +e  # Don't exit on error immediately
+
+    # Run migration with Python traceback enabled for better error visibility
+    # Use 'heads' instead of 'head' to handle multiple migration branches
+    PYTHONUNBUFFERED=1 python -u -c "
+import sys
+import traceback
+import os
+
+# Set up environment
+os.environ['PYTHONUNBUFFERED'] = '1'
+
+try:
+    from flask import Flask
+    from flask_migrate import upgrade
+    from app import create_app
+
+    app = create_app()
+    with app.app_context():
+        try:
+            # Use 'heads' to upgrade all migration heads (handles branching)
+            upgrade(revision='heads')
+            sys.exit(0)
+        except Exception as e:
+            print('ERROR: Migration failed!', file=sys.stderr)
+            print(f'Error type: {type(e).__name__}', file=sys.stderr)
+            print(f'Error message: {e}', file=sys.stderr)
+            print('\\nFull traceback:', file=sys.stderr)
+            traceback.print_exc(file=sys.stderr)
+            sys.exit(1)
+except Exception as e:
+    print('ERROR: Failed to initialize Flask app for migration!', file=sys.stderr)
+    print(f'Error type: {type(e).__name__}', file=sys.stderr)
+    print(f'Error message: {e}', file=sys.stderr)
+    print('\\nFull traceback:', file=sys.stderr)
+    traceback.print_exc(file=sys.stderr)
+    sys.exit(1)
+" 2>&1 | tee "$MIGRATION_OUTPUT"
+    MIGRATION_EXIT_CODE=${PIPESTATUS[0]}
+
+    set -e  # Re-enable exit on error
+
+    if [[ $MIGRATION_EXIT_CODE -ne 0 ]]; then
+        log "✗ Migration application failed (exit code: $MIGRATION_EXIT_CODE)"
+        log "Full error output:"
+        if [[ -s "$MIGRATION_OUTPUT" ]]; then
+            cat "$MIGRATION_OUTPUT"
+        else
+            log "No output captured - migration may have failed before producing output"
+        fi
+        # Get additional debugging info
+        log "Debugging information:"
+        log "Current migration revision:"
+        flask db current 2>&1 || log "  (could not determine)"
+        if [[ "$db_url" == sqlite://* ]]; then
+            local db_file="${db_url#sqlite://}"
+            log "Database file: $db_file"
+            if [[ -f "$db_file" ]]; then
+                log "  File exists: yes"
+                log "  File size: $(stat -c%s "$db_file" 2>/dev/null || echo 'unknown') bytes"
+                log "  Tables in database:"
+                python -c "
+import sqlite3
+try:
+    conn = sqlite3.connect('$db_file')
+    cursor = conn.cursor()
+    cursor.execute(\"SELECT name FROM sqlite_master WHERE type='table'\")
+    tables = [row[0] for row in cursor.fetchall()]
+    conn.close()
+    for table in tables:
+        print(f'  - {table}')
+except Exception as e:
+    print(f'  Error: {e}')
+" 2>&1 || true
+            else
+                log "  File exists: no"
+            fi
+        fi
         rm -f "$MIGRATION_OUTPUT"
         return 1
     fi
     # Show migration output even on success for debugging
     if [[ -s "$MIGRATION_OUTPUT" ]]; then
         log "Migration output:"
         cat "$MIGRATION_OUTPUT" | head -30
     fi
     rm -f "$MIGRATION_OUTPUT"
     log "✓ Migrations applied"
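Two details worth noting: ${PIPESTATUS[0]} is required because piping through tee would otherwise make $? report tee's exit status, and revision='heads' upgrades every branch tip rather than failing on a branched history. A sketch of spotting such branches with Alembic's script API (the ini and script paths are assumptions):

    from alembic.config import Config
    from alembic.script import ScriptDirectory

    cfg = Config("migrations/alembic.ini")           # path is an assumption
    cfg.set_main_option("script_location", "migrations")
    heads = ScriptDirectory.from_config(cfg).get_heads()
    print(f"{len(heads)} head(s): {heads}")  # more than one means a branched history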
@@ -517,17 +632,27 @@ except:
     # Apply migration
     log "Applying initial migration..."
     MIGRATION_OUTPUT=$(mktemp)
-    if ! flask db upgrade > "$MIGRATION_OUTPUT" 2>&1; then
-        log "✗ Initial migration application failed"
+    set +e  # Don't exit on error immediately
+    # Use 'heads' to handle multiple migration branches
+    PYTHONUNBUFFERED=1 flask db upgrade heads > "$MIGRATION_OUTPUT" 2>&1
+    MIGRATION_EXIT_CODE=$?
+    set -e  # Re-enable exit on error
+
+    if [[ $MIGRATION_EXIT_CODE -ne 0 ]]; then
+        log "✗ Initial migration application failed (exit code: $MIGRATION_EXIT_CODE)"
         log "Error details:"
-        cat "$MIGRATION_OUTPUT"
+        if [[ -s "$MIGRATION_OUTPUT" ]]; then
+            cat "$MIGRATION_OUTPUT"
+        else
+            log "No output captured from migration command"
+        fi
         rm -f "$MIGRATION_OUTPUT"
         return 1
     fi
     # Show migration output even on success for debugging
     if [[ -s "$MIGRATION_OUTPUT" ]]; then
         log "Migration output:"
-        cat "$MIGRATION_OUTPUT" | head -30
+        cat "$MIGRATION_OUTPUT" | head -50
     fi
     rm -f "$MIGRATION_OUTPUT"
     log "✓ Initial migration applied"
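The same 'heads' target works from the CLI; a sketch of invoking it programmatically and capturing the exit code, assuming FLASK_APP is already set in the environment:

    import subprocess

    # 'heads' applies every branch tip; plain 'head' errors out when the
    # migration history has more than one head.
    result = subprocess.run(
        ["flask", "db", "upgrade", "heads"],
        capture_output=True,
        text=True,
    )
    print(result.returncode)
    print(result.stdout or result.stderr)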
@@ -629,17 +754,27 @@ except Exception as e:
 
     # Check for pending migrations
     MIGRATION_OUTPUT=$(mktemp)
-    if ! flask db upgrade > "$MIGRATION_OUTPUT" 2>&1; then
-        log "✗ Migration check failed"
+    set +e  # Don't exit on error immediately
+    # Use 'heads' to handle multiple migration branches
+    PYTHONUNBUFFERED=1 flask db upgrade heads > "$MIGRATION_OUTPUT" 2>&1
+    MIGRATION_EXIT_CODE=$?
+    set -e  # Re-enable exit on error
+
+    if [[ $MIGRATION_EXIT_CODE -ne 0 ]]; then
+        log "✗ Migration check failed (exit code: $MIGRATION_EXIT_CODE)"
         log "Error details:"
-        cat "$MIGRATION_OUTPUT"
+        if [[ -s "$MIGRATION_OUTPUT" ]]; then
+            cat "$MIGRATION_OUTPUT"
+        else
+            log "No output captured from migration command"
+        fi
         rm -f "$MIGRATION_OUTPUT"
         return 1
     fi
     # Show migration output even on success for debugging
     if [[ -s "$MIGRATION_OUTPUT" ]]; then
         log "Migration output:"
-        cat "$MIGRATION_OUTPUT" | head -30
+        cat "$MIGRATION_OUTPUT" | head -50
     fi
     rm -f "$MIGRATION_OUTPUT"
     log "✓ Migrations checked and applied"
@@ -73,19 +73,39 @@ def run_migrations_online():
 
     connectable = current_app.extensions['migrate'].db.get_engine()
 
-    with connectable.connect() as connection:
-        context.configure(
-            connection=connection,
-            target_metadata=target_metadata,
-            process_revision_directives=process_revision_directives,
-            **current_app.extensions['migrate'].configure_args
-        )
+    try:
+        with connectable.connect() as connection:
+            context.configure(
+                connection=connection,
+                target_metadata=target_metadata,
+                process_revision_directives=process_revision_directives,
+                **current_app.extensions['migrate'].configure_args
+            )
 
-        with context.begin_transaction():
-            context.run_migrations()
+            with context.begin_transaction():
+                context.run_migrations()
+    except Exception as e:
+        # Log the full error with traceback for debugging
+        import traceback
+        logger.error(f"Migration failed with error: {e}")
+        logger.error(f"Traceback:\n{traceback.format_exc()}")
+        # Re-raise to ensure the migration command fails properly
+        raise
 
 
 if context.is_offline_mode():
-    run_migrations_offline()
+    try:
+        run_migrations_offline()
+    except Exception as e:
+        import traceback
+        logger.error(f"Migration failed (offline mode) with error: {e}")
+        logger.error(f"Traceback:\n{traceback.format_exc()}")
+        raise
 else:
-    run_migrations_online()
+    try:
+        run_migrations_online()
+    except Exception as e:
+        import traceback
+        logger.error(f"Migration failed (online mode) with error: {e}")
+        logger.error(f"Traceback:\n{traceback.format_exc()}")
+        raise
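The logger used here is presumably the module-level one from the stock Flask-Migrate env.py template, which this file's top section would define roughly like this (a sketch of the standard boilerplate, not shown in the diff):

    import logging
    from logging.config import fileConfig
    from alembic import context

    config = context.config
    fileConfig(config.config_file_name)  # wire logging to alembic.ini's settings
    logger = logging.getLogger('alembic.env')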
@@ -17,21 +17,49 @@ depends_on = None
 
 
 def upgrade():
+    from sqlalchemy import inspect
     bind = op.get_bind()
     dialect_name = bind.dialect.name if bind else 'generic'
+    inspector = inspect(bind)
+    existing_tables = inspector.get_table_names()
+
+    if 'projects' not in existing_tables:
+        return
+
+    projects_columns = [col['name'] for col in inspector.get_columns('projects')]
+    projects_indexes = [idx['name'] for idx in inspector.get_indexes('projects')]
+    projects_unique_constraints = []
+    try:
+        if hasattr(inspector, 'get_unique_constraints'):
+            projects_unique_constraints = [uc['name'] for uc in inspector.get_unique_constraints('projects')]
+    except:
+        pass
 
-    # Add code column if not present
-    with op.batch_alter_table('projects') as batch_op:
-        batch_op.add_column(sa.Column('code', sa.String(length=20), nullable=True))
-        try:
-            batch_op.create_unique_constraint('uq_projects_code', ['code'])
-        except Exception:
-            # Some dialects may not support unique with NULLs the same way; ignore if exists
-            pass
-        try:
-            batch_op.create_index('ix_projects_code', ['code'])
-        except Exception:
-            pass
+    # Add code column if not present (idempotent)
+    if 'code' not in projects_columns:
+        with op.batch_alter_table('projects') as batch_op:
+            batch_op.add_column(sa.Column('code', sa.String(length=20), nullable=True))
+            try:
+                batch_op.create_unique_constraint('uq_projects_code', ['code'])
+            except Exception:
+                # Some dialects may not support unique with NULLs the same way; ignore if exists
+                pass
+            try:
+                batch_op.create_index('ix_projects_code', ['code'])
+            except Exception:
+                pass
+    else:
+        # Column exists, but ensure constraint and index exist
+        with op.batch_alter_table('projects') as batch_op:
+            if 'uq_projects_code' not in projects_unique_constraints:
+                try:
+                    batch_op.create_unique_constraint('uq_projects_code', ['code'])
+                except Exception:
+                    pass
+            if 'ix_projects_code' not in projects_indexes:
+                try:
+                    batch_op.create_index('ix_projects_code', ['code'])
+                except Exception:
+                    pass
 
 
 def downgrade():
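The inspect-then-act pattern above repeats in the migrations that follow; a generic helper along these lines would capture it (a sketch, not code from the repo):

    from alembic import op
    import sqlalchemy as sa
    from sqlalchemy import inspect

    def column_missing(table: str, column: str) -> bool:
        # Re-inspect on each call so checks see tables created earlier
        # in the same migration
        inspector = inspect(op.get_bind())
        if table not in inspector.get_table_names():
            return False
        return column not in [c['name'] for c in inspector.get_columns(table)]

    # usage inside upgrade():
    #   if column_missing('projects', 'code'):
    #       with op.batch_alter_table('projects') as batch_op:
    #           batch_op.add_column(sa.Column('code', sa.String(20), nullable=True))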
@@ -19,37 +19,50 @@ depends_on = None
 
 def upgrade():
     """Add archived_at, archived_by, and archived_reason columns to projects table"""
+    from sqlalchemy import inspect
+    bind = op.get_bind()
+    dialect_name = bind.dialect.name if bind else 'generic'
+    inspector = inspect(bind)
+    existing_tables = inspector.get_table_names()
+
+    if 'projects' not in existing_tables:
+        return
+
+    projects_columns = [col['name'] for col in inspector.get_columns('projects')]
+    projects_indexes = [idx['name'] for idx in inspector.get_indexes('projects')]
+    projects_fks = [fk['name'] for fk in inspector.get_foreign_keys('projects')]
+
     try:
         with op.batch_alter_table('projects', schema=None) as batch_op:
-            # Add archived_at timestamp field
-            batch_op.add_column(sa.Column('archived_at', sa.DateTime(), nullable=True))
+            # Add archived_at timestamp field (idempotent)
+            if 'archived_at' not in projects_columns:
+                batch_op.add_column(sa.Column('archived_at', sa.DateTime(), nullable=True))
 
-            # Add archived_by user reference (who archived the project)
-            batch_op.add_column(sa.Column('archived_by', sa.Integer(), nullable=True))
+            # Add archived_by user reference (who archived the project) (idempotent)
+            if 'archived_by' not in projects_columns:
+                batch_op.add_column(sa.Column('archived_by', sa.Integer(), nullable=True))
 
-            # Add archived_reason text field (why the project was archived)
-            batch_op.add_column(sa.Column('archived_reason', sa.Text(), nullable=True))
+            # Add archived_reason text field (why the project was archived) (idempotent)
+            if 'archived_reason' not in projects_columns:
+                batch_op.add_column(sa.Column('archived_reason', sa.Text(), nullable=True))
 
-            # Create foreign key for archived_by
-            try:
-                batch_op.create_foreign_key(
-                    'fk_projects_archived_by_users',
-                    'users',
-                    ['archived_by'],
-                    ['id'],
-                    ondelete='SET NULL'
-                )
-            except Exception as e:
-                print(f"⚠ Warning creating foreign key for archived_by: {e}")
+            # Create foreign key for archived_by (idempotent)
+            if 'archived_by' in projects_columns and 'fk_projects_archived_by_users' not in projects_fks:
+                try:
+                    batch_op.create_foreign_key(
+                        'fk_projects_archived_by_users',
+                        'users',
+                        ['archived_by'],
+                        ['id']
+                    )
+                except Exception as e:
+                    print(f"⚠ Warning creating foreign key for archived_by: {e}")
 
-            # Create index on archived_at for faster filtering
-            try:
-                batch_op.create_index('ix_projects_archived_at', ['archived_at'])
-            except Exception as e:
-                print(f"⚠ Warning creating index on archived_at: {e}")
+            # Create index on archived_at for faster filtering (idempotent)
+            if 'archived_at' in projects_columns and 'ix_projects_archived_at' not in projects_indexes:
+                try:
+                    batch_op.create_index('ix_projects_archived_at', ['archived_at'])
+                except Exception as e:
+                    print(f"⚠ Warning creating index on archived_at: {e}")
 
     print("✓ Added project archiving metadata fields")
 
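Note that the rewritten foreign key drops the ondelete='SET NULL' the original passed. What actually landed can be checked from the reflected constraints; a sketch (the engine URL is a placeholder):

    from sqlalchemy import create_engine, inspect

    engine = create_engine("sqlite:///data/app.db")  # placeholder URL
    for fk in inspect(engine).get_foreign_keys('projects'):
        # 'options' carries ondelete/onupdate when the dialect reports them
        print(fk['name'], fk['constrained_columns'], fk.get('options'))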
@@ -21,52 +21,92 @@ depends_on = None
 def upgrade():
     """Create tables for advanced permission system"""
 
+    # Get connection to check if tables exist
+    connection = op.get_bind()
+
+    # Check if permissions table already exists (idempotent migration)
+    inspector = sa.inspect(connection)
+    existing_tables = inspector.get_table_names()
+
     # Create permissions table
-    op.create_table('permissions',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('name', sa.String(length=100), nullable=False),
-        sa.Column('description', sa.String(length=255), nullable=True),
-        sa.Column('category', sa.String(length=50), nullable=False),
-        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
-        sa.PrimaryKeyConstraint('id')
-    )
-    op.create_index('idx_permissions_name', 'permissions', ['name'], unique=True)
-    op.create_index('idx_permissions_category', 'permissions', ['category'])
+    if 'permissions' not in existing_tables:
+        op.create_table('permissions',
+            sa.Column('id', sa.Integer(), nullable=False),
+            sa.Column('name', sa.String(length=100), nullable=False),
+            sa.Column('description', sa.String(length=255), nullable=True),
+            sa.Column('category', sa.String(length=50), nullable=False),
+            sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
+            sa.PrimaryKeyConstraint('id')
+        )
+        op.create_index('idx_permissions_name', 'permissions', ['name'], unique=True)
+        op.create_index('idx_permissions_category', 'permissions', ['category'])
+    else:
+        # Table exists, skip creation but ensure indexes exist (SQLite doesn't support if_not_exists)
+        try:
+            # Try to create index, ignore if it already exists
+            connection.execute(sa.text("CREATE INDEX IF NOT EXISTS idx_permissions_name ON permissions(name)"))
+        except:
+            pass  # Index might already exist
+        try:
+            connection.execute(sa.text("CREATE INDEX IF NOT EXISTS idx_permissions_category ON permissions(category)"))
+        except:
+            pass  # Index might already exist
 
     # Create roles table
-    op.create_table('roles',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('name', sa.String(length=50), nullable=False),
-        sa.Column('description', sa.String(length=255), nullable=True),
-        sa.Column('is_system_role', sa.Boolean(), nullable=False, server_default='false'),
-        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
-        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
-        sa.PrimaryKeyConstraint('id')
-    )
-    op.create_index('idx_roles_name', 'roles', ['name'], unique=True)
+    if 'roles' not in existing_tables:
+        op.create_table('roles',
+            sa.Column('id', sa.Integer(), nullable=False),
+            sa.Column('name', sa.String(length=50), nullable=False),
+            sa.Column('description', sa.String(length=255), nullable=True),
+            sa.Column('is_system_role', sa.Boolean(), nullable=False, server_default='false'),
+            sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
+            sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
+            sa.PrimaryKeyConstraint('id')
+        )
+        op.create_index('idx_roles_name', 'roles', ['name'], unique=True)
+    else:
+        # Table exists, skip creation but ensure index exists
+        try:
+            connection.execute(sa.text("CREATE INDEX IF NOT EXISTS idx_roles_name ON roles(name)"))
+        except:
+            pass  # Index might already exist
 
     # Create role_permissions association table
-    op.create_table('role_permissions',
-        sa.Column('role_id', sa.Integer(), nullable=False),
-        sa.Column('permission_id', sa.Integer(), nullable=False),
-        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
-        sa.ForeignKeyConstraint(['permission_id'], ['permissions.id'], ondelete='CASCADE'),
-        sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ondelete='CASCADE'),
-        sa.PrimaryKeyConstraint('role_id', 'permission_id')
-    )
+    if 'role_permissions' not in existing_tables:
+        op.create_table('role_permissions',
+            sa.Column('role_id', sa.Integer(), nullable=False),
+            sa.Column('permission_id', sa.Integer(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
+            sa.ForeignKeyConstraint(['permission_id'], ['permissions.id'], ondelete='CASCADE'),
+            sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ondelete='CASCADE'),
+            sa.PrimaryKeyConstraint('role_id', 'permission_id')
+        )
 
     # Create user_roles association table
-    op.create_table('user_roles',
-        sa.Column('user_id', sa.Integer(), nullable=False),
-        sa.Column('role_id', sa.Integer(), nullable=False),
-        sa.Column('assigned_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
-        sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ondelete='CASCADE'),
-        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
-        sa.PrimaryKeyConstraint('user_id', 'role_id')
-    )
+    if 'user_roles' not in existing_tables:
+        op.create_table('user_roles',
+            sa.Column('user_id', sa.Integer(), nullable=False),
+            sa.Column('role_id', sa.Integer(), nullable=False),
+            sa.Column('assigned_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
+            sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ondelete='CASCADE'),
+            sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
+            sa.PrimaryKeyConstraint('user_id', 'role_id')
+        )
 
-    # Seed default permissions and roles
-    seed_permissions_and_roles()
+    # Seed default permissions and roles (only if tables were just created or are empty)
+    # Check if permissions table has data
+    if 'permissions' in existing_tables:
+        try:
+            result = connection.execute(sa.text("SELECT COUNT(*) FROM permissions")).scalar()
+            if result == 0:
+                # Table exists but is empty, seed it
+                seed_permissions_and_roles()
+        except:
+            # If we can't check, assume it needs seeding (safer)
+            pass
+    else:
+        # Tables were just created, seed them
+        seed_permissions_and_roles()
 
 
 def seed_permissions_and_roles():
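The raw-SQL fallback works because both SQLite and PostgreSQL 9.5+ accept CREATE INDEX IF NOT EXISTS. A compact sketch of the same guard (illustrative only; identifiers are interpolated, so only trusted names should be passed):

    import sqlalchemy as sa

    def ensure_index(connection, name: str, table: str, column: str) -> None:
        # IF NOT EXISTS makes the statement idempotent on SQLite and PostgreSQL 9.5+
        connection.execute(
            sa.text(f"CREATE INDEX IF NOT EXISTS {name} ON {table}({column})")
        )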
@@ -178,10 +218,25 @@ def seed_permissions_and_roles():
         {'id': 50, 'name': 'view_permissions', 'description': 'View permissions and roles', 'category': 'administration'},
     ]
 
-    # Insert permissions
-    for perm in permissions_data:
-        perm['created_at'] = now
-    op.bulk_insert(permissions_table, permissions_data)
+    # Get connection for executing queries
+    connection = op.get_bind()
+
+    # Check if permissions already exist (idempotent)
+    try:
+        existing_perms = connection.execute(sa.text("SELECT COUNT(*) FROM permissions")).scalar()
+        if existing_perms == 0:
+            # Insert permissions
+            for perm in permissions_data:
+                perm['created_at'] = now
+            op.bulk_insert(permissions_table, permissions_data)
+    except:
+        # If table doesn't exist or error, try to insert anyway
+        for perm in permissions_data:
+            perm['created_at'] = now
+        try:
+            op.bulk_insert(permissions_table, permissions_data)
+        except:
+            pass  # Permissions might already exist
 
     # Default roles data
     roles_data = [
@@ -192,53 +247,71 @@ def seed_permissions_and_roles():
         {'id': 5, 'name': 'viewer', 'description': 'Read-only User', 'is_system_role': True},
     ]
 
-    # Insert roles
-    for role in roles_data:
-        role['created_at'] = now
-        role['updated_at'] = now
-    op.bulk_insert(roles_table, roles_data)
+    # Check if roles already exist (idempotent)
+    try:
+        existing_roles = connection.execute(sa.text("SELECT COUNT(*) FROM roles")).scalar()
+        if existing_roles == 0:
+            # Insert roles
+            for role in roles_data:
+                role['created_at'] = now
+                role['updated_at'] = now
+            op.bulk_insert(roles_table, roles_data)
+    except:
+        # If table doesn't exist or error, try to insert anyway
+        for role in roles_data:
+            role['created_at'] = now
+            role['updated_at'] = now
+        try:
+            op.bulk_insert(roles_table, roles_data)
+        except:
+            pass  # Roles might already exist
 
-    # Fix sequences after bulk insert to prevent duplicate key errors
-    # Get connection for executing queries
-    connection = op.get_bind()
-
-    # Fix roles sequence - set to max(id) + 1
-    connection.execute(sa.text("""
-        DO $$
-        BEGIN
-            CREATE SEQUENCE IF NOT EXISTS roles_id_seq;
-            IF NOT EXISTS (
-                SELECT 1 FROM pg_depend
-                WHERE objid = 'roles_id_seq'::regclass
-                AND refobjid = 'roles'::regclass
-            ) THEN
-                ALTER TABLE roles ALTER COLUMN id SET DEFAULT nextval('roles_id_seq');
-                ALTER SEQUENCE roles_id_seq OWNED BY roles.id;
-            END IF;
-            PERFORM setval('roles_id_seq',
-                COALESCE((SELECT MAX(id) FROM roles), 0) + 1,
-                false);
-        END $$;
-    """))
-
-    # Fix permissions sequence - set to max(id) + 1
-    connection.execute(sa.text("""
-        DO $$
-        BEGIN
-            CREATE SEQUENCE IF NOT EXISTS permissions_id_seq;
-            IF NOT EXISTS (
-                SELECT 1 FROM pg_depend
-                WHERE objid = 'permissions_id_seq'::regclass
-                AND refobjid = 'permissions'::regclass
-            ) THEN
-                ALTER TABLE permissions ALTER COLUMN id SET DEFAULT nextval('permissions_id_seq');
-                ALTER SEQUENCE permissions_id_seq OWNED BY permissions.id;
-            END IF;
-            PERFORM setval('permissions_id_seq',
-                COALESCE((SELECT MAX(id) FROM permissions), 0) + 1,
-                false);
-        END $$;
-    """))
+    # Fix sequences after bulk insert to prevent duplicate key errors (PostgreSQL only)
+    # Check if we're using PostgreSQL
+    if connection.dialect.name == 'postgresql':
+        # Fix roles sequence - set to max(id) + 1
+        try:
+            connection.execute(sa.text("""
+                DO $$
+                BEGIN
+                    CREATE SEQUENCE IF NOT EXISTS roles_id_seq;
+                    IF NOT EXISTS (
+                        SELECT 1 FROM pg_depend
+                        WHERE objid = 'roles_id_seq'::regclass
+                        AND refobjid = 'roles'::regclass
+                    ) THEN
+                        ALTER TABLE roles ALTER COLUMN id SET DEFAULT nextval('roles_id_seq');
+                        ALTER SEQUENCE roles_id_seq OWNED BY roles.id;
+                    END IF;
+                    PERFORM setval('roles_id_seq',
+                        COALESCE((SELECT MAX(id) FROM roles), 0) + 1,
+                        false);
+                END $$;
+            """))
+        except:
+            pass  # Sequence might already be set
+
+        # Fix permissions sequence - set to max(id) + 1
+        try:
+            connection.execute(sa.text("""
+                DO $$
+                BEGIN
+                    CREATE SEQUENCE IF NOT EXISTS permissions_id_seq;
+                    IF NOT EXISTS (
+                        SELECT 1 FROM pg_depend
+                        WHERE objid = 'permissions_id_seq'::regclass
+                        AND refobjid = 'permissions'::regclass
+                    ) THEN
+                        ALTER TABLE permissions ALTER COLUMN id SET DEFAULT nextval('permissions_id_seq');
+                        ALTER SEQUENCE permissions_id_seq OWNED BY permissions.id;
+                    END IF;
+                    PERFORM setval('permissions_id_seq',
+                        COALESCE((SELECT MAX(id) FROM permissions), 0) + 1,
+                        false);
+                END $$;
+            """))
+        except:
+            pass  # Sequence might already be set
 
     # Define role-permission mappings
     role_permission_mappings = []
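When the id column is backed by an ordinary serial sequence, the resynchronization can usually be written without the DO block; a shorter PostgreSQL-only sketch using pg_get_serial_sequence (assumes the sequence is already owned by the column):

    import sqlalchemy as sa
    from alembic import op

    def resync_sequence(table: str) -> None:
        # Advance the sequence backing <table>.id past MAX(id)
        op.get_bind().execute(sa.text(
            f"SELECT setval(pg_get_serial_sequence('{table}', 'id'), "
            f"COALESCE((SELECT MAX(id) FROM {table}), 0) + 1, false)"
        ))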
@@ -266,24 +339,36 @@ def seed_permissions_and_roles():
     for perm_id in viewer_perms:
         role_permission_mappings.append({'role_id': 5, 'permission_id': perm_id, 'created_at': now})
 
-    # Insert role-permission mappings
-    op.bulk_insert(role_permissions_table, role_permission_mappings)
+    # Insert role-permission mappings (only if they don't exist)
+    try:
+        existing_mappings = connection.execute(sa.text("SELECT COUNT(*) FROM role_permissions")).scalar()
+        if existing_mappings == 0:
+            op.bulk_insert(role_permissions_table, role_permission_mappings)
+    except:
+        # If table doesn't exist or error, try to insert anyway
+        try:
+            op.bulk_insert(role_permissions_table, role_permission_mappings)
+        except:
+            pass  # Mappings might already exist
 
     # Migrate existing users to new role system
-    # Get connection for executing queries
-    connection = op.get_bind()
-
-    # Find all users with role='admin' and assign them the 'admin' role
-    admin_users = connection.execute(sa.text("SELECT id FROM users WHERE role = 'admin'")).fetchall()
-    admin_role_assignments = [{'user_id': user[0], 'role_id': 2, 'assigned_at': now} for user in admin_users]
-    if admin_role_assignments:
-        op.bulk_insert(user_roles_table, admin_role_assignments)
-
-    # Find all users with role='user' and assign them the 'user' role
-    regular_users = connection.execute(sa.text("SELECT id FROM users WHERE role = 'user'")).fetchall()
-    user_role_assignments = [{'user_id': user[0], 'role_id': 4, 'assigned_at': now} for user in regular_users]
-    if user_role_assignments:
-        op.bulk_insert(user_roles_table, user_role_assignments)
+    try:
+        # Check if user_roles table has data
+        existing_user_roles = connection.execute(sa.text("SELECT COUNT(*) FROM user_roles")).scalar()
+        if existing_user_roles == 0:
+            # Find all users with role='admin' and assign them the 'admin' role
+            admin_users = connection.execute(sa.text("SELECT id FROM users WHERE role = 'admin'")).fetchall()
+            admin_role_assignments = [{'user_id': user[0], 'role_id': 2, 'assigned_at': now} for user in admin_users]
+            if admin_role_assignments:
+                op.bulk_insert(user_roles_table, admin_role_assignments)
+
+            # Find all users with role='user' and assign them the 'user' role
+            regular_users = connection.execute(sa.text("SELECT id FROM users WHERE role = 'user'")).fetchall()
+            user_role_assignments = [{'user_id': user[0], 'role_id': 4, 'assigned_at': now} for user in regular_users]
+            if user_role_assignments:
+                op.bulk_insert(user_roles_table, user_role_assignments)
+    except:
+        pass  # User roles might already be assigned
 
 
 def downgrade():
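On PostgreSQL 9.5+ and SQLite 3.24+, the count-then-insert guard could also be an upsert that ignores duplicates; a sketch for a single mapping row (illustrative, not the migration's code):

    import sqlalchemy as sa
    from alembic import op

    op.get_bind().execute(sa.text(
        "INSERT INTO role_permissions (role_id, permission_id, created_at) "
        "VALUES (:role_id, :permission_id, CURRENT_TIMESTAMP) "
        "ON CONFLICT (role_id, permission_id) DO NOTHING"
    ), {"role_id": 5, "permission_id": 50})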
@@ -57,11 +57,20 @@ def upgrade() -> None:
         op.add_column('payments', sa.Column('status', sa.String(20), nullable=False, server_default='completed'))
 
     if 'received_by' not in existing_columns:
-        op.add_column('payments', sa.Column('received_by', sa.Integer(), nullable=True))
-        try:
-            op.create_foreign_key('fk_payments_received_by', 'payments', 'users', ['received_by'], ['id'], ondelete='SET NULL')
-        except:
-            pass
+        is_sqlite = bind.dialect.name == 'sqlite'
+        if is_sqlite:
+            with op.batch_alter_table('payments', schema=None) as batch_op:
+                batch_op.add_column(sa.Column('received_by', sa.Integer(), nullable=True))
+                try:
+                    batch_op.create_foreign_key('fk_payments_received_by', 'users', ['received_by'], ['id'])
+                except:
+                    pass
+        else:
+            op.add_column('payments', sa.Column('received_by', sa.Integer(), nullable=True))
+            try:
+                op.create_foreign_key('fk_payments_received_by', 'payments', 'users', ['received_by'], ['id'], ondelete='SET NULL')
+            except:
+                pass
 
     if 'gateway_transaction_id' not in existing_columns:
         op.add_column('payments', sa.Column('gateway_transaction_id', sa.String(255), nullable=True))
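SQLite cannot ALTER TABLE ... ADD CONSTRAINT, which is why batch mode is needed: Alembic copies the table, applies the changes, and swaps it in. The pattern in isolation (a sketch; the names are illustrative):

    from alembic import op

    def add_fk_sqlite_safe(table: str, ref_table: str, local_col: str) -> None:
        # batch_alter_table recreates the table with the constraint baked in,
        # the only way SQLite can gain a new foreign key
        with op.batch_alter_table(table, schema=None) as batch_op:
            batch_op.create_foreign_key(
                f"fk_{table}_{local_col}", ref_table, [local_col], ["id"]
            )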
@@ -144,6 +144,9 @@ def upgrade():
     op.create_index('ix_per_diems_user_id', 'per_diems', ['user_id'])
     op.create_index('ix_per_diems_trip_start', 'per_diems', ['trip_start_date'])
 
+    # Check database dialect for SQLite batch mode
+    is_sqlite = conn.dialect.name == 'sqlite'
+
     # Add new columns to expenses table (idempotent)
     if 'expenses' in existing_tables:
         existing_columns = [col['name'] for col in inspector.get_columns('expenses')]
@@ -158,31 +161,58 @@ def upgrade():
         # Add foreign keys from expenses to mileage and per_diems (idempotent)
         existing_fks = [fk['name'] for fk in inspector.get_foreign_keys('expenses')]
 
-        if 'fk_expenses_mileage' not in existing_fks:
-            op.create_foreign_key('fk_expenses_mileage', 'expenses', 'mileage', ['mileage_id'], ['id'])
-        if 'fk_expenses_per_diem' not in existing_fks:
-            op.create_foreign_key('fk_expenses_per_diem', 'expenses', 'per_diems', ['per_diem_id'], ['id'])
+        # SQLite requires batch mode for adding constraints to existing tables
+        if is_sqlite:
+            with op.batch_alter_table('expenses', schema=None) as batch_op:
+                if 'fk_expenses_mileage' not in existing_fks:
+                    batch_op.create_foreign_key('fk_expenses_mileage', 'mileage', ['mileage_id'], ['id'])
+                if 'fk_expenses_per_diem' not in existing_fks:
+                    batch_op.create_foreign_key('fk_expenses_per_diem', 'per_diems', ['per_diem_id'], ['id'])
+        else:
+            # PostgreSQL and other databases can add constraints directly
+            if 'fk_expenses_mileage' not in existing_fks:
+                op.create_foreign_key('fk_expenses_mileage', 'expenses', 'mileage', ['mileage_id'], ['id'])
+            if 'fk_expenses_per_diem' not in existing_fks:
+                op.create_foreign_key('fk_expenses_per_diem', 'expenses', 'per_diems', ['per_diem_id'], ['id'])
 
     # Now add the circular foreign keys from mileage and per_diems back to expenses (idempotent)
-    if 'mileage' in existing_tables:
-        mileage_fks = [fk['name'] for fk in inspector.get_foreign_keys('mileage')]
-        if 'fk_mileage_expense' not in mileage_fks:
-            op.create_foreign_key('fk_mileage_expense', 'mileage', 'expenses', ['expense_id'], ['id'])
+    # Re-check inspector after potential table creations
+    conn = op.get_bind()
+    inspector = inspect(conn)
 
-    if 'per_diems' in existing_tables:
+    if 'mileage' in inspector.get_table_names():
+        mileage_columns = [col['name'] for col in inspector.get_columns('mileage')]
+        mileage_fks = [fk['name'] for fk in inspector.get_foreign_keys('mileage')]
+
+        # Ensure expense_id column exists before adding FK
+        if 'expense_id' in mileage_columns and 'fk_mileage_expense' not in mileage_fks:
+            if is_sqlite:
+                with op.batch_alter_table('mileage', schema=None) as batch_op:
+                    batch_op.create_foreign_key('fk_mileage_expense', 'expenses', ['expense_id'], ['id'])
+            else:
+                op.create_foreign_key('fk_mileage_expense', 'mileage', 'expenses', ['expense_id'], ['id'])
+
+    if 'per_diems' in inspector.get_table_names():
+        per_diems_columns = [col['name'] for col in inspector.get_columns('per_diems')]
         per_diems_fks = [fk['name'] for fk in inspector.get_foreign_keys('per_diems')]
-        if 'fk_per_diems_expense' not in per_diems_fks:
-            op.create_foreign_key('fk_per_diems_expense', 'per_diems', 'expenses', ['expense_id'], ['id'])
+
+        # Ensure expense_id column exists before adding FK
+        if 'expense_id' in per_diems_columns and 'fk_per_diems_expense' not in per_diems_fks:
+            if is_sqlite:
+                with op.batch_alter_table('per_diems', schema=None) as batch_op:
+                    batch_op.create_foreign_key('fk_per_diems_expense', 'expenses', ['expense_id'], ['id'])
+            else:
+                op.create_foreign_key('fk_per_diems_expense', 'per_diems', 'expenses', ['expense_id'], ['id'])
 
     # Insert default expense categories (idempotent)
+    # Re-check table existence since tables may have been created in this migration
     conn = op.get_bind()
     inspector = inspect(conn)
     current_tables = inspector.get_table_names()
 
+    # Determine database type for SQL syntax differences
+    is_postgresql = conn.dialect.name == 'postgresql'
+
     if 'expense_categories' in current_tables:
-        # Use database-specific syntax for upsert
-        is_postgresql = conn.dialect.name == 'postgresql'
-
         if is_postgresql:
             # PostgreSQL syntax
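The expenses/mileage/per_diems cycle is why these FKs are added only after all tables exist. At the schema-definition level, SQLAlchemy expresses the same cycle with use_alter; a sketch (column names mirror the migration, the metadata setup is illustrative):

    import sqlalchemy as sa

    metadata = sa.MetaData()

    expenses = sa.Table(
        "expenses", metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        # use_alter defers constraint creation to a later ALTER TABLE,
        # breaking the creation-order cycle between the two tables
        sa.Column("mileage_id", sa.Integer,
                  sa.ForeignKey("mileage.id", use_alter=True, name="fk_expenses_mileage")),
    )

    mileage = sa.Table(
        "mileage", metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("expense_id", sa.Integer,
                  sa.ForeignKey("expenses.id", use_alter=True, name="fk_mileage_expense")),
    )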
@@ -7,6 +7,7 @@ Create Date: 2025-10-30 15:05:00.000000
 """
 from alembic import op
 import sqlalchemy as sa
+from sqlalchemy import inspect
 
 # revision identifiers, used by Alembic.
 revision = '038_fix_expenses_schema'
@@ -16,132 +17,402 @@ depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
conn = op.get_bind()
|
||||
is_sqlite = conn.dialect.name == 'sqlite'
|
||||
inspector = inspect(conn)
|
||||
|
||||
# Check if tables exist (idempotent)
|
||||
existing_tables = inspector.get_table_names()
|
||||
|
||||
# Fix mileage table - rename columns and add missing ones
|
||||
op.alter_column('mileage', 'trip_purpose', new_column_name='purpose', existing_type=sa.Text(), existing_nullable=False)
|
||||
op.alter_column('mileage', 'vehicle_registration', new_column_name='license_plate', existing_type=sa.String(20), existing_nullable=True)
|
||||
op.alter_column('mileage', 'total_amount', new_column_name='calculated_amount', existing_type=sa.Numeric(10, 2), existing_nullable=True)
|
||||
|
||||
# Add missing columns to mileage
|
||||
op.add_column('mileage', sa.Column('description', sa.Text(), nullable=True))
|
||||
op.add_column('mileage', sa.Column('start_odometer', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
op.add_column('mileage', sa.Column('end_odometer', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
op.add_column('mileage', sa.Column('distance_miles', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
op.add_column('mileage', sa.Column('rate_per_mile', sa.Numeric(precision=10, scale=4), nullable=True))
|
||||
op.add_column('mileage', sa.Column('vehicle_description', sa.String(length=200), nullable=True))
|
||||
op.add_column('mileage', sa.Column('is_round_trip', sa.Boolean(), nullable=False, server_default='false'))
|
||||
op.add_column('mileage', sa.Column('reimbursed', sa.Boolean(), nullable=False, server_default='false'))
|
||||
op.add_column('mileage', sa.Column('reimbursed_at', sa.DateTime(), nullable=True))
|
||||
op.add_column('mileage', sa.Column('currency_code', sa.String(length=3), nullable=False, server_default='EUR'))
|
||||
|
||||
# Make rate_per_km NOT NULL (it's required)
|
||||
op.alter_column('mileage', 'rate_per_km', nullable=False, server_default='0.30')
|
||||
if 'mileage' in existing_tables:
|
||||
mileage_columns = [col['name'] for col in inspector.get_columns('mileage')]
|
||||
|
||||
if is_sqlite:
|
||||
with op.batch_alter_table('mileage', schema=None) as batch_op:
|
||||
# Rename columns
|
||||
if 'trip_purpose' in mileage_columns and 'purpose' not in mileage_columns:
|
||||
batch_op.alter_column('trip_purpose', new_column_name='purpose')
|
||||
if 'vehicle_registration' in mileage_columns and 'license_plate' not in mileage_columns:
|
||||
batch_op.alter_column('vehicle_registration', new_column_name='license_plate')
|
||||
if 'total_amount' in mileage_columns and 'calculated_amount' not in mileage_columns:
|
||||
batch_op.alter_column('total_amount', new_column_name='calculated_amount')
|
||||
|
||||
# Add missing columns
|
||||
if 'description' not in mileage_columns:
|
||||
batch_op.add_column(sa.Column('description', sa.Text(), nullable=True))
|
||||
if 'start_odometer' not in mileage_columns:
|
||||
batch_op.add_column(sa.Column('start_odometer', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'end_odometer' not in mileage_columns:
|
||||
batch_op.add_column(sa.Column('end_odometer', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'distance_miles' not in mileage_columns:
|
||||
batch_op.add_column(sa.Column('distance_miles', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'rate_per_mile' not in mileage_columns:
|
||||
batch_op.add_column(sa.Column('rate_per_mile', sa.Numeric(precision=10, scale=4), nullable=True))
|
||||
if 'vehicle_description' not in mileage_columns:
|
||||
batch_op.add_column(sa.Column('vehicle_description', sa.String(length=200), nullable=True))
|
||||
if 'is_round_trip' not in mileage_columns:
|
||||
batch_op.add_column(sa.Column('is_round_trip', sa.Boolean(), nullable=False, server_default='false'))
|
||||
if 'reimbursed' not in mileage_columns:
|
||||
batch_op.add_column(sa.Column('reimbursed', sa.Boolean(), nullable=False, server_default='false'))
|
||||
if 'reimbursed_at' not in mileage_columns:
|
||||
batch_op.add_column(sa.Column('reimbursed_at', sa.DateTime(), nullable=True))
|
||||
if 'currency_code' not in mileage_columns:
|
||||
batch_op.add_column(sa.Column('currency_code', sa.String(length=3), nullable=False, server_default='EUR'))
|
||||
|
||||
# Make rate_per_km NOT NULL if it exists and is nullable
|
||||
if 'rate_per_km' in mileage_columns:
|
||||
# Check current nullability
|
||||
rate_col = next((col for col in inspector.get_columns('mileage') if col['name'] == 'rate_per_km'), None)
|
||||
if rate_col and rate_col.get('nullable', True):
|
||||
# Set default for NULL values first
|
||||
conn.execute(sa.text("UPDATE mileage SET rate_per_km = 0.30 WHERE rate_per_km IS NULL"))
|
||||
batch_op.alter_column('rate_per_km', nullable=False, server_default='0.30')
|
||||
else:
|
||||
# PostgreSQL and other databases
|
||||
if 'trip_purpose' in mileage_columns and 'purpose' not in mileage_columns:
|
||||
op.alter_column('mileage', 'trip_purpose', new_column_name='purpose', existing_type=sa.Text(), existing_nullable=False)
|
||||
if 'vehicle_registration' in mileage_columns and 'license_plate' not in mileage_columns:
|
||||
op.alter_column('mileage', 'vehicle_registration', new_column_name='license_plate', existing_type=sa.String(20), existing_nullable=True)
|
||||
if 'total_amount' in mileage_columns and 'calculated_amount' not in mileage_columns:
|
||||
op.alter_column('mileage', 'total_amount', new_column_name='calculated_amount', existing_type=sa.Numeric(10, 2), existing_nullable=True)
|
||||
|
||||
# Add missing columns
|
||||
if 'description' not in mileage_columns:
|
||||
op.add_column('mileage', sa.Column('description', sa.Text(), nullable=True))
|
||||
if 'start_odometer' not in mileage_columns:
|
||||
op.add_column('mileage', sa.Column('start_odometer', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'end_odometer' not in mileage_columns:
|
||||
op.add_column('mileage', sa.Column('end_odometer', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'distance_miles' not in mileage_columns:
|
||||
op.add_column('mileage', sa.Column('distance_miles', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'rate_per_mile' not in mileage_columns:
|
||||
op.add_column('mileage', sa.Column('rate_per_mile', sa.Numeric(precision=10, scale=4), nullable=True))
|
||||
if 'vehicle_description' not in mileage_columns:
|
||||
op.add_column('mileage', sa.Column('vehicle_description', sa.String(length=200), nullable=True))
|
||||
if 'is_round_trip' not in mileage_columns:
|
||||
op.add_column('mileage', sa.Column('is_round_trip', sa.Boolean(), nullable=False, server_default='false'))
|
||||
if 'reimbursed' not in mileage_columns:
|
||||
op.add_column('mileage', sa.Column('reimbursed', sa.Boolean(), nullable=False, server_default='false'))
|
||||
if 'reimbursed_at' not in mileage_columns:
|
||||
op.add_column('mileage', sa.Column('reimbursed_at', sa.DateTime(), nullable=True))
|
||||
if 'currency_code' not in mileage_columns:
|
||||
op.add_column('mileage', sa.Column('currency_code', sa.String(length=3), nullable=False, server_default='EUR'))
|
||||
|
||||
# Make rate_per_km NOT NULL
|
||||
if 'rate_per_km' in mileage_columns:
|
||||
rate_col = next((col for col in inspector.get_columns('mileage') if col['name'] == 'rate_per_km'), None)
|
||||
if rate_col and rate_col.get('nullable', True):
|
||||
conn.execute(sa.text("UPDATE mileage SET rate_per_km = 0.30 WHERE rate_per_km IS NULL"))
|
||||
op.alter_column('mileage', 'rate_per_km', nullable=False, server_default='0.30')
|
||||
|
||||
# Fix per_diem_rates table - rename columns
|
||||
op.alter_column('per_diem_rates', 'location', new_column_name='city', existing_type=sa.String(255), existing_nullable=True)
|
||||
op.alter_column('per_diem_rates', 'valid_from', new_column_name='effective_from', existing_type=sa.Date(), existing_nullable=False)
|
||||
op.alter_column('per_diem_rates', 'valid_to', new_column_name='effective_to', existing_type=sa.Date(), existing_nullable=True)
|
||||
|
||||
# Add missing columns to per_diem_rates
|
||||
op.add_column('per_diem_rates', sa.Column('full_day_rate', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
|
||||
op.add_column('per_diem_rates', sa.Column('half_day_rate', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
|
||||
op.add_column('per_diem_rates', sa.Column('breakfast_rate', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
op.add_column('per_diem_rates', sa.Column('lunch_rate', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
op.add_column('per_diem_rates', sa.Column('dinner_rate', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
op.add_column('per_diem_rates', sa.Column('incidental_rate', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
op.add_column('per_diem_rates', sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')))
|
||||
|
||||
# Rename country_code to country
|
||||
op.alter_column('per_diem_rates', 'country_code', new_column_name='country', existing_type=sa.String(2), existing_nullable=False)
|
||||
|
||||
# Drop old rate_per_day column after copying to full_day_rate
|
||||
op.execute("UPDATE per_diem_rates SET full_day_rate = rate_per_day, half_day_rate = rate_per_day * 0.5")
|
||||
op.drop_column('per_diem_rates', 'rate_per_day')
|
||||
op.drop_column('per_diem_rates', 'breakfast_deduction')
|
||||
op.drop_column('per_diem_rates', 'lunch_deduction')
|
||||
op.drop_column('per_diem_rates', 'dinner_deduction')
|
||||
if 'per_diem_rates' in existing_tables:
|
||||
per_diem_rates_columns = [col['name'] for col in inspector.get_columns('per_diem_rates')]
|
||||
|
||||
if is_sqlite:
|
||||
with op.batch_alter_table('per_diem_rates', schema=None) as batch_op:
|
||||
# Rename columns
|
||||
if 'location' in per_diem_rates_columns and 'city' not in per_diem_rates_columns:
|
||||
batch_op.alter_column('location', new_column_name='city')
|
||||
if 'valid_from' in per_diem_rates_columns and 'effective_from' not in per_diem_rates_columns:
|
||||
batch_op.alter_column('valid_from', new_column_name='effective_from')
|
||||
if 'valid_to' in per_diem_rates_columns and 'effective_to' not in per_diem_rates_columns:
|
||||
batch_op.alter_column('valid_to', new_column_name='effective_to')
|
||||
if 'country_code' in per_diem_rates_columns and 'country' not in per_diem_rates_columns:
|
||||
batch_op.alter_column('country_code', new_column_name='country')
|
||||
|
||||
# Add missing columns
|
||||
if 'full_day_rate' not in per_diem_rates_columns:
|
||||
batch_op.add_column(sa.Column('full_day_rate', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
|
||||
if 'half_day_rate' not in per_diem_rates_columns:
|
||||
batch_op.add_column(sa.Column('half_day_rate', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
|
||||
if 'breakfast_rate' not in per_diem_rates_columns:
|
||||
batch_op.add_column(sa.Column('breakfast_rate', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'lunch_rate' not in per_diem_rates_columns:
|
||||
batch_op.add_column(sa.Column('lunch_rate', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'dinner_rate' not in per_diem_rates_columns:
|
||||
batch_op.add_column(sa.Column('dinner_rate', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'incidental_rate' not in per_diem_rates_columns:
|
||||
batch_op.add_column(sa.Column('incidental_rate', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'updated_at' not in per_diem_rates_columns:
|
||||
batch_op.add_column(sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')))
|
||||
|
||||
# Drop old columns (SQLite 3.35.0+ supports DROP COLUMN)
|
||||
# Copy data first
|
||||
if 'rate_per_day' in per_diem_rates_columns and 'full_day_rate' in per_diem_rates_columns:
|
||||
conn.execute(sa.text("UPDATE per_diem_rates SET full_day_rate = rate_per_day, half_day_rate = rate_per_day * 0.5 WHERE full_day_rate = 0"))
|
||||
if 'rate_per_day' in per_diem_rates_columns:
|
||||
batch_op.drop_column('rate_per_day')
|
||||
if 'breakfast_deduction' in per_diem_rates_columns:
|
||||
batch_op.drop_column('breakfast_deduction')
|
||||
if 'lunch_deduction' in per_diem_rates_columns:
|
||||
batch_op.drop_column('lunch_deduction')
|
||||
if 'dinner_deduction' in per_diem_rates_columns:
|
||||
batch_op.drop_column('dinner_deduction')
|
||||
else:
|
||||
# PostgreSQL and other databases
|
||||
if 'location' in per_diem_rates_columns and 'city' not in per_diem_rates_columns:
|
||||
op.alter_column('per_diem_rates', 'location', new_column_name='city', existing_type=sa.String(255), existing_nullable=True)
|
||||
if 'valid_from' in per_diem_rates_columns and 'effective_from' not in per_diem_rates_columns:
|
||||
op.alter_column('per_diem_rates', 'valid_from', new_column_name='effective_from', existing_type=sa.Date(), existing_nullable=False)
|
||||
if 'valid_to' in per_diem_rates_columns and 'effective_to' not in per_diem_rates_columns:
|
||||
op.alter_column('per_diem_rates', 'valid_to', new_column_name='effective_to', existing_type=sa.Date(), existing_nullable=True)
|
||||
if 'country_code' in per_diem_rates_columns and 'country' not in per_diem_rates_columns:
|
||||
op.alter_column('per_diem_rates', 'country_code', new_column_name='country', existing_type=sa.String(2), existing_nullable=False)
|
||||
|
||||
# Add missing columns
|
||||
if 'full_day_rate' not in per_diem_rates_columns:
|
||||
op.add_column('per_diem_rates', sa.Column('full_day_rate', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
|
||||
if 'half_day_rate' not in per_diem_rates_columns:
|
||||
op.add_column('per_diem_rates', sa.Column('half_day_rate', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
|
||||
if 'breakfast_rate' not in per_diem_rates_columns:
|
||||
op.add_column('per_diem_rates', sa.Column('breakfast_rate', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'lunch_rate' not in per_diem_rates_columns:
|
||||
op.add_column('per_diem_rates', sa.Column('lunch_rate', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'dinner_rate' not in per_diem_rates_columns:
|
||||
op.add_column('per_diem_rates', sa.Column('dinner_rate', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'incidental_rate' not in per_diem_rates_columns:
|
||||
op.add_column('per_diem_rates', sa.Column('incidental_rate', sa.Numeric(precision=10, scale=2), nullable=True))
|
||||
if 'updated_at' not in per_diem_rates_columns:
|
||||
op.add_column('per_diem_rates', sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')))
|
||||
|
||||
# Drop old columns after copying data
|
||||
if 'rate_per_day' in per_diem_rates_columns:
|
||||
conn.execute(sa.text("UPDATE per_diem_rates SET full_day_rate = rate_per_day, half_day_rate = rate_per_day * 0.5"))
|
||||
op.drop_column('per_diem_rates', 'rate_per_day')
|
||||
if 'breakfast_deduction' in per_diem_rates_columns:
|
||||
op.drop_column('per_diem_rates', 'breakfast_deduction')
|
||||
if 'lunch_deduction' in per_diem_rates_columns:
|
||||
op.drop_column('per_diem_rates', 'lunch_deduction')
|
||||
if 'dinner_deduction' in per_diem_rates_columns:
|
||||
op.drop_column('per_diem_rates', 'dinner_deduction')
|
||||

    # Fix per_diems table - rename columns
    op.alter_column('per_diems', 'trip_start_date', new_column_name='start_date', existing_type=sa.Date(), existing_nullable=False)
    op.alter_column('per_diems', 'trip_end_date', new_column_name='end_date', existing_type=sa.Date(), existing_nullable=False)
    op.alter_column('per_diems', 'destination_country', new_column_name='country', existing_type=sa.String(2), existing_nullable=False)
    op.alter_column('per_diems', 'destination_location', new_column_name='city', existing_type=sa.String(255), existing_nullable=True)
    op.alter_column('per_diems', 'number_of_days', new_column_name='full_days', existing_type=sa.Integer(), existing_nullable=False)
    op.alter_column('per_diems', 'total_amount', new_column_name='calculated_amount', existing_type=sa.Numeric(10, 2), existing_nullable=True)

    # Add missing columns to per_diems
    op.add_column('per_diems', sa.Column('trip_purpose', sa.String(length=255), nullable=False, server_default='Business trip'))
    op.add_column('per_diems', sa.Column('description', sa.Text(), nullable=True))
    op.add_column('per_diems', sa.Column('departure_time', sa.Time(), nullable=True))
    op.add_column('per_diems', sa.Column('return_time', sa.Time(), nullable=True))
    op.add_column('per_diems', sa.Column('half_days', sa.Integer(), nullable=False, server_default='0'))
    op.add_column('per_diems', sa.Column('total_days', sa.Numeric(precision=5, scale=2), nullable=False, server_default='0'))
    op.add_column('per_diems', sa.Column('full_day_rate', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
    op.add_column('per_diems', sa.Column('half_day_rate', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
    op.add_column('per_diems', sa.Column('breakfast_deduction', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
    op.add_column('per_diems', sa.Column('lunch_deduction', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
    op.add_column('per_diems', sa.Column('dinner_deduction', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
    op.add_column('per_diems', sa.Column('reimbursed', sa.Boolean(), nullable=False, server_default='false'))
    op.add_column('per_diems', sa.Column('reimbursed_at', sa.DateTime(), nullable=True))
    op.add_column('per_diems', sa.Column('approval_notes', sa.Text(), nullable=True))
    if 'per_diems' in existing_tables:
        per_diems_columns = [col['name'] for col in inspector.get_columns('per_diems')]

        if is_sqlite:
            with op.batch_alter_table('per_diems', schema=None) as batch_op:
                # Rename columns
                if 'trip_start_date' in per_diems_columns and 'start_date' not in per_diems_columns:
                    batch_op.alter_column('trip_start_date', new_column_name='start_date')
                if 'trip_end_date' in per_diems_columns and 'end_date' not in per_diems_columns:
                    batch_op.alter_column('trip_end_date', new_column_name='end_date')
                if 'destination_country' in per_diems_columns and 'country' not in per_diems_columns:
                    batch_op.alter_column('destination_country', new_column_name='country')
                if 'destination_location' in per_diems_columns and 'city' not in per_diems_columns:
                    batch_op.alter_column('destination_location', new_column_name='city')
                if 'number_of_days' in per_diems_columns and 'full_days' not in per_diems_columns:
                    batch_op.alter_column('number_of_days', new_column_name='full_days')
                if 'total_amount' in per_diems_columns and 'calculated_amount' not in per_diems_columns:
                    batch_op.alter_column('total_amount', new_column_name='calculated_amount')

                # Add missing columns
                if 'trip_purpose' not in per_diems_columns:
                    batch_op.add_column(sa.Column('trip_purpose', sa.String(length=255), nullable=False, server_default='Business trip'))
                if 'description' not in per_diems_columns:
                    batch_op.add_column(sa.Column('description', sa.Text(), nullable=True))
                if 'departure_time' not in per_diems_columns:
                    batch_op.add_column(sa.Column('departure_time', sa.Time(), nullable=True))
                if 'return_time' not in per_diems_columns:
                    batch_op.add_column(sa.Column('return_time', sa.Time(), nullable=True))
                if 'half_days' not in per_diems_columns:
                    batch_op.add_column(sa.Column('half_days', sa.Integer(), nullable=False, server_default='0'))
                if 'total_days' not in per_diems_columns:
                    batch_op.add_column(sa.Column('total_days', sa.Numeric(precision=5, scale=2), nullable=False, server_default='0'))
                if 'full_day_rate' not in per_diems_columns:
                    batch_op.add_column(sa.Column('full_day_rate', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
                if 'half_day_rate' not in per_diems_columns:
                    batch_op.add_column(sa.Column('half_day_rate', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
                if 'breakfast_deduction' not in per_diems_columns:
                    batch_op.add_column(sa.Column('breakfast_deduction', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
                if 'lunch_deduction' not in per_diems_columns:
                    batch_op.add_column(sa.Column('lunch_deduction', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
                if 'dinner_deduction' not in per_diems_columns:
                    batch_op.add_column(sa.Column('dinner_deduction', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
                if 'reimbursed' not in per_diems_columns:
                    batch_op.add_column(sa.Column('reimbursed', sa.Boolean(), nullable=False, server_default='false'))
                if 'reimbursed_at' not in per_diems_columns:
                    batch_op.add_column(sa.Column('reimbursed_at', sa.DateTime(), nullable=True))
                if 'approval_notes' not in per_diems_columns:
                    batch_op.add_column(sa.Column('approval_notes', sa.Text(), nullable=True))
        else:
            # PostgreSQL and other databases
            if 'trip_start_date' in per_diems_columns and 'start_date' not in per_diems_columns:
                op.alter_column('per_diems', 'trip_start_date', new_column_name='start_date', existing_type=sa.Date(), existing_nullable=False)
            if 'trip_end_date' in per_diems_columns and 'end_date' not in per_diems_columns:
                op.alter_column('per_diems', 'trip_end_date', new_column_name='end_date', existing_type=sa.Date(), existing_nullable=False)
            if 'destination_country' in per_diems_columns and 'country' not in per_diems_columns:
                op.alter_column('per_diems', 'destination_country', new_column_name='country', existing_type=sa.String(2), existing_nullable=False)
            if 'destination_location' in per_diems_columns and 'city' not in per_diems_columns:
                op.alter_column('per_diems', 'destination_location', new_column_name='city', existing_type=sa.String(255), existing_nullable=True)
            if 'number_of_days' in per_diems_columns and 'full_days' not in per_diems_columns:
                op.alter_column('per_diems', 'number_of_days', new_column_name='full_days', existing_type=sa.Integer(), existing_nullable=False)
            if 'total_amount' in per_diems_columns and 'calculated_amount' not in per_diems_columns:
                op.alter_column('per_diems', 'total_amount', new_column_name='calculated_amount', existing_type=sa.Numeric(10, 2), existing_nullable=True)

            # Add missing columns
            if 'trip_purpose' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('trip_purpose', sa.String(length=255), nullable=False, server_default='Business trip'))
            if 'description' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('description', sa.Text(), nullable=True))
            if 'departure_time' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('departure_time', sa.Time(), nullable=True))
            if 'return_time' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('return_time', sa.Time(), nullable=True))
            if 'half_days' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('half_days', sa.Integer(), nullable=False, server_default='0'))
            if 'total_days' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('total_days', sa.Numeric(precision=5, scale=2), nullable=False, server_default='0'))
            if 'full_day_rate' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('full_day_rate', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
            if 'half_day_rate' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('half_day_rate', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
            if 'breakfast_deduction' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('breakfast_deduction', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
            if 'lunch_deduction' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('lunch_deduction', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
            if 'dinner_deduction' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('dinner_deduction', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
            if 'reimbursed' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('reimbursed', sa.Boolean(), nullable=False, server_default='false'))
            if 'reimbursed_at' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('reimbursed_at', sa.DateTime(), nullable=True))
            if 'approval_notes' not in per_diems_columns:
                op.add_column('per_diems', sa.Column('approval_notes', sa.Text(), nullable=True))
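
    # All of the guards above share one shape: reflect the live schema with
    # sqlalchemy.inspect() and only emit DDL for pieces that are missing.
    # A minimal sketch of the pattern (names illustrative, not new code paths):
    #
    #     cols = {c['name'] for c in inspector.get_columns('per_diems')}
    #     if 'trip_purpose' not in cols:
    #         op.add_column('per_diems', sa.Column('trip_purpose', sa.String(255)))
    #
    # which keeps the migration safe to re-run against half-upgraded databases.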


def downgrade():
    # Revert per_diems changes
    op.drop_column('per_diems', 'approval_notes')
    op.drop_column('per_diems', 'reimbursed_at')
    op.drop_column('per_diems', 'reimbursed')
    op.drop_column('per_diems', 'dinner_deduction')
    op.drop_column('per_diems', 'lunch_deduction')
    op.drop_column('per_diems', 'breakfast_deduction')
    op.drop_column('per_diems', 'half_day_rate')
    op.drop_column('per_diems', 'full_day_rate')
    op.drop_column('per_diems', 'total_days')
    op.drop_column('per_diems', 'half_days')
    op.drop_column('per_diems', 'return_time')
    op.drop_column('per_diems', 'departure_time')
    op.drop_column('per_diems', 'description')
    op.drop_column('per_diems', 'trip_purpose')
    # Note: Downgrade is complex and may not work perfectly in SQLite
    # For production, consider backing up before downgrading
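    # SQLite's ALTER TABLE only covers a subset of operations (RENAME and
    # ADD COLUMN; DROP COLUMN only on newer SQLite builds), which is why the
    # branches below route changes through op.batch_alter_table(). Batch mode
    # recreates the table behind the scenes, roughly (sketch for orientation;
    # the exact statements are generated by Alembic):
    #   CREATE TABLE _alembic_tmp_per_diems (...new schema...);
    #   INSERT INTO _alembic_tmp_per_diems SELECT ... FROM per_diems;
    #   DROP TABLE per_diems;  -- then the temp table is renamed back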
    conn = op.get_bind()
    is_sqlite = conn.dialect.name == 'sqlite'
    inspector = inspect(conn)
    existing_tables = inspector.get_table_names()

    op.alter_column('per_diems', 'calculated_amount', new_column_name='total_amount')
    op.alter_column('per_diems', 'full_days', new_column_name='number_of_days')
    op.alter_column('per_diems', 'city', new_column_name='destination_location')
    op.alter_column('per_diems', 'country', new_column_name='destination_country')
    op.alter_column('per_diems', 'end_date', new_column_name='trip_end_date')
    op.alter_column('per_diems', 'start_date', new_column_name='trip_start_date')
    # Revert per_diems changes
    if 'per_diems' in existing_tables:
        per_diems_columns = [col['name'] for col in inspector.get_columns('per_diems')]

        # Drop added columns
        for col in ['approval_notes', 'reimbursed_at', 'reimbursed', 'dinner_deduction', 'lunch_deduction',
                    'breakfast_deduction', 'half_day_rate', 'full_day_rate', 'total_days', 'half_days',
                    'return_time', 'departure_time', 'description', 'trip_purpose']:
            if col in per_diems_columns:
                if is_sqlite:
                    with op.batch_alter_table('per_diems', schema=None) as batch_op:
                        batch_op.drop_column(col)
                else:
                    op.drop_column('per_diems', col)

        # Rename columns back
        if is_sqlite:
            with op.batch_alter_table('per_diems', schema=None) as batch_op:
                if 'calculated_amount' in per_diems_columns:
                    batch_op.alter_column('calculated_amount', new_column_name='total_amount')
                if 'full_days' in per_diems_columns:
                    batch_op.alter_column('full_days', new_column_name='number_of_days')
                if 'city' in per_diems_columns:
                    batch_op.alter_column('city', new_column_name='destination_location')
                if 'country' in per_diems_columns:
                    batch_op.alter_column('country', new_column_name='destination_country')
                if 'end_date' in per_diems_columns:
                    batch_op.alter_column('end_date', new_column_name='trip_end_date')
                if 'start_date' in per_diems_columns:
                    batch_op.alter_column('start_date', new_column_name='trip_start_date')
        else:
            if 'calculated_amount' in per_diems_columns:
                op.alter_column('per_diems', 'calculated_amount', new_column_name='total_amount')
            if 'full_days' in per_diems_columns:
                op.alter_column('per_diems', 'full_days', new_column_name='number_of_days')
            if 'city' in per_diems_columns:
                op.alter_column('per_diems', 'city', new_column_name='destination_location')
            if 'country' in per_diems_columns:
                op.alter_column('per_diems', 'country', new_column_name='destination_country')
            if 'end_date' in per_diems_columns:
                op.alter_column('per_diems', 'end_date', new_column_name='trip_end_date')
            if 'start_date' in per_diems_columns:
                op.alter_column('per_diems', 'start_date', new_column_name='trip_start_date')

    # Revert per_diem_rates changes
    op.add_column('per_diem_rates', sa.Column('rate_per_day', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
    op.add_column('per_diem_rates', sa.Column('breakfast_deduction', sa.Numeric(precision=10, scale=2), nullable=True))
    op.add_column('per_diem_rates', sa.Column('lunch_deduction', sa.Numeric(precision=10, scale=2), nullable=True))
    op.add_column('per_diem_rates', sa.Column('dinner_deduction', sa.Numeric(precision=10, scale=2), nullable=True))
    op.execute("UPDATE per_diem_rates SET rate_per_day = full_day_rate")
    op.drop_column('per_diem_rates', 'updated_at')
    op.drop_column('per_diem_rates', 'incidental_rate')
    op.drop_column('per_diem_rates', 'dinner_rate')
    op.drop_column('per_diem_rates', 'lunch_rate')
    op.drop_column('per_diem_rates', 'breakfast_rate')
    op.drop_column('per_diem_rates', 'half_day_rate')
    op.drop_column('per_diem_rates', 'full_day_rate')

    op.alter_column('per_diem_rates', 'country', new_column_name='country_code')
    op.alter_column('per_diem_rates', 'effective_to', new_column_name='valid_to')
    op.alter_column('per_diem_rates', 'effective_from', new_column_name='valid_from')
    op.alter_column('per_diem_rates', 'city', new_column_name='location')
    if 'per_diem_rates' in existing_tables:
        per_diem_rates_columns = [col['name'] for col in inspector.get_columns('per_diem_rates')]

        # Add back old columns
        if 'rate_per_day' not in per_diem_rates_columns:
            if is_sqlite:
                with op.batch_alter_table('per_diem_rates', schema=None) as batch_op:
                    batch_op.add_column(sa.Column('rate_per_day', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))
            else:
                op.add_column('per_diem_rates', sa.Column('rate_per_day', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0'))

        # Copy data back (rate_per_day is guaranteed to exist at this point)
        if 'full_day_rate' in per_diem_rates_columns:
            conn.execute(sa.text("UPDATE per_diem_rates SET rate_per_day = full_day_rate"))

        # Drop new columns and rename back
        if is_sqlite:
            with op.batch_alter_table('per_diem_rates', schema=None) as batch_op:
                for col in ['updated_at', 'incidental_rate', 'dinner_rate', 'lunch_rate', 'breakfast_rate',
                            'half_day_rate', 'full_day_rate']:
                    if col in per_diem_rates_columns:
                        batch_op.drop_column(col)

                if 'country' in per_diem_rates_columns:
                    batch_op.alter_column('country', new_column_name='country_code')
                if 'effective_to' in per_diem_rates_columns:
                    batch_op.alter_column('effective_to', new_column_name='valid_to')
                if 'effective_from' in per_diem_rates_columns:
                    batch_op.alter_column('effective_from', new_column_name='valid_from')
                if 'city' in per_diem_rates_columns:
                    batch_op.alter_column('city', new_column_name='location')
        else:
            for col in ['updated_at', 'incidental_rate', 'dinner_rate', 'lunch_rate', 'breakfast_rate',
                        'half_day_rate', 'full_day_rate']:
                if col in per_diem_rates_columns:
                    op.drop_column('per_diem_rates', col)

            if 'country' in per_diem_rates_columns:
                op.alter_column('per_diem_rates', 'country', new_column_name='country_code')
            if 'effective_to' in per_diem_rates_columns:
                op.alter_column('per_diem_rates', 'effective_to', new_column_name='valid_to')
            if 'effective_from' in per_diem_rates_columns:
                op.alter_column('per_diem_rates', 'effective_from', new_column_name='valid_from')
            if 'city' in per_diem_rates_columns:
                op.alter_column('per_diem_rates', 'city', new_column_name='location')

    # Revert mileage changes
    op.drop_column('mileage', 'currency_code')
    op.drop_column('mileage', 'reimbursed_at')
    op.drop_column('mileage', 'reimbursed')
    op.drop_column('mileage', 'is_round_trip')
    op.drop_column('mileage', 'vehicle_description')
    op.drop_column('mileage', 'rate_per_mile')
    op.drop_column('mileage', 'distance_miles')
    op.drop_column('mileage', 'end_odometer')
    op.drop_column('mileage', 'start_odometer')
    op.drop_column('mileage', 'description')

    op.alter_column('mileage', 'rate_per_km', nullable=True)
    op.alter_column('mileage', 'calculated_amount', new_column_name='total_amount')
    op.alter_column('mileage', 'license_plate', new_column_name='vehicle_registration')
    op.alter_column('mileage', 'purpose', new_column_name='trip_purpose')

    if 'mileage' in existing_tables:
        mileage_columns = [col['name'] for col in inspector.get_columns('mileage')]

        # Drop added columns
        for col in ['currency_code', 'reimbursed_at', 'reimbursed', 'is_round_trip', 'vehicle_description',
                    'rate_per_mile', 'distance_miles', 'end_odometer', 'start_odometer', 'description']:
            if col in mileage_columns:
                if is_sqlite:
                    with op.batch_alter_table('mileage', schema=None) as batch_op:
                        batch_op.drop_column(col)
                else:
                    op.drop_column('mileage', col)

        # Rename columns back and revert nullability
        if is_sqlite:
            with op.batch_alter_table('mileage', schema=None) as batch_op:
                if 'rate_per_km' in mileage_columns:
                    batch_op.alter_column('rate_per_km', nullable=True, server_default=None)
                if 'calculated_amount' in mileage_columns:
                    batch_op.alter_column('calculated_amount', new_column_name='total_amount')
                if 'license_plate' in mileage_columns:
                    batch_op.alter_column('license_plate', new_column_name='vehicle_registration')
                if 'purpose' in mileage_columns:
                    batch_op.alter_column('purpose', new_column_name='trip_purpose')
        else:
            if 'rate_per_km' in mileage_columns:
                op.alter_column('mileage', 'rate_per_km', nullable=True)
            if 'calculated_amount' in mileage_columns:
                op.alter_column('mileage', 'calculated_amount', new_column_name='total_amount')
            if 'license_plate' in mileage_columns:
                op.alter_column('mileage', 'license_plate', new_column_name='vehicle_registration')
            if 'purpose' in mileage_columns:
                op.alter_column('mileage', 'purpose', new_column_name='trip_purpose')

@@ -18,11 +18,23 @@ depends_on = None

def upgrade():
    """Add prepaid hours configuration and ledger tracking."""
    with op.batch_alter_table('clients', schema=None) as batch_op:
        batch_op.add_column(sa.Column('prepaid_hours_monthly', sa.Numeric(7, 2), nullable=True))
        batch_op.add_column(sa.Column('prepaid_reset_day', sa.Integer(), nullable=False, server_default='1'))
    from sqlalchemy import inspect
    conn = op.get_bind()
    inspector = inspect(conn)
    existing_tables = inspector.get_table_names()

    # Add columns to clients table (idempotent)
    if 'clients' in existing_tables:
        clients_columns = [col['name'] for col in inspector.get_columns('clients')]
        with op.batch_alter_table('clients', schema=None) as batch_op:
            if 'prepaid_hours_monthly' not in clients_columns:
                batch_op.add_column(sa.Column('prepaid_hours_monthly', sa.Numeric(7, 2), nullable=True))
            if 'prepaid_reset_day' not in clients_columns:
                batch_op.add_column(sa.Column('prepaid_reset_day', sa.Integer(), nullable=False, server_default='1'))

    op.create_table(
    # Create table (idempotent)
    if 'client_prepaid_consumptions' not in existing_tables:
        op.create_table(
            'client_prepaid_consumptions',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('client_id', sa.Integer(), nullable=False),
@@ -36,24 +48,36 @@ def upgrade():
        sa.ForeignKeyConstraint(['time_entry_id'], ['time_entries.id'], ),
        sa.ForeignKeyConstraint(['invoice_id'], ['invoices.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('time_entry_id', name='uq_client_prepaid_consumptions_time_entry_id')
    )
    op.create_index(
        'ix_client_prepaid_consumptions_client_month',
        'client_prepaid_consumptions',
        ['client_id', 'allocation_month'],
        unique=False
    )
    op.create_index(
        'ix_client_prepaid_consumptions_invoice_id',
        'client_prepaid_consumptions',
        ['invoice_id'],
        unique=False
    )
            sa.UniqueConstraint('time_entry_id', name='uq_client_prepaid_consumptions_time_entry_id')
        )

    # Create indexes (idempotent)
    existing_indexes = [idx['name'] for idx in inspector.get_indexes('client_prepaid_consumptions')] if 'client_prepaid_consumptions' in inspector.get_table_names() else []

    if 'ix_client_prepaid_consumptions_client_month' not in existing_indexes:
        op.create_index(
            'ix_client_prepaid_consumptions_client_month',
            'client_prepaid_consumptions',
            ['client_id', 'allocation_month'],
            unique=False
        )
    if 'ix_client_prepaid_consumptions_invoice_id' not in existing_indexes:
        op.create_index(
            'ix_client_prepaid_consumptions_invoice_id',
            'client_prepaid_consumptions',
            ['invoice_id'],
            unique=False
        )

    # Remove server default now that existing rows are backfilled
    with op.batch_alter_table('clients', schema=None) as batch_op:
        batch_op.alter_column('prepaid_reset_day', server_default=None)
    # Remove server default now that existing rows are backfilled (only if column exists)
    if 'clients' in existing_tables:
        clients_columns = [col['name'] for col in inspector.get_columns('clients')]
        if 'prepaid_reset_day' in clients_columns:
            # Check if it has a server default
            prepaid_col = next((col for col in inspector.get_columns('clients') if col['name'] == 'prepaid_reset_day'), None)
            if prepaid_col and prepaid_col.get('default'):
                with op.batch_alter_table('clients', schema=None) as batch_op:
                    batch_op.alter_column('prepaid_reset_day', server_default=None)


def downgrade():

@@ -7,7 +7,7 @@ Create Date: 2025-01-20
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
from sqlalchemy import text, inspect


# revision identifiers, used by Alembic.
@@ -20,45 +20,82 @@ depends_on = None
def upgrade():
    """Add project_id column to kanban_columns for per-project kanban workflows"""

    # Drop the old unique constraint on 'key' alone (handle different constraint names)
    conn = op.get_bind()
    inspector = inspect(conn)
    is_sqlite = conn.dialect.name == 'sqlite'
    existing_tables = inspector.get_table_names()

    if 'kanban_columns' not in existing_tables:
        # Table doesn't exist, skip migration
        return

    # Get existing columns and constraints
    kanban_columns = [col['name'] for col in inspector.get_columns('kanban_columns')]
    kanban_fks = [fk['name'] for fk in inspector.get_foreign_keys('kanban_columns')]
    kanban_indexes = [idx['name'] for idx in inspector.get_indexes('kanban_columns')]
    kanban_unique_constraints = []
    try:
        op.drop_constraint('kanban_columns_key_key', 'kanban_columns', type_='unique')
    except Exception:
        # Try alternative constraint name that might exist
        try:
            op.drop_constraint('uq_kanban_columns_key', 'kanban_columns', type_='unique')
        except Exception:
            # Constraint might not exist or have different name, continue
            pass
        # Try to get unique constraints (method varies by database)
        if hasattr(inspector, 'get_unique_constraints'):
            kanban_unique_constraints = [uc['name'] for uc in inspector.get_unique_constraints('kanban_columns')]
    except Exception:
        pass
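    # get_unique_constraints() isn't implemented by every dialect (some
    # backends/older SQLAlchemy versions raise NotImplementedError), hence the
    # hasattr() guard above; when it is unavailable the list simply stays empty
    # and the name-based constraint drops below are skipped.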

    # Add project_id column (nullable, NULL = global columns)
    op.add_column('kanban_columns',
        sa.Column('project_id', sa.Integer(), nullable=True)
    )
    # Drop the old unique constraint on 'key' alone (handle different constraint names)
    for constraint_name in ['kanban_columns_key_key', 'uq_kanban_columns_key']:
        if constraint_name in kanban_unique_constraints:
            try:
                op.drop_constraint(constraint_name, 'kanban_columns', type_='unique')
            except Exception:
                pass

    # Add foreign key constraint
    op.create_foreign_key(
        'fk_kanban_columns_project_id',
        'kanban_columns', 'projects',
        ['project_id'], ['id'],
        ondelete='CASCADE'
    )
    # Add project_id column (nullable, NULL = global columns) - idempotent
    if 'project_id' not in kanban_columns:
        op.add_column('kanban_columns',
            sa.Column('project_id', sa.Integer(), nullable=True)
        )

    # Create index on project_id for better query performance
    op.create_index('idx_kanban_columns_project_id', 'kanban_columns', ['project_id'])
    # Add foreign key constraint - idempotent
    if 'fk_kanban_columns_project_id' not in kanban_fks:
        if is_sqlite:
            with op.batch_alter_table('kanban_columns', schema=None) as batch_op:
                batch_op.create_foreign_key(
                    'fk_kanban_columns_project_id',
                    'projects',
                    ['project_id'], ['id']
                )
        else:
            op.create_foreign_key(
                'fk_kanban_columns_project_id',
                'kanban_columns', 'projects',
                ['project_id'], ['id'],
                ondelete='CASCADE'
            )

    # Create index on project_id for better query performance - idempotent
    if 'idx_kanban_columns_project_id' not in kanban_indexes:
        op.create_index('idx_kanban_columns_project_id', 'kanban_columns', ['project_id'])

    # Explicitly set project_id to NULL for existing columns (they are global columns)
    connection = op.get_bind()
    connection.execute(text("UPDATE kanban_columns SET project_id = NULL WHERE project_id IS NULL"))
    if 'project_id' in kanban_columns:
        try:
            conn.execute(text("UPDATE kanban_columns SET project_id = NULL WHERE project_id IS NULL"))
        except Exception:
            pass

    # Create new unique constraint on (key, project_id)
    # Create new unique constraint on (key, project_id) - idempotent
    # This allows the same key to exist for different projects, but unique per project
    # Note: PostgreSQL allows multiple NULLs in unique constraints, so global columns can share keys
    op.create_unique_constraint(
        'uq_kanban_column_key_project',
        'kanban_columns',
        ['key', 'project_id']
    )
    if 'uq_kanban_column_key_project' not in kanban_unique_constraints:
        try:
            op.create_unique_constraint(
                'uq_kanban_column_key_project',
                'kanban_columns',
                ['key', 'project_id']
            )
        except Exception:
            # Constraint might already exist with different name, skip
            pass
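
    # On the NULL semantics relied on above: in both PostgreSQL and SQLite,
    # NULLs compare as distinct for uniqueness, so with this constraint in
    # place two global rows such as (illustrative SQL, not emitted here)
    #   INSERT INTO kanban_columns (key, project_id) VALUES ('todo', NULL);
    #   INSERT INTO kanban_columns (key, project_id) VALUES ('todo', NULL);
    # would both be accepted; only rows sharing the same non-NULL project_id
    # must use distinct keys. Enforcing unique global keys as well would need
    # a partial unique index, e.g. ON kanban_columns (key) WHERE project_id IS NULL.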


def downgrade():

@@ -86,10 +86,38 @@ def upgrade():
    op.create_index('ix_invoice_emails_status', 'invoice_emails', ['status'])
    op.create_index('ix_invoice_emails_sent_at', 'invoice_emails', ['sent_at'])

    # Add recurring_invoice_id to invoices table
    op.add_column('invoices', sa.Column('recurring_invoice_id', sa.Integer(), nullable=True))
    op.create_index('ix_invoices_recurring_invoice_id', 'invoices', ['recurring_invoice_id'])
    op.create_foreign_key('fk_invoices_recurring_invoice_id', 'invoices', 'recurring_invoices', ['recurring_invoice_id'], ['id'], ondelete='SET NULL')
    # Add recurring_invoice_id to invoices table (idempotent)
    from sqlalchemy import inspect
    inspector = inspect(op.get_bind())
    existing_tables = inspector.get_table_names()

    if 'invoices' in existing_tables:
        invoices_columns = [col['name'] for col in inspector.get_columns('invoices')]
        invoices_indexes = [idx['name'] for idx in inspector.get_indexes('invoices')]
        invoices_fks = [fk['name'] for fk in inspector.get_foreign_keys('invoices')]
        is_sqlite = op.get_bind().dialect.name == 'sqlite'

        if 'recurring_invoice_id' not in invoices_columns:
            op.add_column('invoices', sa.Column('recurring_invoice_id', sa.Integer(), nullable=True))

        if 'ix_invoices_recurring_invoice_id' not in invoices_indexes:
            try:
                op.create_index('ix_invoices_recurring_invoice_id', 'invoices', ['recurring_invoice_id'])
            except Exception:
                pass

        if 'recurring_invoice_id' in invoices_columns and 'fk_invoices_recurring_invoice_id' not in invoices_fks:
            if is_sqlite:
                with op.batch_alter_table('invoices', schema=None) as batch_op:
                    try:
                        batch_op.create_foreign_key('fk_invoices_recurring_invoice_id', 'recurring_invoices', ['recurring_invoice_id'], ['id'])
                    except Exception:
                        pass
            else:
                try:
                    op.create_foreign_key('fk_invoices_recurring_invoice_id', 'invoices', 'recurring_invoices', ['recurring_invoice_id'], ['id'], ondelete='SET NULL')
                except Exception:
                    pass


def downgrade():

@@ -18,23 +18,46 @@ depends_on = None

def upgrade():
    """Add client_portal_enabled and client_id columns to users table"""
    from sqlalchemy import inspect
    conn = op.get_bind()
    inspector = inspect(conn)
    is_sqlite = conn.dialect.name == 'sqlite'
    existing_tables = inspector.get_table_names()

    # Add client_portal_enabled column
    op.add_column('users',
        sa.Column('client_portal_enabled', sa.Boolean(), nullable=False, server_default='0')
    )
    if 'users' not in existing_tables:
        return

    # Add client_id column with foreign key
    op.add_column('users',
        sa.Column('client_id', sa.Integer(), nullable=True)
    )
    op.create_index('ix_users_client_id', 'users', ['client_id'])
    op.create_foreign_key(
        'fk_users_client_id',
        'users', 'clients',
        ['client_id'], ['id'],
        ondelete='SET NULL'
    )
    users_columns = [col['name'] for col in inspector.get_columns('users')]
    users_indexes = [idx['name'] for idx in inspector.get_indexes('users')]
    users_fks = [fk['name'] for fk in inspector.get_foreign_keys('users')]

    # Add client_portal_enabled column (idempotent)
    if 'client_portal_enabled' not in users_columns:
        op.add_column('users',
            sa.Column('client_portal_enabled', sa.Boolean(), nullable=False, server_default='0')
        )

    # Add client_id column with foreign key (idempotent)
    if 'client_id' not in users_columns:
        op.add_column('users',
            sa.Column('client_id', sa.Integer(), nullable=True)
        )

    if 'client_id' in users_columns:
        if 'ix_users_client_id' not in users_indexes:
            op.create_index('ix_users_client_id', 'users', ['client_id'])

        if 'fk_users_client_id' not in users_fks:
            if is_sqlite:
                with op.batch_alter_table('users', schema=None) as batch_op:
                    batch_op.create_foreign_key('fk_users_client_id', 'clients', ['client_id'], ['id'])
            else:
                op.create_foreign_key(
                    'fk_users_client_id',
                    'users', 'clients',
                    ['client_id'], ['id'],
                    ondelete='SET NULL'
                )


def downgrade():

@@ -17,31 +17,78 @@ depends_on = None


def upgrade():
    """Add portal_enabled, portal_username, and portal_password_hash columns to clients table"""
    """Add portal_enabled, portal_username, portal_password_hash, and portal_issues_enabled columns to clients table"""
    from sqlalchemy import inspect
    bind = op.get_bind()
    inspector = inspect(bind)
    dialect_name = bind.dialect.name if bind else 'generic'

    # Add portal_enabled column
    op.add_column('clients',
        sa.Column('portal_enabled', sa.Boolean(), nullable=False, server_default='0')
    )
    # Determine boolean default based on database dialect
    bool_true_default = '1' if dialect_name == 'sqlite' else ('true' if dialect_name == 'postgresql' else '1')
    bool_false_default = '0' if dialect_name == 'sqlite' else ('false' if dialect_name == 'postgresql' else '0')
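    # server_default text is emitted verbatim into the DDL, which is why the
    # literal differs per dialect: PostgreSQL accepts the keywords true/false
    # for boolean columns, while SQLite stores booleans as the integers 1/0.
    # Illustrative sketch (not part of this migration):
    #   sa.Column('flag', sa.Boolean(), server_default=sa.text('true'))  # PostgreSQL
    #   sa.Column('flag', sa.Boolean(), server_default=sa.text('1'))     # SQLite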

    # Add portal_username column
    op.add_column('clients',
        sa.Column('portal_username', sa.String(length=80), nullable=True)
    )
    op.create_index('ix_clients_portal_username', 'clients', ['portal_username'], unique=True)
    # Check existing columns (idempotent)
    existing_tables = inspector.get_table_names()
    if 'clients' not in existing_tables:
        return

    # Add portal_password_hash column
    op.add_column('clients',
        sa.Column('portal_password_hash', sa.String(length=255), nullable=True)
    )
    clients_columns = {c['name'] for c in inspector.get_columns('clients')}
    clients_indexes = [idx['name'] for idx in inspector.get_indexes('clients')]

    # Add portal_enabled column (idempotent)
    if 'portal_enabled' not in clients_columns:
        op.add_column('clients',
            sa.Column('portal_enabled', sa.Boolean(), nullable=False, server_default=sa.text(bool_false_default))
        )

    # Add portal_username column (idempotent)
    if 'portal_username' not in clients_columns:
        op.add_column('clients',
            sa.Column('portal_username', sa.String(length=80), nullable=True)
        )

    # Create index for portal_username (idempotent)
    if 'ix_clients_portal_username' not in clients_indexes:
        try:
            op.create_index('ix_clients_portal_username', 'clients', ['portal_username'], unique=True)
        except Exception:
            pass  # Index might already exist

    # Add portal_password_hash column (idempotent)
    if 'portal_password_hash' not in clients_columns:
        op.add_column('clients',
            sa.Column('portal_password_hash', sa.String(length=255), nullable=True)
        )

    # Add portal_issues_enabled column (idempotent) - default True
    if 'portal_issues_enabled' not in clients_columns:
        op.add_column('clients',
            sa.Column('portal_issues_enabled', sa.Boolean(), nullable=False, server_default=sa.text(bool_true_default))
        )


def downgrade():
    """Remove client portal columns from clients table"""

    # Drop columns
    op.drop_index('ix_clients_portal_username', 'clients')
    op.drop_column('clients', 'portal_password_hash')
    op.drop_column('clients', 'portal_username')
    op.drop_column('clients', 'portal_enabled')
    try:
        op.drop_index('ix_clients_portal_username', 'clients')
    except Exception:
        pass
    try:
        op.drop_column('clients', 'portal_issues_enabled')
    except Exception:
        pass
    try:
        op.drop_column('clients', 'portal_password_hash')
    except Exception:
        pass
    try:
        op.drop_column('clients', 'portal_username')
    except Exception:
        pass
    try:
        op.drop_column('clients', 'portal_enabled')
    except Exception:
        pass

@@ -18,9 +18,15 @@ depends_on = None

def upgrade():
    """Add offers table and foreign keys to projects and invoices"""
    from sqlalchemy import inspect
    conn = op.get_bind()
    inspector = inspect(conn)
    is_sqlite = conn.dialect.name == 'sqlite'
    existing_tables = inspector.get_table_names()

    # Create offers table
    op.create_table('offers',
    # Create offers table (idempotent)
    if 'offers' not in existing_tables:
        op.create_table('offers',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('offer_number', sa.String(length=50), nullable=False),
            sa.Column('client_id', sa.Integer(), nullable=False),
@@ -47,28 +53,64 @@ def upgrade():
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['created_by'], ['users.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['accepted_by'], ['users.id'], ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id')
    )
            sa.PrimaryKeyConstraint('id')
        )

    # Create indexes (idempotent)
    offers_indexes = [idx['name'] for idx in inspector.get_indexes('offers')] if 'offers' in inspector.get_table_names() else []

    if 'ix_offers_offer_number' not in offers_indexes:
        op.create_index('ix_offers_offer_number', 'offers', ['offer_number'], unique=True)
    if 'ix_offers_client_id' not in offers_indexes:
        op.create_index('ix_offers_client_id', 'offers', ['client_id'])
    if 'ix_offers_project_id' not in offers_indexes:
        op.create_index('ix_offers_project_id', 'offers', ['project_id'])
    if 'ix_offers_status' not in offers_indexes:
        op.create_index('ix_offers_status', 'offers', ['status'])

    # Create indexes
    op.create_index('ix_offers_offer_number', 'offers', ['offer_number'], unique=True)
    op.create_index('ix_offers_client_id', 'offers', ['client_id'])
    op.create_index('ix_offers_project_id', 'offers', ['project_id'])
    op.create_index('ix_offers_status', 'offers', ['status'])
    # Add offer_id to projects table (idempotent)
    if 'projects' in existing_tables:
        projects_columns = [col['name'] for col in inspector.get_columns('projects')]
        projects_indexes = [idx['name'] for idx in inspector.get_indexes('projects')]
        projects_fks = [fk['name'] for fk in inspector.get_foreign_keys('projects')]

        if 'offer_id' not in projects_columns:
            op.add_column('projects',
                sa.Column('offer_id', sa.Integer(), nullable=True)
            )

        if 'offer_id' in projects_columns:
            if 'ix_projects_offer_id' not in projects_indexes:
                op.create_index('ix_projects_offer_id', 'projects', ['offer_id'])

            if 'fk_projects_offer_id' not in projects_fks:
                if is_sqlite:
                    with op.batch_alter_table('projects', schema=None) as batch_op:
                        batch_op.create_foreign_key('fk_projects_offer_id', 'offers', ['offer_id'], ['id'])
                else:
                    op.create_foreign_key('fk_projects_offer_id', 'projects', 'offers', ['offer_id'], ['id'], ondelete='SET NULL')

    # Add offer_id to projects table
    op.add_column('projects',
        sa.Column('offer_id', sa.Integer(), nullable=True)
    )
    op.create_foreign_key('fk_projects_offer_id', 'projects', 'offers', ['offer_id'], ['id'], ondelete='SET NULL')
    op.create_index('ix_projects_offer_id', 'projects', ['offer_id'])

    # Add offer_id to invoices table
    op.add_column('invoices',
        sa.Column('offer_id', sa.Integer(), nullable=True)
    )
    op.create_foreign_key('fk_invoices_offer_id', 'invoices', 'offers', ['offer_id'], ['id'], ondelete='SET NULL')
    op.create_index('ix_invoices_offer_id', 'invoices', ['offer_id'])
    # Add offer_id to invoices table (idempotent)
    if 'invoices' in existing_tables:
        invoices_columns = [col['name'] for col in inspector.get_columns('invoices')]
        invoices_indexes = [idx['name'] for idx in inspector.get_indexes('invoices')]
        invoices_fks = [fk['name'] for fk in inspector.get_foreign_keys('invoices')]

        if 'offer_id' not in invoices_columns:
            op.add_column('invoices',
                sa.Column('offer_id', sa.Integer(), nullable=True)
            )

        if 'offer_id' in invoices_columns:
            if 'ix_invoices_offer_id' not in invoices_indexes:
                op.create_index('ix_invoices_offer_id', 'invoices', ['offer_id'])

            if 'fk_invoices_offer_id' not in invoices_fks:
                if is_sqlite:
                    with op.batch_alter_table('invoices', schema=None) as batch_op:
                        batch_op.create_foreign_key('fk_invoices_offer_id', 'offers', ['offer_id'], ['id'])
                else:
                    op.create_foreign_key('fk_invoices_offer_id', 'invoices', 'offers', ['offer_id'], ['id'], ondelete='SET NULL')


def downgrade():

@@ -19,97 +19,282 @@ depends_on = None

def upgrade():
    """Rename offers to quotes and add new features"""

    # Drop indexes on offers table before renaming
    op.drop_index('ix_offers_offer_number', 'offers')
    op.drop_index('ix_offers_client_id', 'offers')
    op.drop_index('ix_offers_project_id', 'offers')
    op.drop_index('ix_offers_status', 'offers')
    conn = op.get_bind()
    is_sqlite = conn.dialect.name == 'sqlite'
    from sqlalchemy import inspect
    inspector = inspect(conn)
    existing_tables = inspector.get_table_names()

    # Rename offers table to quotes
    op.rename_table('offers', 'quotes')
    # Only proceed if offers table exists
    if 'offers' not in existing_tables and 'quotes' in existing_tables:
        # Already migrated, skip
        return

    if 'offers' in existing_tables:
        # Drop indexes on offers table before renaming
        try:
            op.drop_index('ix_offers_offer_number', 'offers')
        except Exception:
            pass
        try:
            op.drop_index('ix_offers_client_id', 'offers')
        except Exception:
            pass
        try:
            op.drop_index('ix_offers_project_id', 'offers')
        except Exception:
            pass
        try:
            op.drop_index('ix_offers_status', 'offers')
        except Exception:
            pass

        # Rename offers table to quotes
        op.rename_table('offers', 'quotes')
        # Refresh the table list after the rename so the checks below see 'quotes'
        inspector = inspect(conn)
        existing_tables = inspector.get_table_names()

    # Rename columns in quotes table
    op.alter_column('quotes', 'offer_number', new_column_name='quote_number', existing_type=sa.String(length=50), existing_nullable=False)
    if 'quotes' in existing_tables:
        quotes_columns = [col['name'] for col in inspector.get_columns('quotes')]

        if 'offer_number' in quotes_columns and 'quote_number' not in quotes_columns:
            if is_sqlite:
                with op.batch_alter_table('quotes', schema=None) as batch_op:
                    batch_op.alter_column('offer_number', new_column_name='quote_number')
            else:
                op.alter_column('quotes', 'offer_number', new_column_name='quote_number', existing_type=sa.String(length=50), existing_nullable=False)
            # Refresh inspector after column rename
            inspector = inspect(conn)
            quotes_columns = [col['name'] for col in inspector.get_columns('quotes')]

        # Add new columns to quotes table (idempotent)
        if 'subtotal' not in quotes_columns:
            op.add_column('quotes',
                sa.Column('subtotal', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0')
            )
        if 'tax_amount' not in quotes_columns:
            op.add_column('quotes',
                sa.Column('tax_amount', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0')
            )
        if 'visible_to_client' not in quotes_columns:
            op.add_column('quotes',
                sa.Column('visible_to_client', sa.Boolean(), nullable=False, server_default='false')
            )
        if 'template_id' not in quotes_columns:
            op.add_column('quotes',
                sa.Column('template_id', sa.Integer(), nullable=True)
            )

        # Refresh columns after adding (new inspector, since reflection results are cached)
        inspector = inspect(conn)
        quotes_columns = [col['name'] for col in inspector.get_columns('quotes')]
    else:
        quotes_columns = []

    # Add new columns to quotes table
    op.add_column('quotes',
        sa.Column('subtotal', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0')
    )
    op.add_column('quotes',
        sa.Column('tax_amount', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0')
    )
    op.add_column('quotes',
        sa.Column('visible_to_client', sa.Boolean(), nullable=False, server_default='false')
    )
    op.add_column('quotes',
        sa.Column('template_id', sa.Integer(), nullable=True)
    )
    # Create quote_items table (idempotent)
    if 'quote_items' not in existing_tables:
        op.create_table('quote_items',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('quote_id', sa.Integer(), nullable=False),
            sa.Column('description', sa.String(length=500), nullable=False),
            sa.Column('quantity', sa.Numeric(precision=10, scale=2), nullable=False, server_default='1'),
            sa.Column('unit_price', sa.Numeric(precision=10, scale=2), nullable=False),
            sa.Column('total_amount', sa.Numeric(precision=10, scale=2), nullable=False),
            sa.Column('unit', sa.String(length=20), nullable=True),
            sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
            sa.ForeignKeyConstraint(['quote_id'], ['quotes.id'], ondelete='CASCADE'),
            sa.PrimaryKeyConstraint('id')
        )
        # Create index (idempotent)
        try:
            op.create_index('ix_quote_items_quote_id', 'quote_items', ['quote_id'])
        except Exception:
            pass  # Index might already exist
    else:
        # Table exists, ensure index exists
        try:
            quote_items_indexes = [idx['name'] for idx in inspector.get_indexes('quote_items')]
            if 'ix_quote_items_quote_id' not in quote_items_indexes:
                op.create_index('ix_quote_items_quote_id', 'quote_items', ['quote_id'])
        except Exception:
            pass

    # Create quote_items table
    op.create_table('quote_items',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('quote_id', sa.Integer(), nullable=False),
        sa.Column('description', sa.String(length=500), nullable=False),
        sa.Column('quantity', sa.Numeric(precision=10, scale=2), nullable=False, server_default='1'),
        sa.Column('unit_price', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('total_amount', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('unit', sa.String(length=20), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.ForeignKeyConstraint(['quote_id'], ['quotes.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_quote_items_quote_id', 'quote_items', ['quote_id'])
    # Create quote_pdf_templates table (idempotent)
    if 'quote_pdf_templates' not in existing_tables:
        op.create_table('quote_pdf_templates',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('page_size', sa.String(length=20), nullable=False),
            sa.Column('template_html', sa.Text(), nullable=True),
            sa.Column('template_css', sa.Text(), nullable=True),
            sa.Column('design_json', sa.Text(), nullable=True),
            sa.Column('is_default', sa.Boolean(), nullable=False, server_default='false'),
            sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
            sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('page_size')
        )

    # Create quote_pdf_templates table
    op.create_table('quote_pdf_templates',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('page_size', sa.String(length=20), nullable=False),
        sa.Column('template_html', sa.Text(), nullable=True),
        sa.Column('template_css', sa.Text(), nullable=True),
        sa.Column('design_json', sa.Text(), nullable=True),
        sa.Column('is_default', sa.Boolean(), nullable=False, server_default='false'),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('page_size')
    )
    # Recreate indexes in quotes table with new names (idempotent)
    if 'quotes' in existing_tables:
        quotes_indexes = [idx['name'] for idx in inspector.get_indexes('quotes')]
        quotes_columns = [col['name'] for col in inspector.get_columns('quotes')]

        # Only create index on quote_number if the column exists
        if 'quote_number' in quotes_columns and 'ix_quotes_quote_number' not in quotes_indexes:
            try:
                op.create_index('ix_quotes_quote_number', 'quotes', ['quote_number'], unique=True)
            except Exception:
                pass

        if 'ix_quotes_client_id' not in quotes_indexes:
            try:
                op.create_index('ix_quotes_client_id', 'quotes', ['client_id'])
            except Exception:
                pass
        if 'ix_quotes_project_id' not in quotes_indexes:
            try:
                op.create_index('ix_quotes_project_id', 'quotes', ['project_id'])
            except Exception:
                pass
        if 'ix_quotes_status' not in quotes_indexes:
            try:
                op.create_index('ix_quotes_status', 'quotes', ['status'])
            except Exception:
                pass
        if 'template_id' in quotes_columns and 'ix_quotes_template_id' not in quotes_indexes:
            try:
                op.create_index('ix_quotes_template_id', 'quotes', ['template_id'])
            except Exception:
                pass

    # Recreate indexes in quotes table with new names
    op.create_index('ix_quotes_quote_number', 'quotes', ['quote_number'], unique=True)
    op.create_index('ix_quotes_client_id', 'quotes', ['client_id'])
    op.create_index('ix_quotes_project_id', 'quotes', ['project_id'])
    op.create_index('ix_quotes_status', 'quotes', ['status'])
    op.create_index('ix_quotes_template_id', 'quotes', ['template_id'])

    # Update foreign key constraints (refresh inspector)
    inspector = inspect(conn)
    if 'quotes' in inspector.get_table_names():
        quotes_fks = [fk['name'] for fk in inspector.get_foreign_keys('quotes')]
        quotes_columns = [col['name'] for col in inspector.get_columns('quotes')]
    else:
        quotes_fks = []
        quotes_columns = []

    # Update foreign key constraints
    op.drop_constraint('offers_client_id_fkey', 'quotes', type_='foreignkey')
    op.create_foreign_key('fk_quotes_client_id', 'quotes', 'clients', ['client_id'], ['id'], ondelete='CASCADE')

    op.drop_constraint('offers_project_id_fkey', 'quotes', type_='foreignkey')
    op.create_foreign_key('fk_quotes_project_id', 'quotes', 'projects', ['project_id'], ['id'], ondelete='SET NULL')

    op.drop_constraint('offers_created_by_fkey', 'quotes', type_='foreignkey')
    op.create_foreign_key('fk_quotes_created_by', 'quotes', 'users', ['created_by'], ['id'], ondelete='CASCADE')

    op.drop_constraint('offers_accepted_by_fkey', 'quotes', type_='foreignkey')
    op.create_foreign_key('fk_quotes_accepted_by', 'quotes', 'users', ['accepted_by'], ['id'], ondelete='SET NULL')

    # Add foreign key for template_id
    op.create_foreign_key('fk_quotes_template_id', 'quotes', 'quote_pdf_templates', ['template_id'], ['id'], ondelete='SET NULL')
    if is_sqlite:
        with op.batch_alter_table('quotes', schema=None) as batch_op:
            # Drop old constraints if they exist
            for old_name in ['offers_client_id_fkey', 'offers_project_id_fkey', 'offers_created_by_fkey', 'offers_accepted_by_fkey']:
                if old_name in quotes_fks:
                    try:
                        batch_op.drop_constraint(old_name, type_='foreignkey')
                    except Exception:
                        pass

            # Create new foreign keys
            if 'fk_quotes_client_id' not in quotes_fks:
                batch_op.create_foreign_key('fk_quotes_client_id', 'clients', ['client_id'], ['id'])
            if 'fk_quotes_project_id' not in quotes_fks:
                batch_op.create_foreign_key('fk_quotes_project_id', 'projects', ['project_id'], ['id'])
            if 'fk_quotes_created_by' not in quotes_fks:
                batch_op.create_foreign_key('fk_quotes_created_by', 'users', ['created_by'], ['id'])
            if 'fk_quotes_accepted_by' not in quotes_fks:
                batch_op.create_foreign_key('fk_quotes_accepted_by', 'users', ['accepted_by'], ['id'])
            if 'fk_quotes_template_id' not in quotes_fks and 'template_id' in quotes_columns:
                batch_op.create_foreign_key('fk_quotes_template_id', 'quote_pdf_templates', ['template_id'], ['id'])
    else:
        # PostgreSQL and other databases
        try:
            op.drop_constraint('offers_client_id_fkey', 'quotes', type_='foreignkey')
        except Exception:
            pass
        op.create_foreign_key('fk_quotes_client_id', 'quotes', 'clients', ['client_id'], ['id'], ondelete='CASCADE')

        try:
            op.drop_constraint('offers_project_id_fkey', 'quotes', type_='foreignkey')
        except Exception:
            pass
        op.create_foreign_key('fk_quotes_project_id', 'quotes', 'projects', ['project_id'], ['id'], ondelete='SET NULL')

        try:
            op.drop_constraint('offers_created_by_fkey', 'quotes', type_='foreignkey')
        except Exception:
            pass
        op.create_foreign_key('fk_quotes_created_by', 'quotes', 'users', ['created_by'], ['id'], ondelete='CASCADE')

        try:
            op.drop_constraint('offers_accepted_by_fkey', 'quotes', type_='foreignkey')
        except Exception:
            pass
        op.create_foreign_key('fk_quotes_accepted_by', 'quotes', 'users', ['accepted_by'], ['id'], ondelete='SET NULL')

        # Add foreign key for template_id
        if 'template_id' in quotes_columns:
            op.create_foreign_key('fk_quotes_template_id', 'quotes', 'quote_pdf_templates', ['template_id'], ['id'], ondelete='SET NULL')

    # Update projects table - rename offer_id to quote_id
    op.alter_column('projects', 'offer_id', new_column_name='quote_id', existing_type=sa.Integer(), existing_nullable=True)
    op.drop_index('ix_projects_offer_id', 'projects')
    op.create_index('ix_projects_quote_id', 'projects', ['quote_id'])
    op.drop_constraint('fk_projects_offer_id', 'projects', type_='foreignkey')
    op.create_foreign_key('fk_projects_quote_id', 'projects', 'quotes', ['quote_id'], ['id'], ondelete='SET NULL')
    if 'projects' in inspector.get_table_names():
        projects_columns = [col['name'] for col in inspector.get_columns('projects')]
        projects_indexes = [idx['name'] for idx in inspector.get_indexes('projects')]
        projects_fks = [fk['name'] for fk in inspector.get_foreign_keys('projects')]

        if 'offer_id' in projects_columns and 'quote_id' not in projects_columns:
            if is_sqlite:
                with op.batch_alter_table('projects', schema=None) as batch_op:
                    batch_op.alter_column('offer_id', new_column_name='quote_id')
                    if 'fk_projects_offer_id' in projects_fks:
                        batch_op.drop_constraint('fk_projects_offer_id', type_='foreignkey')
                    batch_op.create_foreign_key('fk_projects_quote_id', 'quotes', ['quote_id'], ['id'])
            else:
                # PostgreSQL: Drop constraints and indexes BEFORE renaming
                if 'fk_projects_offer_id' in projects_fks:
                    try:
                        op.drop_constraint('fk_projects_offer_id', 'projects', type_='foreignkey')
                    except Exception:
                        pass
                if 'ix_projects_offer_id' in projects_indexes:
                    try:
                        op.drop_index('ix_projects_offer_id', 'projects')
                    except Exception:
                        pass
                # Now rename the column
                op.alter_column('projects', 'offer_id', new_column_name='quote_id', existing_type=sa.Integer(), existing_nullable=True)
                # Create new index and foreign key
                if 'ix_projects_quote_id' not in projects_indexes:
                    try:
                        op.create_index('ix_projects_quote_id', 'projects', ['quote_id'])
                    except Exception:
                        pass
                if 'fk_projects_quote_id' not in projects_fks:
                    op.create_foreign_key('fk_projects_quote_id', 'projects', 'quotes', ['quote_id'], ['id'], ondelete='SET NULL')

# Update invoices table - rename offer_id to quote_id
|
||||
op.alter_column('invoices', 'offer_id', new_column_name='quote_id', existing_type=sa.Integer(), existing_nullable=True)
|
||||
op.drop_index('ix_invoices_offer_id', 'invoices')
|
||||
op.create_index('ix_invoices_quote_id', 'invoices', ['quote_id'])
|
||||
op.drop_constraint('fk_invoices_offer_id', 'invoices', type_='foreignkey')
|
||||
op.create_foreign_key('fk_invoices_quote_id', 'invoices', 'quotes', ['quote_id'], ['id'], ondelete='SET NULL')
|
||||
if 'invoices' in inspector.get_table_names():
|
||||
invoices_columns = [col['name'] for col in inspector.get_columns('invoices')]
|
||||
invoices_indexes = [idx['name'] for idx in inspector.get_indexes('invoices')]
|
||||
invoices_fks = [fk['name'] for fk in inspector.get_foreign_keys('invoices')]
|
||||
|
||||
if 'offer_id' in invoices_columns and 'quote_id' not in invoices_columns:
|
||||
if is_sqlite:
|
||||
with op.batch_alter_table('invoices', schema=None) as batch_op:
|
||||
batch_op.alter_column('offer_id', new_column_name='quote_id')
|
||||
if 'fk_invoices_offer_id' in invoices_fks:
|
||||
batch_op.drop_constraint('fk_invoices_offer_id', type_='foreignkey')
|
||||
batch_op.create_foreign_key('fk_invoices_quote_id', 'quotes', ['quote_id'], ['id'])
|
||||
else:
|
||||
# PostgreSQL: Drop constraints and indexes BEFORE renaming
|
||||
if 'fk_invoices_offer_id' in invoices_fks:
|
||||
try:
|
||||
op.drop_constraint('fk_invoices_offer_id', 'invoices', type_='foreignkey')
|
||||
except:
|
||||
pass
|
||||
if 'ix_invoices_offer_id' in invoices_indexes:
|
||||
try:
|
||||
op.drop_index('ix_invoices_offer_id', 'invoices')
|
||||
except:
|
||||
pass
|
||||
# Now rename the column
|
||||
op.alter_column('invoices', 'offer_id', new_column_name='quote_id', existing_type=sa.Integer(), existing_nullable=True)
|
||||
# Create new index and foreign key
|
||||
if 'ix_invoices_quote_id' not in invoices_indexes:
|
||||
try:
|
||||
op.create_index('ix_invoices_quote_id', 'invoices', ['quote_id'])
|
||||
except:
|
||||
pass
|
||||
if 'fk_invoices_quote_id' not in invoices_fks:
|
||||
op.create_foreign_key('fk_invoices_quote_id', 'invoices', 'quotes', ['quote_id'], ['id'], ondelete='SET NULL')
|
||||
|
||||
|
||||
def downgrade():
|
||||
|
||||
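Both rename branches above follow the same PostgreSQL ordering: drop the dependent foreign key and index first, rename the column, then recreate the index and foreign key under the new names. Condensed to its essentials, and reusing the names from the hunk purely as an illustration, the sequence is:

import sqlalchemy as sa
from alembic import op

# Illustrative condensation of the pattern above, without the existence checks.
op.drop_constraint('fk_projects_offer_id', 'projects', type_='foreignkey')
op.drop_index('ix_projects_offer_id', 'projects')
op.alter_column('projects', 'offer_id', new_column_name='quote_id',
                existing_type=sa.Integer(), existing_nullable=True)
op.create_index('ix_projects_quote_id', 'projects', ['quote_id'])
op.create_foreign_key('fk_projects_quote_id', 'projects', 'quotes',
                      ['quote_id'], ['id'], ondelete='SET NULL')

On SQLite the same result goes through op.batch_alter_table, which rebuilds the table, because SQLite's ALTER TABLE cannot drop constraints in place.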
@@ -17,21 +17,42 @@ depends_on = None

def upgrade():
    """Add quote_id and is_internal fields to comments table"""
    # Add quote_id column
    op.add_column('comments',
        sa.Column('quote_id', sa.Integer(), nullable=True)
    )
    from sqlalchemy import inspect
    conn = op.get_bind()
    inspector = inspect(conn)
    is_sqlite = conn.dialect.name == 'sqlite'
    existing_tables = inspector.get_table_names()

    # Add is_internal column (True = internal team comment, False = client-visible)
    op.add_column('comments',
        sa.Column('is_internal', sa.Boolean(), nullable=False, server_default='true')
    )
    if 'comments' not in existing_tables:
        return

    # Create index on quote_id
    op.create_index('ix_comments_quote_id', 'comments', ['quote_id'], unique=False)
    comments_columns = [col['name'] for col in inspector.get_columns('comments')]
    comments_indexes = [idx['name'] for idx in inspector.get_indexes('comments')]
    comments_fks = [fk['name'] for fk in inspector.get_foreign_keys('comments')]

    # Add foreign key constraint
    op.create_foreign_key('fk_comments_quote_id', 'comments', 'quotes', ['quote_id'], ['id'], ondelete='CASCADE')
    # Add quote_id column (idempotent)
    if 'quote_id' not in comments_columns:
        op.add_column('comments',
            sa.Column('quote_id', sa.Integer(), nullable=True)
        )

    # Add is_internal column (True = internal team comment, False = client-visible) (idempotent)
    if 'is_internal' not in comments_columns:
        op.add_column('comments',
            sa.Column('is_internal', sa.Boolean(), nullable=False, server_default='true')
        )

    # Create index on quote_id (idempotent)
    if 'quote_id' in comments_columns and 'ix_comments_quote_id' not in comments_indexes:
        op.create_index('ix_comments_quote_id', 'comments', ['quote_id'], unique=False)

    # Add foreign key constraint (idempotent)
    if 'quote_id' in comments_columns and 'fk_comments_quote_id' not in comments_fks:
        if is_sqlite:
            with op.batch_alter_table('comments', schema=None) as batch_op:
                batch_op.create_foreign_key('fk_comments_quote_id', 'quotes', ['quote_id'], ['id'])
        else:
            op.create_foreign_key('fk_comments_quote_id', 'comments', 'quotes', ['quote_id'], ['id'], ondelete='CASCADE')


def downgrade():
@@ -79,11 +79,22 @@ def upgrade():
    op.create_index('ix_quotes_approved_by', 'quotes', ['approved_by'], unique=False)

    # Add foreign keys (if they don't exist)
    if not constraint_exists('quotes', 'fk_quotes_approved_by'):
        op.create_foreign_key('fk_quotes_approved_by', 'quotes', 'users', ['approved_by'], ['id'], ondelete='SET NULL')
    bind = op.get_bind()
    is_sqlite = bind.dialect.name == 'sqlite'

    if not constraint_exists('quotes', 'fk_quotes_rejected_by'):
        op.create_foreign_key('fk_quotes_rejected_by', 'quotes', 'users', ['rejected_by'], ['id'], ondelete='SET NULL')
    if not constraint_exists('quotes', 'fk_quotes_approved_by') and column_exists('quotes', 'approved_by'):
        if is_sqlite:
            with op.batch_alter_table('quotes', schema=None) as batch_op:
                batch_op.create_foreign_key('fk_quotes_approved_by', 'users', ['approved_by'], ['id'])
        else:
            op.create_foreign_key('fk_quotes_approved_by', 'quotes', 'users', ['approved_by'], ['id'], ondelete='SET NULL')

    if not constraint_exists('quotes', 'fk_quotes_rejected_by') and column_exists('quotes', 'rejected_by'):
        if is_sqlite:
            with op.batch_alter_table('quotes', schema=None) as batch_op:
                batch_op.create_foreign_key('fk_quotes_rejected_by', 'users', ['rejected_by'], ['id'])
        else:
            op.create_foreign_key('fk_quotes_rejected_by', 'quotes', 'users', ['rejected_by'], ['id'], ondelete='SET NULL')


def downgrade():
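This hunk relies on constraint_exists and column_exists helpers defined earlier in the migration and not shown in this diff. A minimal sketch of what inspector-based helpers of that kind usually look like; the exact names and signatures here are assumptions:

from alembic import op
from sqlalchemy import inspect


def column_exists(table_name, column_name):
    # Re-inspect on every call so the answer stays accurate after
    # earlier operations in the same migration.
    inspector = inspect(op.get_bind())
    return column_name in {c['name'] for c in inspector.get_columns(table_name)}


def constraint_exists(table_name, constraint_name):
    # Foreign keys, unique constraints and check constraints come from
    # separate inspector calls; this sketch only covers foreign keys.
    inspector = inspect(op.get_bind())
    return constraint_name in {fk['name'] for fk in inspector.get_foreign_keys(table_name)}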
@@ -170,26 +170,86 @@ def upgrade():
    op.create_index('ix_project_stock_allocations_warehouse_id', 'project_stock_allocations', ['warehouse_id'], unique=False)
    op.create_index('ix_project_stock_allocations_allocated_by', 'project_stock_allocations', ['allocated_by'], unique=False)

    # Add inventory fields to quote_items
    op.add_column('quote_items', sa.Column('stock_item_id', sa.Integer(), nullable=True))
    op.add_column('quote_items', sa.Column('warehouse_id', sa.Integer(), nullable=True))
    op.add_column('quote_items', sa.Column('is_stock_item', sa.Boolean(), nullable=False, server_default='0'))
    op.create_index('ix_quote_items_stock_item_id', 'quote_items', ['stock_item_id'], unique=False)
    op.create_foreign_key('fk_quote_items_stock_item_id', 'quote_items', 'stock_items', ['stock_item_id'], ['id'])
    op.create_foreign_key('fk_quote_items_warehouse_id', 'quote_items', 'warehouses', ['warehouse_id'], ['id'])
    # Add inventory fields to quote_items (idempotent)
    from sqlalchemy import inspect
    conn = op.get_bind()
    inspector = inspect(conn)
    is_sqlite = conn.dialect.name == 'sqlite'
    existing_tables = inspector.get_table_names()

    # Add inventory fields to invoice_items
    op.add_column('invoice_items', sa.Column('stock_item_id', sa.Integer(), nullable=True))
    op.add_column('invoice_items', sa.Column('warehouse_id', sa.Integer(), nullable=True))
    op.add_column('invoice_items', sa.Column('is_stock_item', sa.Boolean(), nullable=False, server_default='0'))
    op.create_index('ix_invoice_items_stock_item_id', 'invoice_items', ['stock_item_id'], unique=False)
    op.create_foreign_key('fk_invoice_items_stock_item_id', 'invoice_items', 'stock_items', ['stock_item_id'], ['id'])
    op.create_foreign_key('fk_invoice_items_warehouse_id', 'invoice_items', 'warehouses', ['warehouse_id'], ['id'])
    if 'quote_items' in existing_tables:
        quote_items_columns = [col['name'] for col in inspector.get_columns('quote_items')]
        quote_items_indexes = [idx['name'] for idx in inspector.get_indexes('quote_items')]
        quote_items_fks = [fk['name'] for fk in inspector.get_foreign_keys('quote_items')]

        if 'stock_item_id' not in quote_items_columns:
            op.add_column('quote_items', sa.Column('stock_item_id', sa.Integer(), nullable=True))
        if 'warehouse_id' not in quote_items_columns:
            op.add_column('quote_items', sa.Column('warehouse_id', sa.Integer(), nullable=True))
        if 'is_stock_item' not in quote_items_columns:
            op.add_column('quote_items', sa.Column('is_stock_item', sa.Boolean(), nullable=False, server_default='0'))

        if 'stock_item_id' in quote_items_columns and 'ix_quote_items_stock_item_id' not in quote_items_indexes:
            op.create_index('ix_quote_items_stock_item_id', 'quote_items', ['stock_item_id'], unique=False)

        if is_sqlite:
            with op.batch_alter_table('quote_items', schema=None) as batch_op:
                if 'stock_item_id' in quote_items_columns and 'fk_quote_items_stock_item_id' not in quote_items_fks:
                    batch_op.create_foreign_key('fk_quote_items_stock_item_id', 'stock_items', ['stock_item_id'], ['id'])
                if 'warehouse_id' in quote_items_columns and 'fk_quote_items_warehouse_id' not in quote_items_fks:
                    batch_op.create_foreign_key('fk_quote_items_warehouse_id', 'warehouses', ['warehouse_id'], ['id'])
        else:
            if 'stock_item_id' in quote_items_columns and 'fk_quote_items_stock_item_id' not in quote_items_fks:
                op.create_foreign_key('fk_quote_items_stock_item_id', 'quote_items', 'stock_items', ['stock_item_id'], ['id'])
            if 'warehouse_id' in quote_items_columns and 'fk_quote_items_warehouse_id' not in quote_items_fks:
                op.create_foreign_key('fk_quote_items_warehouse_id', 'quote_items', 'warehouses', ['warehouse_id'], ['id'])

    # Add inventory field to extra_goods
    op.add_column('extra_goods', sa.Column('stock_item_id', sa.Integer(), nullable=True))
    op.create_index('ix_extra_goods_stock_item_id', 'extra_goods', ['stock_item_id'], unique=False)
    op.create_foreign_key('fk_extra_goods_stock_item_id', 'extra_goods', 'stock_items', ['stock_item_id'], ['id'])
    # Add inventory fields to invoice_items (idempotent)
    if 'invoice_items' in existing_tables:
        invoice_items_columns = [col['name'] for col in inspector.get_columns('invoice_items')]
        invoice_items_indexes = [idx['name'] for idx in inspector.get_indexes('invoice_items')]
        invoice_items_fks = [fk['name'] for fk in inspector.get_foreign_keys('invoice_items')]

        if 'stock_item_id' not in invoice_items_columns:
            op.add_column('invoice_items', sa.Column('stock_item_id', sa.Integer(), nullable=True))
        if 'warehouse_id' not in invoice_items_columns:
            op.add_column('invoice_items', sa.Column('warehouse_id', sa.Integer(), nullable=True))
        if 'is_stock_item' not in invoice_items_columns:
            op.add_column('invoice_items', sa.Column('is_stock_item', sa.Boolean(), nullable=False, server_default='0'))

        if 'stock_item_id' in invoice_items_columns and 'ix_invoice_items_stock_item_id' not in invoice_items_indexes:
            op.create_index('ix_invoice_items_stock_item_id', 'invoice_items', ['stock_item_id'], unique=False)

        if is_sqlite:
            with op.batch_alter_table('invoice_items', schema=None) as batch_op:
                if 'stock_item_id' in invoice_items_columns and 'fk_invoice_items_stock_item_id' not in invoice_items_fks:
                    batch_op.create_foreign_key('fk_invoice_items_stock_item_id', 'stock_items', ['stock_item_id'], ['id'])
                if 'warehouse_id' in invoice_items_columns and 'fk_invoice_items_warehouse_id' not in invoice_items_fks:
                    batch_op.create_foreign_key('fk_invoice_items_warehouse_id', 'warehouses', ['warehouse_id'], ['id'])
        else:
            if 'stock_item_id' in invoice_items_columns and 'fk_invoice_items_stock_item_id' not in invoice_items_fks:
                op.create_foreign_key('fk_invoice_items_stock_item_id', 'invoice_items', 'stock_items', ['stock_item_id'], ['id'])
            if 'warehouse_id' in invoice_items_columns and 'fk_invoice_items_warehouse_id' not in invoice_items_fks:
                op.create_foreign_key('fk_invoice_items_warehouse_id', 'invoice_items', 'warehouses', ['warehouse_id'], ['id'])

    # Add inventory field to extra_goods (idempotent)
    if 'extra_goods' in existing_tables:
        extra_goods_columns = [col['name'] for col in inspector.get_columns('extra_goods')]
        extra_goods_indexes = [idx['name'] for idx in inspector.get_indexes('extra_goods')]
        extra_goods_fks = [fk['name'] for fk in inspector.get_foreign_keys('extra_goods')]

        if 'stock_item_id' not in extra_goods_columns:
            op.add_column('extra_goods', sa.Column('stock_item_id', sa.Integer(), nullable=True))

        if 'stock_item_id' in extra_goods_columns and 'ix_extra_goods_stock_item_id' not in extra_goods_indexes:
            op.create_index('ix_extra_goods_stock_item_id', 'extra_goods', ['stock_item_id'], unique=False)

        if 'stock_item_id' in extra_goods_columns and 'fk_extra_goods_stock_item_id' not in extra_goods_fks:
            if is_sqlite:
                with op.batch_alter_table('extra_goods', schema=None) as batch_op:
                    batch_op.create_foreign_key('fk_extra_goods_stock_item_id', 'stock_items', ['stock_item_id'], ['id'])
            else:
                op.create_foreign_key('fk_extra_goods_stock_item_id', 'extra_goods', 'stock_items', ['stock_item_id'], ['id'])


def downgrade():
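The quote_items, invoice_items and extra_goods branches above repeat the same ensure-column and ensure-index steps. A hedged sketch of helpers that would collapse the repetition; the function names are hypothetical and not part of the migration:

import sqlalchemy as sa
from alembic import op
from sqlalchemy import inspect


def ensure_column(table, column):
    # Add the column only when it is not already present.
    inspector = inspect(op.get_bind())
    if column.name not in {c['name'] for c in inspector.get_columns(table)}:
        op.add_column(table, column)


def ensure_index(table, name, columns):
    # Create the index only when no index of that name exists yet.
    inspector = inspect(op.get_bind())
    if name not in {ix['name'] for ix in inspector.get_indexes(table)}:
        op.create_index(name, table, columns, unique=False)


# Equivalent to one branch of the hunk above:
# ensure_column('quote_items', sa.Column('stock_item_id', sa.Integer(), nullable=True))
# ensure_index('quote_items', 'ix_quote_items_stock_item_id', ['stock_item_id'])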
@@ -203,14 +203,34 @@ def upgrade():
    op.create_index(op.f('ix_deal_activities_activity_date'), 'deal_activities', ['activity_date'], unique=False)

    # Add foreign key for related_deal_id in contact_communications (deferred)
    # This is done after deals table is created
    op.create_foreign_key(
        'fk_contact_communications_related_deal_id',
        'contact_communications',
        'deals',
        ['related_deal_id'],
        ['id']
    )
    # This is done after deals table is created (idempotent)
    from sqlalchemy import inspect
    conn = op.get_bind()
    inspector = inspect(conn)
    is_sqlite = conn.dialect.name == 'sqlite'
    existing_tables = inspector.get_table_names()

    if 'contact_communications' in existing_tables and 'deals' in existing_tables:
        contact_comm_columns = [col['name'] for col in inspector.get_columns('contact_communications')]
        contact_comm_fks = [fk['name'] for fk in inspector.get_foreign_keys('contact_communications')]

        if 'related_deal_id' in contact_comm_columns and 'fk_contact_communications_related_deal_id' not in contact_comm_fks:
            if is_sqlite:
                with op.batch_alter_table('contact_communications', schema=None) as batch_op:
                    batch_op.create_foreign_key(
                        'fk_contact_communications_related_deal_id',
                        'deals',
                        ['related_deal_id'],
                        ['id']
                    )
            else:
                op.create_foreign_key(
                    'fk_contact_communications_related_deal_id',
                    'contact_communications',
                    'deals',
                    ['related_deal_id'],
                    ['id']
                )


def downgrade():
@@ -50,31 +50,57 @@ def upgrade():
    except Exception:
        pass

    conn = op.get_bind()
    is_sqlite = conn.dialect.name == 'sqlite'

    # Make project_id nullable
    op.alter_column('time_entries', 'project_id',
                    existing_type=sa.Integer(),
                    nullable=True)
    if is_sqlite:
        with op.batch_alter_table('time_entries', schema=None) as batch_op:
            batch_op.alter_column('project_id', nullable=True)

            # Add client_id column if it doesn't exist
            if not _has_column(inspector, 'time_entries', 'client_id'):
                batch_op.add_column(sa.Column('client_id', sa.Integer(), nullable=True))

            # Recreate foreign key constraint for project_id (nullable)
            batch_op.create_foreign_key(
                'fk_time_entries_project_id',
                'projects',
                ['project_id'], ['id']
            )

            # Add foreign key constraint for client_id
            if _has_column(inspector, 'time_entries', 'client_id'):
                batch_op.create_foreign_key(
                    'fk_time_entries_client_id',
                    'clients',
                    ['client_id'], ['id']
                )
    else:
        op.alter_column('time_entries', 'project_id',
                        existing_type=sa.Integer(),
                        nullable=True)

    # Add client_id column if it doesn't exist
    if not _has_column(inspector, 'time_entries', 'client_id'):
        op.add_column('time_entries', sa.Column('client_id', sa.Integer(), nullable=True))
        op.create_index('idx_time_entries_client_id', 'time_entries', ['client_id'])
        # Add client_id column if it doesn't exist
        if not _has_column(inspector, 'time_entries', 'client_id'):
            op.add_column('time_entries', sa.Column('client_id', sa.Integer(), nullable=True))
            op.create_index('idx_time_entries_client_id', 'time_entries', ['client_id'])

    # Recreate foreign key constraint for project_id (nullable)
    op.create_foreign_key(
        'fk_time_entries_project_id',
        'time_entries', 'projects',
        ['project_id'], ['id'],
        ondelete='CASCADE'
    )
        # Recreate foreign key constraint for project_id (nullable)
        op.create_foreign_key(
            'fk_time_entries_project_id',
            'time_entries', 'projects',
            ['project_id'], ['id'],
            ondelete='CASCADE'
        )

    # Add foreign key constraint for client_id
    op.create_foreign_key(
        'fk_time_entries_client_id',
        'time_entries', 'clients',
        ['client_id'], ['id'],
        ondelete='CASCADE'
    )
        # Add foreign key constraint for client_id
        op.create_foreign_key(
            'fk_time_entries_client_id',
            'time_entries', 'clients',
            ['client_id'], ['id'],
            ondelete='CASCADE'
        )

    # Add check constraint to ensure either project_id or client_id is provided
    # Note: PostgreSQL check constraints can't directly check for NULL, so we use a function
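The closing note refers to enforcement logic that sits outside this hunk. For comparison, if the rule were expressed as an ordinary CHECK constraint rather than a function, a sketch would look like this; the constraint name and exact shape are assumptions, not taken from the diff:

import sqlalchemy as sa
from alembic import op

# Hypothetical constraint; both PostgreSQL and SQLite accept IS NOT NULL inside CHECK.
op.create_check_constraint(
    'ck_time_entries_project_or_client',
    'time_entries',
    sa.text('project_id IS NOT NULL OR client_id IS NOT NULL'),
)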
@@ -18,134 +18,69 @@ depends_on = None

def upgrade():
    """Add UI feature flag fields to users table"""
    from sqlalchemy import inspect
    bind = op.get_bind()
    inspector = inspect(bind)
    dialect_name = bind.dialect.name if bind else 'generic'

    # Check existing columns (idempotent)
    existing_tables = inspector.get_table_names()
    if 'users' not in existing_tables:
        return

    users_columns = {c['name'] for c in inspector.get_columns('users')}

    # Helper function to add column if it doesn't exist
    def _add_column_if_missing(column_name, description=""):
        if column_name in users_columns:
            print(f"✓ Column {column_name} already exists in users table")
            return
        try:
            op.add_column('users', sa.Column(column_name, sa.Boolean(), nullable=False, server_default='1'))
            print(f"✓ Added {column_name} column to users table{(' - ' + description) if description else ''}")
        except Exception as e:
            error_msg = str(e)
            if 'already exists' in error_msg.lower() or 'duplicate' in error_msg.lower():
                print(f"✓ Column {column_name} already exists in users table (detected via error)")
            else:
                print(f"⚠ Warning adding {column_name} column: {e}")
                raise

    # Add UI feature flags to users table
    # All default to True (enabled) for backward compatibility
    try:
        # Show/hide Inventory section in navigation
        op.add_column('users', sa.Column('ui_show_inventory', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_inventory column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_inventory column: {e}")
    _add_column_if_missing('ui_show_inventory', 'Show/hide Inventory section in navigation')

    try:
        # Show/hide Mileage under Finance & Expenses
        op.add_column('users', sa.Column('ui_show_mileage', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_mileage column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_mileage column: {e}")

    try:
        # Show/hide Per Diem under Finance & Expenses
        op.add_column('users', sa.Column('ui_show_per_diem', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_per_diem column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_per_diem column: {e}")

    try:
        # Show/hide Kanban Board under Time Tracking
        op.add_column('users', sa.Column('ui_show_kanban_board', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_kanban_board column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_kanban_board column: {e}")
    _add_column_if_missing('ui_show_mileage', 'Show/hide Mileage under Finance & Expenses')
    _add_column_if_missing('ui_show_per_diem', 'Show/hide Per Diem under Finance & Expenses')
    _add_column_if_missing('ui_show_kanban_board', 'Show/hide Kanban Board under Time Tracking')

    # Calendar section
    try:
        op.add_column('users', sa.Column('ui_show_calendar', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_calendar column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_calendar column: {e}")
    _add_column_if_missing('ui_show_calendar', 'Show/hide Calendar section')

    # Time Tracking section items
    try:
        op.add_column('users', sa.Column('ui_show_project_templates', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_project_templates column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_project_templates column: {e}")

    try:
        op.add_column('users', sa.Column('ui_show_gantt_chart', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_gantt_chart column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_gantt_chart column: {e}")

    try:
        op.add_column('users', sa.Column('ui_show_weekly_goals', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_weekly_goals column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_weekly_goals column: {e}")
    _add_column_if_missing('ui_show_project_templates', 'Show/hide Project Templates')
    _add_column_if_missing('ui_show_gantt_chart', 'Show/hide Gantt Chart')
    _add_column_if_missing('ui_show_weekly_goals', 'Show/hide Weekly Goals')
    _add_column_if_missing('ui_show_issues', 'Show/hide Issues feature')

    # CRM section
    try:
        op.add_column('users', sa.Column('ui_show_quotes', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_quotes column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_quotes column: {e}")
    _add_column_if_missing('ui_show_quotes', 'Show/hide Quotes')

    # Finance & Expenses section items
    try:
        op.add_column('users', sa.Column('ui_show_reports', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_reports column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_reports column: {e}")

    try:
        op.add_column('users', sa.Column('ui_show_report_builder', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_report_builder column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_report_builder column: {e}")

    try:
        op.add_column('users', sa.Column('ui_show_scheduled_reports', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_scheduled_reports column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_scheduled_reports column: {e}")

    try:
        op.add_column('users', sa.Column('ui_show_invoice_approvals', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_invoice_approvals column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_invoice_approvals column: {e}")

    try:
        op.add_column('users', sa.Column('ui_show_payment_gateways', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_payment_gateways column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_payment_gateways column: {e}")

    try:
        op.add_column('users', sa.Column('ui_show_recurring_invoices', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_recurring_invoices column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_recurring_invoices column: {e}")

    try:
        op.add_column('users', sa.Column('ui_show_payments', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_payments column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_payments column: {e}")

    try:
        op.add_column('users', sa.Column('ui_show_budget_alerts', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_budget_alerts column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_budget_alerts column: {e}")
    _add_column_if_missing('ui_show_reports', 'Show/hide Reports')
    _add_column_if_missing('ui_show_report_builder', 'Show/hide Report Builder')
    _add_column_if_missing('ui_show_scheduled_reports', 'Show/hide Scheduled Reports')
    _add_column_if_missing('ui_show_invoice_approvals', 'Show/hide Invoice Approvals')
    _add_column_if_missing('ui_show_payment_gateways', 'Show/hide Payment Gateways')
    _add_column_if_missing('ui_show_recurring_invoices', 'Show/hide Recurring Invoices')
    _add_column_if_missing('ui_show_payments', 'Show/hide Payments')
    _add_column_if_missing('ui_show_budget_alerts', 'Show/hide Budget Alerts')

    # Analytics
    try:
        op.add_column('users', sa.Column('ui_show_analytics', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_analytics column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_analytics column: {e}")
    _add_column_if_missing('ui_show_analytics', 'Show/hide Analytics')

    # Tools & Data section
    try:
        op.add_column('users', sa.Column('ui_show_tools', sa.Boolean(), nullable=False, server_default='1'))
        print("✓ Added ui_show_tools column to users table")
    except Exception as e:
        print(f"⚠ Warning adding ui_show_tools column: {e}")
    _add_column_if_missing('ui_show_tools', 'Show/hide Tools & Data section')


def downgrade():
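_add_column_if_missing hardcodes server_default='1', which both SQLite and PostgreSQL accept as a boolean literal. Migrations 095 and 096 later in this diff choose the literal per dialect instead; a sketch of that variant, with a hypothetical flag name:

import sqlalchemy as sa
from alembic import op

bind = op.get_bind()
dialect_name = bind.dialect.name if bind else 'generic'
# 'true' is the conventional PostgreSQL literal; SQLite stores booleans as integers.
bool_true_default = 'true' if dialect_name == 'postgresql' else '1'

op.add_column('users', sa.Column('ui_show_example', sa.Boolean(),  # illustrative name
                                 nullable=False,
                                 server_default=sa.text(bool_true_default)))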
@@ -23,6 +23,7 @@ def upgrade():
    """Rename metadata columns to match model definitions"""
    bind = op.get_bind()
    inspector = sa.inspect(bind)
    is_sqlite = bind.dialect.name == 'sqlite'
    table_names = set(inspector.get_table_names())

    # 1. Rename user_badges.metadata -> user_badges.achievement_metadata
@@ -30,10 +31,14 @@ def upgrade():
        user_badges_cols = {c['name'] for c in inspector.get_columns('user_badges')}
        if 'metadata' in user_badges_cols and 'achievement_metadata' not in user_badges_cols:
            try:
                op.alter_column('user_badges', 'metadata',
                                new_column_name='achievement_metadata',
                                existing_type=sa.JSON(),
                                existing_nullable=True)
                if is_sqlite:
                    with op.batch_alter_table('user_badges', schema=None) as batch_op:
                        batch_op.alter_column('metadata', new_column_name='achievement_metadata')
                else:
                    op.alter_column('user_badges', 'metadata',
                                    new_column_name='achievement_metadata',
                                    existing_type=sa.JSON(),
                                    existing_nullable=True)
                print("✓ Renamed user_badges.metadata to achievement_metadata")
            except Exception as e:
                print(f"⚠ Warning renaming user_badges.metadata column: {e}")
@@ -49,10 +54,14 @@ def upgrade():
        leaderboard_entries_cols = {c['name'] for c in inspector.get_columns('leaderboard_entries')}
        if 'metadata' in leaderboard_entries_cols and 'entry_metadata' not in leaderboard_entries_cols:
            try:
                op.alter_column('leaderboard_entries', 'metadata',
                                new_column_name='entry_metadata',
                                existing_type=sa.JSON(),
                                existing_nullable=True)
                if is_sqlite:
                    with op.batch_alter_table('leaderboard_entries', schema=None) as batch_op:
                        batch_op.alter_column('metadata', new_column_name='entry_metadata')
                else:
                    op.alter_column('leaderboard_entries', 'metadata',
                                    new_column_name='entry_metadata',
                                    existing_type=sa.JSON(),
                                    existing_nullable=True)
                print("✓ Renamed leaderboard_entries.metadata to entry_metadata")
            except Exception as e:
                print(f"⚠ Warning renaming leaderboard_entries.metadata column: {e}")
@@ -68,6 +77,7 @@ def downgrade():
    """Rename columns back to original metadata names"""
    bind = op.get_bind()
    inspector = sa.inspect(bind)
    is_sqlite = bind.dialect.name == 'sqlite'
    table_names = set(inspector.get_table_names())

    # 1. Rename user_badges.achievement_metadata back to metadata
@@ -75,10 +85,14 @@ def downgrade():
        user_badges_cols = {c['name'] for c in inspector.get_columns('user_badges')}
        if 'achievement_metadata' in user_badges_cols and 'metadata' not in user_badges_cols:
            try:
                op.alter_column('user_badges', 'achievement_metadata',
                                new_column_name='metadata',
                                existing_type=sa.JSON(),
                                existing_nullable=True)
                if is_sqlite:
                    with op.batch_alter_table('user_badges', schema=None) as batch_op:
                        batch_op.alter_column('achievement_metadata', new_column_name='metadata')
                else:
                    op.alter_column('user_badges', 'achievement_metadata',
                                    new_column_name='metadata',
                                    existing_type=sa.JSON(),
                                    existing_nullable=True)
                print("✓ Renamed user_badges.achievement_metadata back to metadata")
            except Exception as e:
                print(f"⚠ Warning renaming user_badges.achievement_metadata column: {e}")
@@ -94,10 +108,14 @@ def downgrade():
        leaderboard_entries_cols = {c['name'] for c in inspector.get_columns('leaderboard_entries')}
        if 'entry_metadata' in leaderboard_entries_cols and 'metadata' not in leaderboard_entries_cols:
            try:
                op.alter_column('leaderboard_entries', 'entry_metadata',
                                new_column_name='metadata',
                                existing_type=sa.JSON(),
                                existing_nullable=True)
                if is_sqlite:
                    with op.batch_alter_table('leaderboard_entries', schema=None) as batch_op:
                        batch_op.alter_column('entry_metadata', new_column_name='metadata')
                else:
                    op.alter_column('leaderboard_entries', 'entry_metadata',
                                    new_column_name='metadata',
                                    existing_type=sa.JSON(),
                                    existing_nullable=True)
                print("✓ Renamed leaderboard_entries.entry_metadata back to metadata")
            except Exception as e:
                print(f"⚠ Warning renaming leaderboard_entries.entry_metadata column: {e}")
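The rename-with-batch-mode branch recurs six times across this file's upgrade and downgrade. A hedged helper that captures it once; the function is hypothetical, not part of the migration:

import sqlalchemy as sa
from alembic import op


def _rename_json_column(table, old, new, is_sqlite):
    # SQLite cannot ALTER COLUMN in place; batch mode rebuilds the table.
    if is_sqlite:
        with op.batch_alter_table(table, schema=None) as batch_op:
            batch_op.alter_column(old, new_column_name=new)
    else:
        op.alter_column(table, old,
                        new_column_name=new,
                        existing_type=sa.JSON(),
                        existing_nullable=True)

# Usage: _rename_json_column('user_badges', 'metadata', 'achievement_metadata', is_sqlite)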
@@ -25,6 +25,7 @@ def upgrade():
    """Ensure metadata columns have correct names"""
    bind = op.get_bind()
    inspector = sa.inspect(bind)
    is_sqlite = bind.dialect.name == 'sqlite'
    table_names = set(inspector.get_table_names())

    # 1. Fix user_badges.achievement_metadata
@@ -36,25 +37,37 @@ def upgrade():
        elif 'metadata' in user_badges_cols:
            # Rename metadata to achievement_metadata
            try:
                op.alter_column('user_badges', 'metadata',
                                new_column_name='achievement_metadata',
                                existing_type=sa.JSON(),
                                existing_nullable=True)
                if is_sqlite:
                    with op.batch_alter_table('user_badges', schema=None) as batch_op:
                        batch_op.alter_column('metadata', new_column_name='achievement_metadata')
                else:
                    op.alter_column('user_badges', 'metadata',
                                    new_column_name='achievement_metadata',
                                    existing_type=sa.JSON(),
                                    existing_nullable=True)
                print("✓ Renamed user_badges.metadata to achievement_metadata")
            except Exception as e:
                print(f"⚠ Error renaming user_badges.metadata: {e}")
                # If rename fails, try adding the column instead
                try:
                    op.add_column('user_badges',
                                  sa.Column('achievement_metadata', sa.JSON(), nullable=True))
                    if is_sqlite:
                        with op.batch_alter_table('user_badges', schema=None) as batch_op:
                            batch_op.add_column(sa.Column('achievement_metadata', sa.JSON(), nullable=True))
                    else:
                        op.add_column('user_badges',
                                      sa.Column('achievement_metadata', sa.JSON(), nullable=True))
                    print("✓ Added user_badges.achievement_metadata column")
                except Exception as e2:
                    print(f"⚠ Error adding user_badges.achievement_metadata: {e2}")
        else:
            # Neither column exists, add the correct one
            try:
                op.add_column('user_badges',
                              sa.Column('achievement_metadata', sa.JSON(), nullable=True))
                if is_sqlite:
                    with op.batch_alter_table('user_badges', schema=None) as batch_op:
                        batch_op.add_column(sa.Column('achievement_metadata', sa.JSON(), nullable=True))
                else:
                    op.add_column('user_badges',
                                  sa.Column('achievement_metadata', sa.JSON(), nullable=True))
                print("✓ Added user_badges.achievement_metadata column")
            except Exception as e:
                print(f"⚠ Error adding user_badges.achievement_metadata: {e}")
@@ -68,25 +81,37 @@ def upgrade():
        elif 'metadata' in leaderboard_entries_cols:
            # Rename metadata to entry_metadata
            try:
                op.alter_column('leaderboard_entries', 'metadata',
                                new_column_name='entry_metadata',
                                existing_type=sa.JSON(),
                                existing_nullable=True)
                if is_sqlite:
                    with op.batch_alter_table('leaderboard_entries', schema=None) as batch_op:
                        batch_op.alter_column('metadata', new_column_name='entry_metadata')
                else:
                    op.alter_column('leaderboard_entries', 'metadata',
                                    new_column_name='entry_metadata',
                                    existing_type=sa.JSON(),
                                    existing_nullable=True)
                print("✓ Renamed leaderboard_entries.metadata to entry_metadata")
            except Exception as e:
                print(f"⚠ Error renaming leaderboard_entries.metadata: {e}")
                # If rename fails, try adding the column instead
                try:
                    op.add_column('leaderboard_entries',
                                  sa.Column('entry_metadata', sa.JSON(), nullable=True))
                    if is_sqlite:
                        with op.batch_alter_table('leaderboard_entries', schema=None) as batch_op:
                            batch_op.add_column(sa.Column('entry_metadata', sa.JSON(), nullable=True))
                    else:
                        op.add_column('leaderboard_entries',
                                      sa.Column('entry_metadata', sa.JSON(), nullable=True))
                    print("✓ Added leaderboard_entries.entry_metadata column")
                except Exception as e2:
                    print(f"⚠ Error adding leaderboard_entries.entry_metadata: {e2}")
        else:
            # Neither column exists, add the correct one
            try:
                op.add_column('leaderboard_entries',
                              sa.Column('entry_metadata', sa.JSON(), nullable=True))
                if is_sqlite:
                    with op.batch_alter_table('leaderboard_entries', schema=None) as batch_op:
                        batch_op.add_column(sa.Column('entry_metadata', sa.JSON(), nullable=True))
                else:
                    op.add_column('leaderboard_entries',
                                  sa.Column('entry_metadata', sa.JSON(), nullable=True))
                print("✓ Added leaderboard_entries.entry_metadata column")
            except Exception as e:
                print(f"⚠ Error adding leaderboard_entries.entry_metadata: {e}")
@@ -96,6 +121,7 @@ def downgrade():
    """Revert column names back to metadata (if needed)"""
    bind = op.get_bind()
    inspector = sa.inspect(bind)
    is_sqlite = bind.dialect.name == 'sqlite'
    table_names = set(inspector.get_table_names())

    # 1. Revert user_badges.achievement_metadata back to metadata
@@ -103,10 +129,14 @@ def downgrade():
        user_badges_cols = {c['name'] for c in inspector.get_columns('user_badges')}
        if 'achievement_metadata' in user_badges_cols and 'metadata' not in user_badges_cols:
            try:
                op.alter_column('user_badges', 'achievement_metadata',
                                new_column_name='metadata',
                                existing_type=sa.JSON(),
                                existing_nullable=True)
                if is_sqlite:
                    with op.batch_alter_table('user_badges', schema=None) as batch_op:
                        batch_op.alter_column('achievement_metadata', new_column_name='metadata')
                else:
                    op.alter_column('user_badges', 'achievement_metadata',
                                    new_column_name='metadata',
                                    existing_type=sa.JSON(),
                                    existing_nullable=True)
                print("✓ Renamed user_badges.achievement_metadata back to metadata")
            except Exception as e:
                print(f"⚠ Error reverting user_badges.achievement_metadata: {e}")
@@ -116,10 +146,14 @@ def downgrade():
        leaderboard_entries_cols = {c['name'] for c in inspector.get_columns('leaderboard_entries')}
        if 'entry_metadata' in leaderboard_entries_cols and 'metadata' not in leaderboard_entries_cols:
            try:
                op.alter_column('leaderboard_entries', 'entry_metadata',
                                new_column_name='metadata',
                                existing_type=sa.JSON(),
                                existing_nullable=True)
                if is_sqlite:
                    with op.batch_alter_table('leaderboard_entries', schema=None) as batch_op:
                        batch_op.alter_column('entry_metadata', new_column_name='metadata')
                else:
                    op.alter_column('leaderboard_entries', 'entry_metadata',
                                    new_column_name='metadata',
                                    existing_type=sa.JSON(),
                                    existing_nullable=True)
                print("✓ Renamed leaderboard_entries.entry_metadata back to metadata")
            except Exception as e:
                print(f"⚠ Error reverting leaderboard_entries.entry_metadata: {e}")
@@ -21,6 +21,12 @@ depends_on = None
def upgrade():
    """Fix sequences for roles and permissions tables"""
    connection = op.get_bind()
    is_postgresql = connection.dialect.name == 'postgresql'

    # SQLite doesn't use sequences - it uses AUTOINCREMENT which is automatically managed
    # This migration only applies to PostgreSQL
    if not is_postgresql:
        return

    # Fix roles sequence
    # Create sequence if it doesn't exist, link it to the table, then set it to max_id + 1
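The body of the sequence fix is cut off by this hunk. On PostgreSQL, realigning a serial sequence with the table's current maximum id is typically a single statement; a sketch under that assumption:

from alembic import op

# Hypothetical statement; the actual migration may create and link the sequence first.
op.execute(
    """
    SELECT setval(
        pg_get_serial_sequence('roles', 'id'),
        COALESCE((SELECT MAX(id) FROM roles), 0) + 1,
        false
    )
    """
)

With the third argument false, the next nextval call returns exactly the value passed to setval, so new rows resume at max_id + 1.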
@@ -32,6 +32,20 @@ def upgrade():
    """Add iterative report generation and email distribution fields"""
    bind = op.get_bind()
    inspector = sa.inspect(bind)
    is_postgresql = bind.dialect.name == 'postgresql'

    # Fix alembic_version.version_num column size if it's too small
    # Some revision IDs are longer than the default VARCHAR(32)
    # This is needed for PostgreSQL; SQLite doesn't enforce VARCHAR lengths
    if is_postgresql and 'alembic_version' in inspector.get_table_names():
        try:
            # Try to alter the column to VARCHAR(50) to accommodate longer revision IDs
            # This is idempotent - if it's already VARCHAR(50) or larger, it will just work
            op.execute("ALTER TABLE alembic_version ALTER COLUMN version_num TYPE VARCHAR(50)")
        except Exception:
            # Column might already be the right size, or alteration might have failed
            # In either case, we'll continue - this is best-effort
            pass

    # Add iterative report generation to saved_report_views
    if 'saved_report_views' in inspector.get_table_names():
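The try/except above is best-effort. An alternative is to read the declared width from the inspector and widen only when needed; a sketch, assuming the reflected type exposes a length attribute:

import sqlalchemy as sa
from alembic import op

bind = op.get_bind()
inspector = sa.inspect(bind)

cols = {c['name']: c for c in inspector.get_columns('alembic_version')}
version_type = cols['version_num']['type']
# VARCHAR types report their declared length; widen only if it is under 50.
if getattr(version_type, 'length', None) is not None and version_type.length < 50:
    op.execute("ALTER TABLE alembic_version ALTER COLUMN version_num TYPE VARCHAR(50)")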
@@ -105,6 +105,7 @@ def upgrade():

    # Users table - Time Tracking section (additional)
    _add_bool_column("users", "ui_show_time_entry_templates")
    _add_bool_column("users", "ui_show_issues")  # Missing from migration 077

    # Users table - Advanced features
    _add_bool_column("users", "ui_show_workflows")
@@ -24,6 +24,7 @@ def upgrade():
    """
    bind = op.get_bind()
    inspector = sa.inspect(bind)
    is_sqlite = bind.dialect.name == 'sqlite'

    # Check if settings table exists
    table_names = set(inspector.get_table_names())
@@ -71,26 +72,42 @@ def upgrade():
        "ui_allow_kiosk",
    ]

    # Helper to drop a column if it exists
    def _drop_column_if_exists(table_name: str, column_name: str):
        try:
            current_cols = {c['name'] for c in inspector.get_columns(table_name)}
            if column_name in current_cols:
                op.drop_column(table_name, column_name)
                print(f"✓ Dropped {column_name} column from {table_name} table")
            else:
                print(f"⊘ Column {column_name} does not exist in {table_name} table, skipping")
        except Exception as e:
            error_msg = str(e)
            # Column might already be dropped or not exist
            if 'does not exist' in error_msg.lower() or 'no such column' in error_msg.lower():
                print(f"⊘ Column {column_name} does not exist in {table_name} table (detected via error)")
            else:
                print(f"⚠ Warning: Could not drop {column_name} column: {e}")
    # Get existing columns
    current_cols = {c['name'] for c in inspector.get_columns('settings')}

    # Filter to only columns that exist
    columns_to_drop = [col for col in ui_allow_columns if col in current_cols]

    if not columns_to_drop:
        print("⊘ No ui_allow_ columns to remove from settings table")
        return

    # Drop all ui_allow_ columns
    for column_name in ui_allow_columns:
        _drop_column_if_exists("settings", column_name)
    # Drop columns using batch mode for SQLite
    if is_sqlite:
        # SQLite requires batch mode for dropping columns
        with op.batch_alter_table('settings', schema=None) as batch_op:
            for column_name in columns_to_drop:
                try:
                    batch_op.drop_column(column_name)
                    print(f"✓ Dropped {column_name} column from settings table")
                except Exception as e:
                    error_msg = str(e)
                    if 'does not exist' in error_msg.lower() or 'no such column' in error_msg.lower():
                        print(f"⊘ Column {column_name} does not exist in settings table (detected via error)")
                    else:
                        print(f"⚠ Warning: Could not drop {column_name} column: {e}")
    else:
        # PostgreSQL and other databases can use direct drop_column
        for column_name in columns_to_drop:
            try:
                op.drop_column('settings', column_name)
                print(f"✓ Dropped {column_name} column from settings table")
            except Exception as e:
                error_msg = str(e)
                if 'does not exist' in error_msg.lower() or 'no such column' in error_msg.lower():
                    print(f"⊘ Column {column_name} does not exist in settings table (detected via error)")
                else:
                    print(f"⚠ Warning: Could not drop {column_name} column: {e}")


def downgrade():
@@ -19,20 +19,56 @@ depends_on = None

def upgrade():
    """Create donation_interactions table to track user interactions with donation prompts"""
    op.create_table(
        "donation_interactions",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("interaction_type", sa.String(length=50), nullable=False),
        sa.Column("source", sa.String(length=100), nullable=True),
        sa.Column("time_entries_count", sa.Integer(), nullable=True),
        sa.Column("days_since_signup", sa.Integer(), nullable=True),
        sa.Column("total_hours", sa.Float(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.text("CURRENT_TIMESTAMP")),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("idx_donation_interactions_user_id", "donation_interactions", ["user_id"])
    from sqlalchemy import inspect
    bind = op.get_bind()
    inspector = inspect(bind)

    # Add missing ui_show_issues column to users table if it doesn't exist
    # This column was missing from migration 077 and should have been in 092
    existing_tables = inspector.get_table_names()
    if 'users' in existing_tables:
        users_columns = {c['name'] for c in inspector.get_columns('users')}
        if 'ui_show_issues' not in users_columns:
            dialect_name = bind.dialect.name if bind else 'generic'
            bool_true_default = '1' if dialect_name == 'sqlite' else ('true' if dialect_name == 'postgresql' else '1')
            try:
                op.add_column('users', sa.Column('ui_show_issues', sa.Boolean(), nullable=False, server_default=sa.text(bool_true_default)))
                print("✓ Added ui_show_issues column to users table")
            except Exception as e:
                error_msg = str(e)
                if 'already exists' in error_msg.lower() or 'duplicate' in error_msg.lower():
                    print("✓ Column ui_show_issues already exists in users table (detected via error)")
                else:
                    print(f"⚠ Warning adding ui_show_issues column: {e}")

    # Create donation_interactions table (idempotent)
    if 'donation_interactions' not in existing_tables:
        op.create_table(
            "donation_interactions",
            sa.Column("id", sa.Integer(), nullable=False),
            sa.Column("user_id", sa.Integer(), nullable=False),
            sa.Column("interaction_type", sa.String(length=50), nullable=False),
            sa.Column("source", sa.String(length=100), nullable=True),
            sa.Column("time_entries_count", sa.Integer(), nullable=True),
            sa.Column("days_since_signup", sa.Integer(), nullable=True),
            sa.Column("total_hours", sa.Float(), nullable=True),
            sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.text("CURRENT_TIMESTAMP")),
            sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
            sa.PrimaryKeyConstraint("id"),
        )
        # Create index (idempotent)
        try:
            op.create_index("idx_donation_interactions_user_id", "donation_interactions", ["user_id"])
        except Exception:
            pass  # Index might already exist
    else:
        # Table exists, ensure index exists
        try:
            existing_indexes = [idx['name'] for idx in inspector.get_indexes('donation_interactions')]
            if 'idx_donation_interactions_user_id' not in existing_indexes:
                op.create_index("idx_donation_interactions_user_id", "donation_interactions", ["user_id"])
        except Exception:
            pass


def downgrade():
migrations/versions/095_add_missing_ui_show_issues.py (new file, 77 lines)
@@ -0,0 +1,77 @@
"""Add missing ui_show_issues column to users table

Revision ID: 095_add_missing_ui_show_issues
Revises: 094_add_donation_interactions
Create Date: 2025-12-31 14:25:00

This migration adds the missing ui_show_issues column that was expected by the User model
but was not included in migration 077.
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '095_add_missing_ui_show_issues'
down_revision = '094_add_donation_interactions'
branch_labels = None
depends_on = None


def upgrade():
    """Add missing ui_show_issues column to users table"""
    from sqlalchemy import inspect
    bind = op.get_bind()
    inspector = inspect(bind)

    existing_tables = inspector.get_table_names()
    if 'users' not in existing_tables:
        return

    users_columns = {c['name'] for c in inspector.get_columns('users')}

    if 'ui_show_issues' in users_columns:
        print("✓ Column ui_show_issues already exists in users table")
        return

    # Determine database dialect for proper default values
    dialect_name = bind.dialect.name if bind else 'generic'
    bool_true_default = '1' if dialect_name == 'sqlite' else ('true' if dialect_name == 'postgresql' else '1')

    try:
        op.add_column('users', sa.Column('ui_show_issues', sa.Boolean(), nullable=False, server_default=sa.text(bool_true_default)))
        print("✓ Added ui_show_issues column to users table")
    except Exception as e:
        error_msg = str(e)
        if 'already exists' in error_msg.lower() or 'duplicate' in error_msg.lower():
            print("✓ Column ui_show_issues already exists in users table (detected via error)")
        else:
            print(f"✗ Error adding ui_show_issues column: {e}")
            raise


def downgrade():
    """Remove ui_show_issues column from users table"""
    from sqlalchemy import inspect
    bind = op.get_bind()
    inspector = inspect(bind)

    existing_tables = inspector.get_table_names()
    if 'users' not in existing_tables:
        return

    users_columns = {c['name'] for c in inspector.get_columns('users')}

    if 'ui_show_issues' not in users_columns:
        print("⊘ Column ui_show_issues does not exist in users table, skipping")
        return

    try:
        op.drop_column('users', 'ui_show_issues')
        print("✓ Dropped ui_show_issues column from users table")
    except Exception as e:
        error_msg = str(e)
        if 'does not exist' in error_msg.lower() or 'no such column' in error_msg.lower():
            print("⊘ Column ui_show_issues does not exist in users table (detected via error)")
        else:
            print(f"⚠ Warning: Could not drop ui_show_issues column: {e}")
migrations/versions/096_add_missing_portal_issues_enabled.py (new file, 77 lines)
@@ -0,0 +1,77 @@
"""Add missing portal_issues_enabled column to clients table

Revision ID: 096_add_missing_portal_issues_enabled
Revises: 095_add_missing_ui_show_issues
Create Date: 2026-01-01 08:30:00

This migration adds the missing portal_issues_enabled column that was expected by the Client model
but was not included in migration 048.
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '096_add_missing_portal_issues_enabled'
down_revision = '095_add_missing_ui_show_issues'
branch_labels = None
depends_on = None


def upgrade():
    """Add missing portal_issues_enabled column to clients table"""
    from sqlalchemy import inspect
    bind = op.get_bind()
    inspector = inspect(bind)

    existing_tables = inspector.get_table_names()
    if 'clients' not in existing_tables:
        return

    clients_columns = {c['name'] for c in inspector.get_columns('clients')}

    if 'portal_issues_enabled' in clients_columns:
        print("✓ Column portal_issues_enabled already exists in clients table")
        return

    # Determine database dialect for proper default values
    dialect_name = bind.dialect.name if bind else 'generic'
    bool_true_default = '1' if dialect_name == 'sqlite' else ('true' if dialect_name == 'postgresql' else '1')

    try:
        op.add_column('clients', sa.Column('portal_issues_enabled', sa.Boolean(), nullable=False, server_default=sa.text(bool_true_default)))
        print("✓ Added portal_issues_enabled column to clients table")
    except Exception as e:
        error_msg = str(e)
        if 'already exists' in error_msg.lower() or 'duplicate' in error_msg.lower():
            print("✓ Column portal_issues_enabled already exists in clients table (detected via error)")
        else:
            print(f"✗ Error adding portal_issues_enabled column: {e}")
            raise


def downgrade():
    """Remove portal_issues_enabled column from clients table"""
    from sqlalchemy import inspect
    bind = op.get_bind()
    inspector = inspect(bind)

    existing_tables = inspector.get_table_names()
    if 'clients' not in existing_tables:
        return

    clients_columns = {c['name'] for c in inspector.get_columns('clients')}

    if 'portal_issues_enabled' not in clients_columns:
        print("⊘ Column portal_issues_enabled does not exist in clients table, skipping")
        return

    try:
        op.drop_column('clients', 'portal_issues_enabled')
        print("✓ Dropped portal_issues_enabled column from clients table")
    except Exception as e:
        error_msg = str(e)
        if 'does not exist' in error_msg.lower() or 'no such column' in error_msg.lower():
            print("⊘ Column portal_issues_enabled does not exist in clients table (detected via error)")
        else:
            print(f"⚠ Warning: Could not drop portal_issues_enabled column: {e}")
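With 095 and 096 appended, the revision chain now ends at 096_add_missing_portal_issues_enabled. Applying the chain is the usual Flask-Migrate flow (flask db upgrade); a sketch of doing the same programmatically, where the config path is an assumption about the project layout:

from alembic import command
from alembic.config import Config

cfg = Config('migrations/alembic.ini')  # assumed location in a Flask-Migrate project
command.upgrade(cfg, 'head')  # applies 095 and 096 together with everything before them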