MAIA/backend/alembic/env.py
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy import create_engine # Add create_engine import
from alembic import context
# --- Add project root to sys.path ---
# This assumes alembic/env.py is one level down from the project root (backend/)
PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, PROJECT_DIR)
# -----------------------------------
# Import the project's declarative Base; this must happen after the sys.path
# insertion above, otherwise the 'core' package cannot be resolved.
from core.database import Base
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# --- Construct DB URL from environment variables ---
# Use environment variables similar to docker-compose
db_user = os.getenv("POSTGRES_USER", "maia") # Default to 'maia' if not set
db_password = os.getenv("POSTGRES_PASSWORD", "maia") # Default to 'maia' if not set
db_host = os.getenv("DB_HOST", "db") # Default to 'db' service name
db_port = os.getenv("DB_PORT", "5432") # Default to '5432'
db_name = os.getenv("DB_NAME", "maia") # Default to 'maia'
# Construct the URL, falling back to alembic.ini if needed
url = os.getenv("DB_URL")
if not url:
    # Try constructing from parts if DB_URL isn't set
    url = f"postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
# As a final fallback, use the URL from alembic.ini. Note that because every part
# above has a default, 'url' is always set by this point, so this branch only
# acts as a safety net.
config_url = config.get_main_option("sqlalchemy.url")
if not url and config_url:
    url = config_url
# Update the config object so engine_from_config can potentially use it,
# though we'll primarily use the constructed 'url' directly.
config.set_main_option("sqlalchemy.url", url)
# ----------------------------------------------------
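# Example (assumed invocation; values are illustrative, not part of this repo):
# overriding the defaults above when migrating against a local database instead
# of the docker-compose 'db' service:
#
#   DB_HOST=localhost POSTGRES_USER=maia POSTGRES_PASSWORD=maia alembic upgrade head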
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# --- Set target_metadata ---
target_metadata = Base.metadata
# ---------------------------
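# Note: autogenerate only sees tables whose model modules have been imported by
# the time Base.metadata is inspected. It is assumed here that importing Base
# from core.database pulls in all models; if it does not, import the model
# modules explicitly above.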
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    context.configure(
        url=url,  # Use the constructed URL
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # Create engine directly using the constructed URL
    connectable = create_engine(url, poolclass=pool.NullPool)

    # Original approach using engine_from_config:
    # connectable = engine_from_config(
    #     config.get_section(config.config_ini_section, {}),
    #     prefix="sqlalchemy.",
    #     poolclass=pool.NullPool,
    # )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
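# Typical usage (standard Alembic CLI, shown for reference):
#   alembic revision --autogenerate -m "describe change"   # generate a migration script
#   alembic upgrade head                                    # apply migrations (online mode)
#   alembic upgrade head --sql                              # emit SQL only (offline mode)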