[V0.5] Working application with all 4 screens implemented so far.
.gitignore (vendored): 2 changes
@@ -1,6 +1,8 @@
 # backend
 backend/env
 backend/.env
+backend/db
+backend/redis_data
 
 # frontend
 interfaces/nativeapp/node_modules
@@ -1,5 +1,7 @@
-POSTGRES_USER = "maia"
-POSTGRES_PASSWORD = "maia"
+DB_HOST = "db"
+DB_USER = "maia"
+DB_PASSWORD = "maia"
+DB_NAME = "maia"
 PEPPER = "LsD7%"
 JWT_SECRET_KEY="1c8cf3ca6972b365f8108dad247e61abdcb6faff5a6c8ba00cb6fa17396702bf"
 GOOGLE_API_KEY="AIzaSyBrte_mETZJce8qE6cRTSz_fHOjdjlShBk"
@@ -64,7 +64,7 @@ version_path_separator = os
 # output_encoding = utf-8
 
 # sqlalchemy.url = postgresql://user:pass@localhost/dbname
-sqlalchemy.url = postgresql://maia:maia@db:5432/maia
+# sqlalchemy.url = postgresql://maia:maia@db:5432/maia
 
 [post_write_hooks]
 # post_write_hooks defines scripts or Python functions that are run
@@ -4,6 +4,7 @@ from logging.config import fileConfig
 
 from sqlalchemy import engine_from_config
 from sqlalchemy import pool
+from sqlalchemy import create_engine # Add create_engine import
 
 from alembic import context
 
@@ -25,6 +26,29 @@ config = context.config
 if config.config_file_name is not None:
     fileConfig(config.config_file_name)
 
+# --- Construct DB URL from environment variables ---
+# Use environment variables similar to docker-compose
+db_user = os.getenv("POSTGRES_USER", "maia") # Default to 'maia' if not set
+db_password = os.getenv("POSTGRES_PASSWORD", "maia") # Default to 'maia' if not set
+db_host = os.getenv("DB_HOST", "db") # Default to 'db' service name
+db_port = os.getenv("DB_PORT", "5432") # Default to '5432'
+db_name = os.getenv("DB_NAME", "maia") # Default to 'maia'
+
+# Construct the URL, falling back to alembic.ini if needed
+url = os.getenv("DB_URL")
+if not url:
+    # Try constructing from parts if DB_URL isn't set
+    url = f"postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
+# As a final fallback, use the URL from alembic.ini
+config_url = config.get_main_option("sqlalchemy.url")
+if not url and config_url:
+    url = config_url
+
+# Update the config object so engine_from_config can potentially use it,
+# though we'll primarily use the constructed 'url' directly.
+config.set_main_option("sqlalchemy.url", url)
+# ----------------------------------------------------
+
 # add your model's MetaData object here
 # for 'autogenerate' support
 # from myapp import mymodel
@@ -51,9 +75,8 @@ def run_migrations_offline() -> None:
     script output.
 
     """
-    url = config.get_main_option("sqlalchemy.url")
     context.configure(
-        url=url,
+        url=url, # Use the constructed URL
         target_metadata=target_metadata,
         literal_binds=True,
         dialect_opts={"paramstyle": "named"},
@@ -70,11 +93,14 @@ def run_migrations_online() -> None:
     and associate a connection with the context.
 
     """
-    connectable = engine_from_config(
-        config.get_section(config.config_ini_section, {}),
-        prefix="sqlalchemy.",
-        poolclass=pool.NullPool,
-    )
+    # Create engine directly using the constructed URL
+    connectable = create_engine(url, poolclass=pool.NullPool)
+    # Original approach using engine_from_config:
+    # connectable = engine_from_config(
+    #     config.get_section(config.config_ini_section, {}),
+    #     prefix="sqlalchemy.",
+    #     poolclass=pool.NullPool,
+    # )
 
     with connectable.connect() as connection:
         context.configure(connection=connection, target_metadata=target_metadata)
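Note on the env.py block above: it resolves the database URL in three steps. An explicit DB_URL environment variable wins; otherwise the URL is assembled from the individual POSTGRES_*/DB_* variables with the defaults shown; alembic.ini is only consulted as a last resort. A minimal standalone sketch of that resolution order (the function name and signature are illustrative, not part of the commit):

import os

def resolve_db_url(config_url: str | None = None) -> str:
    """Mirror of the fallback order added in env.py (sketch, not the committed code)."""
    url = os.getenv("DB_URL")
    if not url:
        # Assemble from parts, using the same defaults as the diff
        user = os.getenv("POSTGRES_USER", "maia")
        password = os.getenv("POSTGRES_PASSWORD", "maia")
        host = os.getenv("DB_HOST", "db")
        port = os.getenv("DB_PORT", "5432")
        name = os.getenv("DB_NAME", "maia")
        url = f"postgresql://{user}:{password}@{host}:{port}/{name}"
    if not url and config_url:
        # Final fallback to alembic.ini; as written, url is already set above,
        # so this branch is effectively unreachable.
        url = config_url
    return url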
@@ -0,0 +1,94 @@
+"""Add all_day column to calendar_events
+
+Revision ID: a34d847510da
+Revises: 9a82960db482
+Create Date: 2025-04-26 11:09:35.400748
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision: str = 'a34d847510da'
+down_revision: Union[str, None] = '9a82960db482'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('calendar_events')
+    op.drop_table('users')
+    op.drop_index('ix_todos_id', table_name='todos')
+    op.drop_index('ix_todos_task', table_name='todos')
+    op.drop_table('todos')
+    op.drop_table('token_blacklist')
+    op.drop_index('ix_chat_messages_id', table_name='chat_messages')
+    op.drop_index('ix_chat_messages_user_id', table_name='chat_messages')
+    op.drop_table('chat_messages')
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('chat_messages',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False),
+    sa.Column('sender', postgresql.ENUM('USER', 'AI', name='messagesender'), autoincrement=False, nullable=False),
+    sa.Column('text', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('timestamp', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='chat_messages_user_id_fkey'),
+    sa.PrimaryKeyConstraint('id', name='chat_messages_pkey')
+    )
+    op.create_index('ix_chat_messages_user_id', 'chat_messages', ['user_id'], unique=False)
+    op.create_index('ix_chat_messages_id', 'chat_messages', ['id'], unique=False)
+    op.create_table('token_blacklist',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('token', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('expires_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='token_blacklist_pkey'),
+    sa.UniqueConstraint('token', name='token_blacklist_token_key')
+    )
+    op.create_table('todos',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('task', sa.VARCHAR(), autoincrement=False, nullable=False),
+    sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('remind', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('complete', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('owner_id', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['owner_id'], ['users.id'], name='todos_owner_id_fkey'),
+    sa.PrimaryKeyConstraint('id', name='todos_pkey')
+    )
+    op.create_index('ix_todos_task', 'todos', ['task'], unique=False)
+    op.create_index('ix_todos_id', 'todos', ['id'], unique=False)
+    op.create_table('users',
+    sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('users_id_seq'::regclass)"), autoincrement=True, nullable=False),
+    sa.Column('uuid', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('username', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('name', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('role', postgresql.ENUM('ADMIN', 'USER', name='userrole'), autoincrement=False, nullable=False),
+    sa.Column('hashed_password', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='users_pkey'),
+    sa.UniqueConstraint('username', name='users_username_key'),
+    sa.UniqueConstraint('uuid', name='users_uuid_key'),
+    postgresql_ignore_search_path=False
+    )
+    op.create_table('calendar_events',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=False),
+    sa.Column('description', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('start', postgresql.TIMESTAMP(), autoincrement=False, nullable=False),
+    sa.Column('end', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('location', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('tags', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True),
+    sa.Column('color', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False),
+    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='calendar_events_user_id_fkey'),
+    sa.PrimaryKeyConstraint('id', name='calendar_events_pkey')
+    )
+    # ### end Alembic commands ###
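The upgrade() above is Alembic's raw autogenerate output and drops every table rather than touching calendar_events, which the "please adjust" marker already flags. For comparison, a hand-written migration that only does what the revision message describes might look roughly like the sketch below; the Boolean type and server default are assumptions, not taken from this commit:

import sqlalchemy as sa
from alembic import op

def upgrade() -> None:
    # Hypothetical hand-written version: add only the all_day flag.
    op.add_column(
        "calendar_events",
        sa.Column("all_day", sa.Boolean(), nullable=False, server_default=sa.false()),
    )

def downgrade() -> None:
    op.drop_column("calendar_events", "all_day")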
Binary file not shown.
Binary file not shown.
@@ -6,8 +6,14 @@ DOTENV_PATH = os.path.join(os.path.dirname(__file__), "../.env")
 
 
 class Settings(BaseSettings):
-    # Database settings - reads DB_URL from environment or .env
-    DB_URL: str = "postgresql://maia:maia@localhost:5432/maia"
+    # Database settings - reads from environment or .env
+    DB_PORT: int = 5432
+    DB_NAME: str = "maia"
+    DB_HOST: str
+    DB_USER: str
+    DB_PASSWORD: str
+
+    DB_URL: str = ""
 
     # Redis settings - reads REDIS_URL from environment or .env, also used for Celery.
     REDIS_URL: str = "redis://localhost:6379/0"
@@ -9,6 +9,7 @@ Base = declarative_base() # Used for models
 _engine = None
 _SessionLocal = None
 
+settings.DB_URL = f"postgresql://{settings.DB_USER}:{settings.DB_PASSWORD}@{settings.DB_HOST}:{settings.DB_PORT}/{settings.DB_NAME}"
 
 def get_engine():
     global _engine
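Together with the new Settings fields above, this line means the application no longer reads a full DB_URL from the environment; the URL is composed at import time from DB_USER, DB_PASSWORD, DB_HOST, DB_PORT and DB_NAME. A rough sketch of the resulting flow (the core.config import path and the settings instance name are assumed from context, not confirmed by the diff):

# Sketch only: how the refactored settings yield the SQLAlchemy URL.
from sqlalchemy import create_engine

from core.config import settings  # assumed location of the Settings() instance

settings.DB_URL = (
    f"postgresql://{settings.DB_USER}:{settings.DB_PASSWORD}"
    f"@{settings.DB_HOST}:{settings.DB_PORT}/{settings.DB_NAME}"
)
engine = create_engine(settings.DB_URL)  # e.g. postgresql://maia:maia@db:5432/maia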
@@ -1,4 +1,8 @@
 # docker-compose.yml
+
+###################
+### DEV COMPOSE ###
+###################
 services:
   # ----- Backend API (Uvicorn/FastAPI/Django etc.) -----
   api:
@@ -11,9 +15,6 @@ services:
       - .:/app
     ports:
       - "8000:8000"
-    environment:
-      - DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/maia
-      - REDIS_URL=redis://redis:6379/0
     depends_on:
       - db
       - redis
@@ -32,9 +33,6 @@ services:
     command: celery -A core.celery_app worker --loglevel=info
     volumes:
       - .:/app
-    environment:
-      - DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/maia
-      - REDIS_URL=redis://redis:6379/0
     depends_on:
       - db
       - redis
@@ -49,11 +47,11 @@ services:
     image: postgres:15 # Use a specific version
     container_name: MAIA-DB
    volumes:
-      - postgres_data:/var/lib/postgresql/data # Persist data using a named volume
+      - ./db:/var/lib/postgresql/data # Persist data using a named volume
     environment:
-      - POSTGRES_USER=${POSTGRES_USER}
-      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
-      - POSTGRES_DB=maia
+      - POSTGRES_USER=${DB_USER}
+      - POSTGRES_PASSWORD=${DB_PASSWORD}
+      - POSTGRES_DB=${DB_NAME}
     env_file:
       - ./.env
     networks:
@@ -65,16 +63,11 @@ services:
     image: redis:7 # Use a specific version
     container_name: MAIA-Redis
     volumes:
-      - redis_data:/data
+      - ./redis_data:/data
     networks:
       - maia_network
     restart: unless-stopped
 
-# ----- Volumes Definition -----
-volumes:
-  postgres_data: # Define the named volume for PostgreSQL
-  redis_data: # Define the named volume for Redis
-
 # ----- Network Definition -----
 networks:
   maia_network: # Define a custom bridge network
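With the per-service environment: blocks removed, the containers rely on env_file (./.env) and the Settings defaults, and Postgres/Redis state now lives in the ./db and ./redis_data bind mounts that this commit also adds to .gitignore. A quick connectivity check that could be run inside the api container, sketched under the assumption that the db and redis service names resolve on maia_network and that the redis and psycopg2 drivers are installed:

# Sketch: verify database and Redis connectivity from inside the api container.
import os

import redis
from sqlalchemy import create_engine, text

db_url = os.getenv("DB_URL", "postgresql://maia:maia@db:5432/maia")
redis_url = os.getenv("REDIS_URL", "redis://redis:6379/0")

with create_engine(db_url).connect() as conn:
    print(conn.execute(text("SELECT 1")).scalar())  # expect 1

print(redis.Redis.from_url(redis_url).ping())  # expect True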
@@ -10,7 +10,6 @@ logging.getLogger("passlib").setLevel(logging.ERROR) # fix bc package logging i
 
-
 def lifespan_factory() -> Callable[[FastAPI], _AsyncGeneratorContextManager[Any]]:
 
     @asynccontextmanager
     async def lifespan(app: FastAPI):
         # Base.metadata.drop_all(bind=get_engine())
@@ -29,6 +28,7 @@ app.add_middleware(
     CORSMiddleware,
     allow_origins=[
         "https://maia.depaoli.id.au",
+        "http://localhost:8081",
     ],
     allow_credentials=True,
     allow_methods=["*"],
Binary file not shown.
Binary file not shown.
Binary file not shown.
interfaces/nativeapp/.env (new file): 1 change
@@ -0,0 +1 @@
+EXPO_PUBLIC_API_URL='http://localhost:8000/api'
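The new allow_origins entry http://localhost:8081 (typically the Expo/Metro dev server) and the EXPO_PUBLIC_API_URL above can be sanity-checked with a CORS preflight against the running API. A sketch using the requests library, not part of the commit:

# Sketch: confirm the API accepts the Expo dev origin added to allow_origins.
import requests

api_url = "http://localhost:8000/api"  # value of EXPO_PUBLIC_API_URL
resp = requests.options(
    api_url,
    headers={
        "Origin": "http://localhost:8081",
        "Access-Control-Request-Method": "GET",
    },
)
print(resp.status_code, resp.headers.get("access-control-allow-origin"))
# Expect the allow-origin header to echo http://localhost:8081 if CORS is configured.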