diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 0000000..a27b975 --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,90 @@ +# .github/workflows/deploy.yml + +name: Build and Deploy Backend +on: + # Triggers the workflow on push events but only for the main branch + push: + branches: [ main ] + paths: # Only run if backend code or Docker config changes + - 'backend/**' + - '.github/workflows/deploy.yml' + - 'backend/docker-compose.yml' + + # Allows running of this workflow manually from the Actions tab + workflow_dispatch: + + # Ensures the project will never be out of date by running a cron for this job + # Currently set to every Sunday at 3 AM UTC + schedule: + - cron: '0 3 * * 0' + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + steps: + # Checks out the repo under $GITHUB_WORKSPACE + - name: Checkout code + uses: actions/checkout@v4 + + # ------------------------------------------------------------------ + # Login to Container Registry (Using GHCR) + # ------------------------------------------------------------------ + - name: Log in to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} # GitHub username + password: ${{ secrets.DOCKER_REGISTRY_TOKEN }} # Uses the PAT stored in secrets + + # ------------------------------------------------------------------ + # Set up Docker Buildx for advanced build features + # ------------------------------------------------------------------ + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + # ------------------------------------------------------------------ + # Build and Push Docker Image + # ------------------------------------------------------------------ + - name: Build and push Docker image + uses: docker/build-push-action@v5 + with: + context: ./backend + file: ./backend/Dockerfile # Explicit path to Dockerfile + push: true # Push the image after building 
+          tags: | # Use SHA for version specific, latest for general (NOTE: GHCR image names must be all-lowercase, including the owner — lowercase `github.repository_owner` if it contains capitals) +            ghcr.io/${{ github.repository_owner }}/maia:${{ github.sha }} +            ghcr.io/${{ github.repository_owner }}/maia:latest +          # Pull latest base image updates when building (good for scheduled runs) +          pull: true +          cache-from: type=gha # Github Actions cache +          cache-to: type=gha,mode=max + +      # ------------------------------------------------------------------ +      # Deploy to mara via SSH +      # ------------------------------------------------------------------ +      - name: Deploy to Server +        uses: appleboy/ssh-action@v1.0.3 +        with: +          host: ${{ secrets.SSH_HOST }} +          username: ${{ secrets.SSH_USER }} +          key: ${{ secrets.SSH_PRIVATE_KEY }} +          script: | +            set -e # Exit script on first error +            cd ${{ secrets.DEPLOY_PATH }} +            echo "Logged into server: $(pwd)" + +            # Pull the specific image version built in this workflow +            # Using the Git SHA ensures we deploy exactly what was just built +            docker pull ghcr.io/${{ github.repository_owner }}/maia:${{ github.sha }} + +            # Also pull latest for other services to keep up to date +            docker-compose pull redis db + +            # Uses sed to update the compose file with the new image tag (NOTE(review): docker-compose.yml defines api/worker with `build:` only and no `image:` key, so this sed matches nothing and `up` rebuilds locally — add matching `image:` keys to the compose file; confirm against backend/docker-compose.yml) +            sed -i 's|image: ghcr.io/${{ github.repository_owner }}/maia:.*|image: ghcr.io/${{ github.repository_owner }}/maia:${{ github.sha }}|g' docker-compose.yml +            echo "Updated docker-compose.yml image tag" + +            # Restart the services using the new image(s) +            echo "Bringing compose stack down and up with new image..." +            docker-compose up -d --force-recreate --remove-orphans api worker db redis +            echo "Deployment complete!" 
\ No newline at end of file diff --git a/backend/.env b/backend/.env index d0c81d1..6530144 100644 --- a/backend/.env +++ b/backend/.env @@ -1,3 +1,5 @@ +POSTGRES_USER=maia +POSTGRES_PASSWORD=maia  PEPPER = "LsD7%" JWT_SECRET_KEY="1c8cf3ca6972b365f8108dad247e61abdcb6faff5a6c8ba00cb6fa17396702bf" -GOOGLE_API_KEY="AIzaSyBrte_mETZJce8qE6cRTSz_fHOjdjlShBk" \ No newline at end of file +GOOGLE_API_KEY="AIzaSyBrte_mETZJce8qE6cRTSz_fHOjdjlShBk" diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..84b4a3f --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,19 @@ +# backend/Dockerfile +FROM python:3.12-slim + +WORKDIR /app + +# Set environment variables to prevent buffering issues with logs +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 + +# Install dependencies +COPY ./requirements.txt /app/requirements.txt +RUN pip install --no-cache-dir --upgrade pip && \ pip install --no-cache-dir -r requirements.txt + +# Copy application code (AFTER installing dependencies for better caching) — NOTE(review): without a .dockerignore this also copies .env and __pycache__ into the image; confirm a .dockerignore exists +COPY . 
/app/ + +RUN adduser --disabled-password --gecos "" appuser && chown -R appuser /app +USER appuser \ No newline at end of file diff --git a/backend/core/__init__.py b/backend/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/core/__pycache__/__init__.cpython-312.pyc b/backend/core/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1415239 Binary files /dev/null and b/backend/core/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/core/__pycache__/celery_app.cpython-312.pyc b/backend/core/__pycache__/celery_app.cpython-312.pyc index 9e9d3f7..8919cbe 100644 Binary files a/backend/core/__pycache__/celery_app.cpython-312.pyc and b/backend/core/__pycache__/celery_app.cpython-312.pyc differ diff --git a/backend/core/__pycache__/config.cpython-312.pyc b/backend/core/__pycache__/config.cpython-312.pyc index fd74792..3a175ae 100644 Binary files a/backend/core/__pycache__/config.cpython-312.pyc and b/backend/core/__pycache__/config.cpython-312.pyc differ diff --git a/backend/core/celery_app.py b/backend/core/celery_app.py index 8057de0..51e7355 100644 --- a/backend/core/celery_app.py +++ b/backend/core/celery_app.py @@ -1,10 +1,14 @@ # core/celery_app.py from celery import Celery -from core.config import settings +from core.config import settings # Import your settings -celery = Celery( - "maia", - broker=f"redis://{settings.REDIS_HOST}:{settings.REDIS_PORT}/0", - backend=f"redis://{settings.REDIS_HOST}:{settings.REDIS_PORT}/1", - include=["modules.auth.tasks"], # List all task modules here +celery_app = Celery( + "worker", + broker=settings.REDIS_URL, + backend=settings.REDIS_URL, + include=["modules.auth.tasks", "modules.admin.tasks"] # Add paths to modules containing tasks + # Add other modules with tasks here, e.g., "modules.some_other_module.tasks" ) + +# Optional: Update Celery configuration directly if needed +# celery_app.conf.update(task_track_started=True) \ No newline at end of file diff --git 
a/backend/core/config.py b/backend/core/config.py index a0794b9..80c6211 100644 --- a/backend/core/config.py +++ b/backend/core/config.py @@ -1,24 +1,30 @@ # core/config.py from pydantic_settings import BaseSettings -from os import getenv -from dotenv import load_dotenv - -load_dotenv() # Load .env file +from pydantic import Field # Import Field for potential default values if needed +import os class Settings(BaseSettings): - DB_URL: str = "postgresql://maia:maia@localhost:5432/maia" + # Database settings - reads DB_URL from environment or .env + DB_URL: str - REDIS_HOST: str = "localhost" - REDIS_PORT: int = 6379 + # Redis settings - reads REDIS_URL from environment or .env, also used for Celery. + REDIS_URL: str + # JWT settings - reads from environment or .env JWT_ALGORITHM: str = "HS256" ACCESS_TOKEN_EXPIRE_MINUTES: int = 30 - # ACCESS_TOKEN_EXPIRE_MINUTES: int = 1 REFRESH_TOKEN_EXPIRE_DAYS: int = 7 - - PEPPER: str = getenv("PEPPER", "") - JWT_SECRET_KEY: str = getenv("JWT_SECRET_KEY", "") + PEPPER: str + JWT_SECRET_KEY: str - GOOGLE_API_KEY: str = getenv("GOOGLE_API_KEY", "") + # Other settings + GOOGLE_API_KEY: str = "" # Example with a default + class Config: + # Tell pydantic-settings to load variables from a .env file + env_file = '.env' + env_file_encoding = 'utf-8' + extra = 'ignore' + +# Create a single instance of the settings settings = Settings() diff --git a/backend/docker-compose.yml b/backend/docker-compose.yml index 2d9c274..f834afc 100644 --- a/backend/docker-compose.yml +++ b/backend/docker-compose.yml @@ -1,23 +1,81 @@ # docker-compose.yml services: - postgres: - image: postgres:14 - environment: - POSTGRES_USER: maia - POSTGRES_PASSWORD: maia - POSTGRES_DB: maia - ports: - - "5432:5432" + # ----- Backend API (Uvicorn/FastAPI/Django etc.) ----- + api: + build: + context: . 
+ dockerfile: Dockerfile + container_name: MAIA-API + command: uvicorn main:app --host 0.0.0.0 --port 8000 --reload volumes: - - postgres_data:/var/lib/postgresql/data - - redis: - image: redis:7 + - .:/app ports: - - "6379:6379" + - "8000:8000" + environment: + - DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/maia + - REDIS_URL=redis://redis:6379/0 + depends_on: + - db + - redis + networks: + - maia_network + env_file: + - ./.env + restart: unless-stopped + + # ----- Celery Worker ----- + worker: + build: + context: . + dockerfile: Dockerfile + container_name: MAIA-Worker + command: celery -A core.celery_app worker --loglevel=info + volumes: + - .:/app + environment: + - DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/maia + - REDIS_URL=redis://redis:6379/0 + depends_on: + - db + - redis + env_file: + - ./.env + networks: + - maia_network + restart: unless-stopped + + # ----- Database (PostgreSQL) ----- + db: + image: postgres:15 # Use a specific version + container_name: MAIA-DB + volumes: + - postgres_data:/var/lib/postgresql/data # Persist data using a named volume + environment: + - POSTGRES_USER=${POSTGRES_USER} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + - POSTGRES_DB=maia + env_file: + - ./.env + networks: + - maia_network + restart: unless-stopped + + # ----- Cache (Redis) ----- + redis: + image: redis:7 # Use a specific version + container_name: MAIA-Redis volumes: - redis_data:/data + networks: + - maia_network + restart: unless-stopped +# ----- Volumes Definition ----- volumes: - postgres_data: - redis_data: \ No newline at end of file + postgres_data: # Define the named volume for PostgreSQL + redis_data: # Define the named volume for Redis + +# ----- Network Definition ----- +networks: + maia_network: # Define a custom bridge network + driver: bridge \ No newline at end of file diff --git a/backend/modules/admin/__pycache__/api.cpython-312.pyc b/backend/modules/admin/__pycache__/api.cpython-312.pyc index 
6f2b3a9..e2753a4 100644 Binary files a/backend/modules/admin/__pycache__/api.cpython-312.pyc and b/backend/modules/admin/__pycache__/api.cpython-312.pyc differ diff --git a/backend/modules/admin/__pycache__/tasks.cpython-312.pyc b/backend/modules/admin/__pycache__/tasks.cpython-312.pyc new file mode 100644 index 0000000..5e01d07 Binary files /dev/null and b/backend/modules/admin/__pycache__/tasks.cpython-312.pyc differ diff --git a/backend/modules/admin/api.py b/backend/modules/admin/api.py index 9ca6f98..63990fa 100644 --- a/backend/modules/admin/api.py +++ b/backend/modules/admin/api.py @@ -6,7 +6,7 @@ from sqlalchemy.orm import Session from core.database import Base, get_db from modules.auth.models import User, UserRole from modules.auth.dependencies import admin_only - +from .tasks import cleardb router = APIRouter(prefix="/admin", tags=["admin"], dependencies=[Depends(admin_only)]) @@ -27,20 +27,5 @@ def clear_db(payload: ClearDbRequest, db: Annotated[Session, Depends(get_db)]): 'hard'=False: Delete data from tables except users. """ hard = payload.hard # Get 'hard' from the payload - if hard: - # ... existing hard clear logic ... - Base.metadata.drop_all(bind=db.get_bind()) - Base.metadata.create_all(bind=db.get_bind()) - db.commit() - return {"message": "Database reset (HARD)"} - else: - # ... existing soft clear logic ... 
-        tables = Base.metadata.tables.keys() -        for table_name in tables: -            # delete all tables that isn't the users table -            if table_name != "users": -                table = Base.metadata.tables[table_name] -                print(f"Deleting table: {table_name}") -                db.execute(table.delete()) -        db.commit() -        return {"message": "Database cleared"} \ No newline at end of file +    cleardb.delay(hard) +    return {"message": "Clearing database in the background", "hard": hard} diff --git a/backend/modules/admin/tasks.py b/backend/modules/admin/tasks.py new file mode 100644 index 0000000..3c03c28 --- /dev/null +++ b/backend/modules/admin/tasks.py @@ -0,0 +1,35 @@ +from core.celery_app import celery_app + +@celery_app.task +def cleardb(hard: bool): +    """ +    Clear the database based on the 'hard' flag. +    'hard'=True: Drop and recreate all tables. +    'hard'=False: Delete data from tables except users. +    """ +    from sqlalchemy import create_engine +    from sqlalchemy.orm import sessionmaker +    from core.config import settings +    from core.database import Base + +    engine = create_engine(settings.DB_URL) +    SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) +    db = SessionLocal() + +    if hard: +        # Drop and recreate all tables (NOTE(review): the session and engine created above are never closed — wrap the body in try/finally with db.close() and engine.dispose() to avoid leaking a connection per task run) +        Base.metadata.drop_all(bind=engine) +        Base.metadata.create_all(bind=engine) +        db.commit() +        return {"message": "Database reset (HARD)"} +    else: +        # Delete data from tables except users +        tables = Base.metadata.tables.keys() +        for table_name in tables: +            # delete rows from every table except the users table (NOTE(review): prefer logging over print in a Celery worker) +            if table_name != "users": +                table = Base.metadata.tables[table_name] +                print(f"Deleting table: {table_name}") +                db.execute(table.delete()) +        db.commit() +        return {"message": "Database cleared"} \ No newline at end of file diff --git a/backend/requirements.in b/backend/requirements.in new file mode 100644 index 0000000..11d6226 --- /dev/null +++ b/backend/requirements.in @@ -0,0 +1,17 @@ +alembic +argon2-cffi +celery +fastapi +gevent +google-auth +google-genai +psycopg2-binary
+pydantic +pydantic-settings +python-dotenv +python-jose +python-multipart +redis +SQLAlchemy +starlette +uvicorn \ No newline at end of file diff --git a/backend/requirements.txt b/backend/requirements.txt index 4f680bb..cf3a264 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -1,46 +1,175 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile +# +alembic==1.15.2 + # via -r requirements.in amqp==5.3.1 + # via kombu annotated-types==0.7.0 + # via pydantic anyio==4.9.0 -bcrypt==4.3.0 + # via + # google-genai + # httpx + # starlette +argon2-cffi==23.1.0 + # via -r requirements.in +argon2-cffi-bindings==21.2.0 + # via argon2-cffi billiard==4.2.1 + # via celery +cachetools==5.5.2 + # via google-auth celery==5.5.1 + # via -r requirements.in +certifi==2025.1.31 + # via + # httpcore + # httpx + # requests cffi==1.17.1 + # via argon2-cffi-bindings +charset-normalizer==3.4.1 + # via requests click==8.1.8 + # via + # celery + # click-didyoumean + # click-plugins + # click-repl + # uvicorn click-didyoumean==0.3.1 + # via celery click-plugins==1.1.1 + # via celery click-repl==0.3.0 -cryptography==44.0.2 + # via celery ecdsa==0.19.1 + # via python-jose fastapi==0.115.12 -greenlet==3.1.1 + # via -r requirements.in +gevent==25.4.1 + # via -r requirements.in +google-auth==2.39.0 + # via + # -r requirements.in + # google-genai +google-genai==1.11.0 + # via -r requirements.in +greenlet==3.2.0 + # via + # gevent + # sqlalchemy h11==0.14.0 + # via + # httpcore + # uvicorn +httpcore==1.0.8 + # via httpx +httpx==0.28.1 + # via google-genai idna==3.10 -iniconfig==2.1.0 -kombu==5.5.2 -packaging==24.2 -passlib==1.7.4 -pluggy==1.5.0 -prompt_toolkit==3.0.50 + # via + # anyio + # httpx + # requests +kombu==5.5.3 + # via celery +mako==1.3.10 + # via alembic +markupsafe==3.0.2 + # via mako +prompt-toolkit==3.0.51 + # via click-repl psycopg2-binary==2.9.10 + # via -r requirements.in pyasn1==0.4.8 + # via + # 
pyasn1-modules + # python-jose + # rsa +pyasn1-modules==0.4.1 + # via google-auth pycparser==2.22 + # via cffi pydantic==2.11.3 -pydantic_core==2.33.1 -pytest==8.3.5 + # via + # -r requirements.in + # fastapi + # google-genai + # pydantic-settings +pydantic-core==2.33.1 + # via pydantic +pydantic-settings==2.9.1 + # via -r requirements.in python-dateutil==2.9.0.post0 + # via celery python-dotenv==1.1.0 + # via + # -r requirements.in + # pydantic-settings python-jose==3.4.0 + # via -r requirements.in python-multipart==0.0.20 + # via -r requirements.in redis==5.2.1 -rsa==4.9 + # via -r requirements.in +requests==2.32.3 + # via google-genai +rsa==4.9.1 + # via + # google-auth + # python-jose six==1.17.0 + # via + # ecdsa + # python-dateutil sniffio==1.3.1 -SQLAlchemy==2.0.40 + # via anyio +sqlalchemy==2.0.40 + # via + # -r requirements.in + # alembic starlette==0.46.2 + # via + # -r requirements.in + # fastapi +typing-extensions==4.13.2 + # via + # alembic + # anyio + # fastapi + # google-genai + # pydantic + # pydantic-core + # sqlalchemy + # typing-inspection typing-inspection==0.4.0 -typing_extensions==4.13.2 + # via + # pydantic + # pydantic-settings tzdata==2025.2 -uvicorn==0.34.1 + # via kombu +urllib3==2.4.0 + # via requests +uvicorn==0.34.2 + # via -r requirements.in vine==5.1.0 + # via + # amqp + # celery + # kombu wcwidth==0.2.13 -alembic + # via prompt-toolkit +websockets==15.0.1 + # via google-genai +zope-event==5.0 + # via gevent +zope-interface==7.2 + # via gevent + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/interfaces/nativeapp/src/screens/AdminScreen.tsx b/interfaces/nativeapp/src/screens/AdminScreen.tsx index 9785771..06d0e87 100644 --- a/interfaces/nativeapp/src/screens/AdminScreen.tsx +++ b/interfaces/nativeapp/src/screens/AdminScreen.tsx @@ -1,13 +1,18 @@ import React, { useState } from 'react'; import { View, StyleSheet } from 'react-native'; import { Button, Checkbox, Text, 
ActivityIndicator, Snackbar } from 'react-native-paper'; -import { clearDatabase } from '../api/admin'; // Revert to standard import without extension +import { clearDatabase } from '../api/admin'; +// Remove useNavigation import if no longer needed elsewhere in this file +// import { useNavigation } from '@react-navigation/native'; +import { useAuth } from '../contexts/AuthContext'; // Import useAuth const AdminScreen = () => { const [isHardClear, setIsHardClear] = useState(false); const [isLoading, setIsLoading] = useState(false); const [snackbarVisible, setSnackbarVisible] = useState(false); const [snackbarMessage, setSnackbarMessage] = useState(''); + // const navigation = useNavigation(); // Remove if not used elsewhere + const { logout } = useAuth(); // Get the logout function from context const handleClearDb = async () => { setIsLoading(true); @@ -16,12 +21,25 @@ const AdminScreen = () => { const response = await clearDatabase(isHardClear); setSnackbarMessage(response.message || 'Database cleared successfully.'); setSnackbarVisible(true); + + // If hard clear was successful, trigger the logout process from AuthContext + if (isHardClear) { + console.log('Hard clear successful, calling logout...'); + await logout(); // Call the logout function from AuthContext + // The RootNavigator will automatically switch to the AuthFlow + // No need to manually navigate or set loading to false here + return; // Exit early + } + } catch (error: any) { console.error("Error clearing database:", error); setSnackbarMessage(error.response?.data?.detail || 'Failed to clear database.'); setSnackbarVisible(true); } finally { - setIsLoading(false); + // Only set loading to false if it wasn't a hard clear (as logout handles navigation) + if (!isHardClear) { + setIsLoading(false); + } } }; @@ -78,4 +96,4 @@ const styles = StyleSheet.create({ }, }); -export default AdminScreen; +export default AdminScreen; \ No newline at end of file