Dockerized everything and added CI/CD deployment

This commit is contained in:
c-d-p
2025-04-22 22:54:31 +02:00
parent bf147af3ef
commit 02d191853b
17 changed files with 434 additions and 71 deletions

90
.github/workflows/deploy.yml vendored Normal file
View File

@@ -0,0 +1,90 @@
# .github/workflows/deploy.yml
name: Build and Deploy Backend

on:
  # Run on pushes to main, but only when backend code or Docker config changes.
  push:
    branches: [main]
    paths:
      - 'backend/**'
      - '.github/workflows/deploy.yml'
      - 'backend/docker-compose.yml'
  # Allow running this workflow manually from the Actions tab.
  workflow_dispatch:
  # Weekly rebuild (Sunday 03:00 UTC) so base-image security updates are picked up.
  schedule:
    - cron: '0 3 * * 0'

# Least-privilege token permissions for this workflow.
permissions:
  contents: read
  packages: write

env:
  # Docker/GHCR image names must be all lowercase — the previous "MAIA" tag
  # would be rejected by the registry. Single definition so the build, pull,
  # and sed steps below can never drift apart.
  IMAGE_NAME: ghcr.io/${{ github.repository_owner }}/maia

jobs:
  build-and-deploy:
    runs-on: ubuntu-latest
    steps:
      # Check out the repo under $GITHUB_WORKSPACE.
      - name: Checkout code
        uses: actions/checkout@v4

      # ------------------------------------------------------------------
      # Log in to the GitHub Container Registry (GHCR)
      # ------------------------------------------------------------------
      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.DOCKER_REGISTRY_TOKEN }}  # PAT with packages:write scope

      # ------------------------------------------------------------------
      # Set up Docker Buildx for cache-enabled builds
      # ------------------------------------------------------------------
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # ------------------------------------------------------------------
      # Build and push the backend image (SHA-pinned tag + latest alias)
      # ------------------------------------------------------------------
      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./backend
          file: ./backend/Dockerfile  # Explicit path to the Dockerfile
          push: true
          # SHA tag pins exactly what this run built; latest is a convenience alias.
          tags: |
            ${{ env.IMAGE_NAME }}:${{ github.sha }}
            ${{ env.IMAGE_NAME }}:latest
          # Refresh base layers on every build (important for the scheduled runs).
          pull: true
          cache-from: type=gha  # GitHub Actions layer cache
          cache-to: type=gha,mode=max

      # ------------------------------------------------------------------
      # Deploy to the server via SSH
      # ------------------------------------------------------------------
      - name: Deploy to Server
        uses: appleboy/ssh-action@v1.0.3
        with:
          host: ${{ secrets.SSH_HOST }}
          username: ${{ secrets.SSH_USER }}
          key: ${{ secrets.SSH_PRIVATE_KEY }}
          script: |
            set -e  # Abort on the first failing command
            cd ${{ secrets.DEPLOY_PATH }}
            echo "Logged into server: $(pwd)"
            # Pull the exact image this workflow run built (pinned by Git SHA).
            docker pull ${{ env.IMAGE_NAME }}:${{ github.sha }}
            # Keep the supporting services up to date as well.
            docker-compose pull redis db
            # Point the compose file at the freshly built image tag.
            sed -i 's|image: ${{ env.IMAGE_NAME }}:.*|image: ${{ env.IMAGE_NAME }}:${{ github.sha }}|g' docker-compose.yml
            echo "Updated docker-compose.yml image tag"
            # Recreate the services so they pick up the new image.
            echo "Bringing compose stack down and up with new image..."
            docker-compose up -d --force-recreate --remove-orphans api worker db redis
            echo "Deployment complete!"

View File

@@ -1,3 +1,5 @@
POSTGRES_USER = "maia"
POSTGRES_PASSWORD = "maia"
PEPPER = "LsD7%" PEPPER = "LsD7%"
JWT_SECRET_KEY="1c8cf3ca6972b365f8108dad247e61abdcb6faff5a6c8ba00cb6fa17396702bf" JWT_SECRET_KEY="1c8cf3ca6972b365f8108dad247e61abdcb6faff5a6c8ba00cb6fa17396702bf"
GOOGLE_API_KEY="AIzaSyBrte_mETZJce8qE6cRTSz_fHOjdjlShBk" GOOGLE_API_KEY="AIzaSyBrte_mETZJce8qE6cRTSz_fHOjdjlShBk"

19
backend/Dockerfile Normal file
View File

@@ -0,0 +1,19 @@
# backend/Dockerfile
FROM python:3.12-slim

WORKDIR /app

# Don't write .pyc files; don't buffer stdout/stderr (keeps container logs live).
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# Install dependencies first so this layer stays cached unless requirements change.
COPY ./requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Copy application code AFTER installing dependencies for better layer caching.
# NOTE(review): this copies the entire build context — confirm a .dockerignore
# exists that excludes .env and other local-only files from the image.
COPY . /app/

# Run as an unprivileged user. chown to user AND group ("appuser:appuser") —
# the previous "chown -R appuser /app" left group ownership as root.
RUN adduser --disabled-password --gecos "" appuser && chown -R appuser:appuser /app
USER appuser

# Documents the port uvicorn listens on (the compose file supplies the command).
EXPOSE 8000

0
backend/core/__init__.py Normal file
View File

Binary file not shown.

View File

@@ -1,10 +1,14 @@
# core/celery_app.py # core/celery_app.py
from celery import Celery from celery import Celery
from core.config import settings from core.config import settings # Import your settings
celery = Celery( celery_app = Celery(
"maia", "worker",
broker=f"redis://{settings.REDIS_HOST}:{settings.REDIS_PORT}/0", broker=settings.REDIS_URL,
backend=f"redis://{settings.REDIS_HOST}:{settings.REDIS_PORT}/1", backend=settings.REDIS_URL,
include=["modules.auth.tasks"], # List all task modules here include=["modules.auth.tasks", "modules.admin.tasks"] # Add paths to modules containing tasks
# Add other modules with tasks here, e.g., "modules.some_other_module.tasks"
) )
# Optional: Update Celery configuration directly if needed
# celery_app.conf.update(task_track_started=True)

View File

@@ -1,24 +1,30 @@
# core/config.py # core/config.py
from pydantic_settings import BaseSettings from pydantic_settings import BaseSettings
from os import getenv from pydantic import Field # Import Field for potential default values if needed
from dotenv import load_dotenv import os
load_dotenv() # Load .env file
class Settings(BaseSettings): class Settings(BaseSettings):
DB_URL: str = "postgresql://maia:maia@localhost:5432/maia" # Database settings - reads DB_URL from environment or .env
DB_URL: str
REDIS_HOST: str = "localhost" # Redis settings - reads REDIS_URL from environment or .env, also used for Celery.
REDIS_PORT: int = 6379 REDIS_URL: str
# JWT settings - reads from environment or .env
JWT_ALGORITHM: str = "HS256" JWT_ALGORITHM: str = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES: int = 30 ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
# ACCESS_TOKEN_EXPIRE_MINUTES: int = 1
REFRESH_TOKEN_EXPIRE_DAYS: int = 7 REFRESH_TOKEN_EXPIRE_DAYS: int = 7
PEPPER: str
PEPPER: str = getenv("PEPPER", "") JWT_SECRET_KEY: str
JWT_SECRET_KEY: str = getenv("JWT_SECRET_KEY", "")
GOOGLE_API_KEY: str = getenv("GOOGLE_API_KEY", "") # Other settings
GOOGLE_API_KEY: str = "" # Example with a default
class Config:
# Tell pydantic-settings to load variables from a .env file
env_file = '.env'
env_file_encoding = 'utf-8'
extra = 'ignore'
# Create a single instance of the settings
settings = Settings() settings = Settings()

View File

@@ -1,23 +1,81 @@
# docker-compose.yml # docker-compose.yml
services: services:
postgres: # ----- Backend API (Uvicorn/FastAPI/Django etc.) -----
image: postgres:14 api:
environment: build:
POSTGRES_USER: maia context: .
POSTGRES_PASSWORD: maia dockerfile: Dockerfile
POSTGRES_DB: maia container_name: MAIA-API
ports: command: uvicorn main:app --host 0.0.0.0 --port 8000 --reload
- "5432:5432"
volumes: volumes:
- postgres_data:/var/lib/postgresql/data - .:/app
redis:
image: redis:7
ports: ports:
- "6379:6379" - "8000:8000"
environment:
- DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/maia
- REDIS_URL=redis://redis:6379/0
depends_on:
- db
- redis
networks:
- maia_network
env_file:
- ./.env
restart: unless-stopped
# ----- Celery Worker -----
worker:
build:
context: .
dockerfile: Dockerfile
container_name: MAIA-Worker
command: celery -A core.celery_app worker --loglevel=info
volumes:
- .:/app
environment:
- DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/maia
- REDIS_URL=redis://redis:6379/0
depends_on:
- db
- redis
env_file:
- ./.env
networks:
- maia_network
restart: unless-stopped
# ----- Database (PostgreSQL) -----
db:
image: postgres:15 # Use a specific version
container_name: MAIA-DB
volumes:
- postgres_data:/var/lib/postgresql/data # Persist data using a named volume
environment:
- POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- POSTGRES_DB=maia
env_file:
- ./.env
networks:
- maia_network
restart: unless-stopped
# ----- Cache (Redis) -----
redis:
image: redis:7 # Use a specific version
container_name: MAIA-Redis
volumes: volumes:
- redis_data:/data - redis_data:/data
networks:
- maia_network
restart: unless-stopped
# ----- Volumes Definition -----
volumes: volumes:
postgres_data: postgres_data: # Define the named volume for PostgreSQL
redis_data: redis_data: # Define the named volume for Redis
# ----- Network Definition -----
networks:
maia_network: # Define a custom bridge network
driver: bridge

View File

@@ -6,7 +6,7 @@ from sqlalchemy.orm import Session
from core.database import Base, get_db from core.database import Base, get_db
from modules.auth.models import User, UserRole from modules.auth.models import User, UserRole
from modules.auth.dependencies import admin_only from modules.auth.dependencies import admin_only
from .tasks import cleardb
router = APIRouter(prefix="/admin", tags=["admin"], dependencies=[Depends(admin_only)]) router = APIRouter(prefix="/admin", tags=["admin"], dependencies=[Depends(admin_only)])
@@ -27,20 +27,5 @@ def clear_db(payload: ClearDbRequest, db: Annotated[Session, Depends(get_db)]):
'hard'=False: Delete data from tables except users. 'hard'=False: Delete data from tables except users.
""" """
hard = payload.hard # Get 'hard' from the payload hard = payload.hard # Get 'hard' from the payload
if hard: cleardb.delay(hard)
# ... existing hard clear logic ... return {"message": "Clearing database in the background", "hard": hard}
Base.metadata.drop_all(bind=db.get_bind())
Base.metadata.create_all(bind=db.get_bind())
db.commit()
return {"message": "Database reset (HARD)"}
else:
# ... existing soft clear logic ...
tables = Base.metadata.tables.keys()
for table_name in tables:
# delete all tables that isn't the users table
if table_name != "users":
table = Base.metadata.tables[table_name]
print(f"Deleting table: {table_name}")
db.execute(table.delete())
db.commit()
return {"message": "Database cleared"}

View File

@@ -0,0 +1,35 @@
from core.celery_app import celery_app
@celery_app.task
def cleardb(hard: bool) -> dict:
    """
    Clear the database in a background Celery worker.

    'hard'=True: Drop and recreate all tables.
    'hard'=False: Delete row data from every table except 'users'.

    Returns:
        A small status dict describing which clear mode ran.
    """
    # Imported lazily so the worker process only builds an engine when the
    # task actually runs (and to avoid import cycles at module load time).
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker
    from core.config import settings
    from core.database import Base

    engine = create_engine(settings.DB_URL)
    SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
    db = SessionLocal()
    try:
        if hard:
            # Drop and recreate the full schema (DDL executes on the engine).
            Base.metadata.drop_all(bind=engine)
            Base.metadata.create_all(bind=engine)
            db.commit()
            return {"message": "Database reset (HARD)"}

        # Soft clear: wipe rows from every table except the users table.
        # NOTE(review): iteration order ignores FK dependencies — may fail if
        # a foreign key references a table deleted later; confirm schema.
        for table_name in Base.metadata.tables.keys():
            if table_name != "users":
                table = Base.metadata.tables[table_name]
                print(f"Deleting table: {table_name}")
                db.execute(table.delete())
        db.commit()
        return {"message": "Database cleared"}
    finally:
        # Always release the session and pooled connections — the original
        # leaked a session and an engine's connection pool on every invocation.
        db.close()
        engine.dispose()

17
backend/requirements.in Normal file
View File

@@ -0,0 +1,17 @@
alembic
argon2-cffi
celery
fastapi
gevent
google-auth
google-genai
psycopg2-binary
pydantic
pydantic-settings
python-dotenv
python-jose
python-multipart
redis
SQLAlchemy
starlette
uvicorn

View File

@@ -1,46 +1,175 @@
#
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile
#
alembic==1.15.2
# via -r requirements.in
amqp==5.3.1 amqp==5.3.1
# via kombu
annotated-types==0.7.0 annotated-types==0.7.0
# via pydantic
anyio==4.9.0 anyio==4.9.0
bcrypt==4.3.0 # via
# google-genai
# httpx
# starlette
argon2-cffi==23.1.0
# via -r requirements.in
argon2-cffi-bindings==21.2.0
# via argon2-cffi
billiard==4.2.1 billiard==4.2.1
# via celery
cachetools==5.5.2
# via google-auth
celery==5.5.1 celery==5.5.1
# via -r requirements.in
certifi==2025.1.31
# via
# httpcore
# httpx
# requests
cffi==1.17.1 cffi==1.17.1
# via argon2-cffi-bindings
charset-normalizer==3.4.1
# via requests
click==8.1.8 click==8.1.8
# via
# celery
# click-didyoumean
# click-plugins
# click-repl
# uvicorn
click-didyoumean==0.3.1 click-didyoumean==0.3.1
# via celery
click-plugins==1.1.1 click-plugins==1.1.1
# via celery
click-repl==0.3.0 click-repl==0.3.0
cryptography==44.0.2 # via celery
ecdsa==0.19.1 ecdsa==0.19.1
# via python-jose
fastapi==0.115.12 fastapi==0.115.12
greenlet==3.1.1 # via -r requirements.in
gevent==25.4.1
# via -r requirements.in
google-auth==2.39.0
# via
# -r requirements.in
# google-genai
google-genai==1.11.0
# via -r requirements.in
greenlet==3.2.0
# via
# gevent
# sqlalchemy
h11==0.14.0 h11==0.14.0
# via
# httpcore
# uvicorn
httpcore==1.0.8
# via httpx
httpx==0.28.1
# via google-genai
idna==3.10 idna==3.10
iniconfig==2.1.0 # via
kombu==5.5.2 # anyio
packaging==24.2 # httpx
passlib==1.7.4 # requests
pluggy==1.5.0 kombu==5.5.3
prompt_toolkit==3.0.50 # via celery
mako==1.3.10
# via alembic
markupsafe==3.0.2
# via mako
prompt-toolkit==3.0.51
# via click-repl
psycopg2-binary==2.9.10 psycopg2-binary==2.9.10
# via -r requirements.in
pyasn1==0.4.8 pyasn1==0.4.8
# via
# pyasn1-modules
# python-jose
# rsa
pyasn1-modules==0.4.1
# via google-auth
pycparser==2.22 pycparser==2.22
# via cffi
pydantic==2.11.3 pydantic==2.11.3
pydantic_core==2.33.1 # via
pytest==8.3.5 # -r requirements.in
# fastapi
# google-genai
# pydantic-settings
pydantic-core==2.33.1
# via pydantic
pydantic-settings==2.9.1
# via -r requirements.in
python-dateutil==2.9.0.post0 python-dateutil==2.9.0.post0
# via celery
python-dotenv==1.1.0 python-dotenv==1.1.0
# via
# -r requirements.in
# pydantic-settings
python-jose==3.4.0 python-jose==3.4.0
# via -r requirements.in
python-multipart==0.0.20 python-multipart==0.0.20
# via -r requirements.in
redis==5.2.1 redis==5.2.1
rsa==4.9 # via -r requirements.in
requests==2.32.3
# via google-genai
rsa==4.9.1
# via
# google-auth
# python-jose
six==1.17.0 six==1.17.0
# via
# ecdsa
# python-dateutil
sniffio==1.3.1 sniffio==1.3.1
SQLAlchemy==2.0.40 # via anyio
sqlalchemy==2.0.40
# via
# -r requirements.in
# alembic
starlette==0.46.2 starlette==0.46.2
# via
# -r requirements.in
# fastapi
typing-extensions==4.13.2
# via
# alembic
# anyio
# fastapi
# google-genai
# pydantic
# pydantic-core
# sqlalchemy
# typing-inspection
typing-inspection==0.4.0 typing-inspection==0.4.0
typing_extensions==4.13.2 # via
# pydantic
# pydantic-settings
tzdata==2025.2 tzdata==2025.2
uvicorn==0.34.1 # via kombu
urllib3==2.4.0
# via requests
uvicorn==0.34.2
# via -r requirements.in
vine==5.1.0 vine==5.1.0
# via
# amqp
# celery
# kombu
wcwidth==0.2.13 wcwidth==0.2.13
alembic # via prompt-toolkit
websockets==15.0.1
# via google-genai
zope-event==5.0
# via gevent
zope-interface==7.2
# via gevent
# The following packages are considered to be unsafe in a requirements file:
# setuptools

View File

@@ -1,13 +1,18 @@
import React, { useState } from 'react'; import React, { useState } from 'react';
import { View, StyleSheet } from 'react-native'; import { View, StyleSheet } from 'react-native';
import { Button, Checkbox, Text, ActivityIndicator, Snackbar } from 'react-native-paper'; import { Button, Checkbox, Text, ActivityIndicator, Snackbar } from 'react-native-paper';
import { clearDatabase } from '../api/admin'; // Revert to standard import without extension import { clearDatabase } from '../api/admin';
// Remove useNavigation import if no longer needed elsewhere in this file
// import { useNavigation } from '@react-navigation/native';
import { useAuth } from '../contexts/AuthContext'; // Import useAuth
const AdminScreen = () => { const AdminScreen = () => {
const [isHardClear, setIsHardClear] = useState(false); const [isHardClear, setIsHardClear] = useState(false);
const [isLoading, setIsLoading] = useState(false); const [isLoading, setIsLoading] = useState(false);
const [snackbarVisible, setSnackbarVisible] = useState(false); const [snackbarVisible, setSnackbarVisible] = useState(false);
const [snackbarMessage, setSnackbarMessage] = useState(''); const [snackbarMessage, setSnackbarMessage] = useState('');
// const navigation = useNavigation(); // Remove if not used elsewhere
const { logout } = useAuth(); // Get the logout function from context
const handleClearDb = async () => { const handleClearDb = async () => {
setIsLoading(true); setIsLoading(true);
@@ -16,12 +21,25 @@ const AdminScreen = () => {
const response = await clearDatabase(isHardClear); const response = await clearDatabase(isHardClear);
setSnackbarMessage(response.message || 'Database cleared successfully.'); setSnackbarMessage(response.message || 'Database cleared successfully.');
setSnackbarVisible(true); setSnackbarVisible(true);
// If hard clear was successful, trigger the logout process from AuthContext
if (isHardClear) {
console.log('Hard clear successful, calling logout...');
await logout(); // Call the logout function from AuthContext
// The RootNavigator will automatically switch to the AuthFlow
// No need to manually navigate or set loading to false here
return; // Exit early
}
} catch (error: any) { } catch (error: any) {
console.error("Error clearing database:", error); console.error("Error clearing database:", error);
setSnackbarMessage(error.response?.data?.detail || 'Failed to clear database.'); setSnackbarMessage(error.response?.data?.detail || 'Failed to clear database.');
setSnackbarVisible(true); setSnackbarVisible(true);
} finally { } finally {
setIsLoading(false); // Only set loading to false if it wasn't a hard clear (as logout handles navigation)
if (!isHardClear) {
setIsLoading(false);
}
} }
}; };
@@ -78,4 +96,4 @@ const styles = StyleSheet.create({
}, },
}); });
export default AdminScreen; export default AdminScreen;