Compare commits
Comparing 2c911d2ef4...main
41 Commits

| SHA1 |
|---|
| 32ca73701d |
| be68fb17bd |
| 40217173f0 |
| 682dc9a715 |
| 2db929289f |
| 99e2d13aab |
| d3fa5d7271 |
| 40da78d7cd |
| 7914be4f4d |
| 72970780e6 |
| 0c2bb5454b |
| d2de21ebeb |
| 46c6c410b9 |
| 1a99d6023c |
| 57741f5f1c |
| 0391cb3505 |
| 5e822da407 |
| b8c306721a |
| dd6637260a |
| 558243a657 |
| 65ac965977 |
| d05248a89e |
| ceef7f8a10 |
| d74819ab58 |
| aec528a656 |
| 9d821cd662 |
| 03758ede76 |
| 0d718a6055 |
| 36d373b95c |
| df9023016c |
| 1928293dc6 |
| 44b8760ab2 |
| 12b3f10d4d |
| eeb29ccc74 |
| d018c27935 |
| 8e2da8c5dc |
| 8c13906f2b |
| 62d6b8bdfd |
| 04d9136b96 |
| 22a4fc50a5 |
| 10e5a3c489 |
274  .gitea/workflows/deploy.yml  (Normal file)
@@ -0,0 +1,274 @@
# .gitea/workflows/deploy.yml

name: Build and Deploy Backend
run-name: ${{ gitea.actor }} deploying backend on Gitea Actions 🚀

on:
  # Triggers the workflow on push events but only for the main branch
  push:
    branches: [ main ]
    paths:
      - 'backend/**'
      - 'interfaces/nativeapp/**'
      - '.gitea/workflows/deploy.yml'
      - 'backend/docker-compose.deploy.yml'

  # Allows running of this workflow manually from the Actions tab
  workflow_dispatch:

  # Ensures the project will never be out of date by running a cron for this job
  # Currently set to every Sunday at 3 AM UTC
  schedule:
    - cron: '0 3 * * 0'

jobs:
  # ========================================================================
  # Job to run backend unit tests.
  # ========================================================================
  test-backend:
    name: Run Linters and Tests (Backend)
    runs-on: ubuntu-latest
    steps:
      # Checks out the repo under $GITHUB_WORKSPACE
      - name: Checkout code
        uses: actions/checkout@v4

      # Sets up Python 3.12 environment
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      # Cache pip dependencies for faster reruns
      # - name: Cache pip dependencies
      #   uses: actions/cache@v3
      #   with:
      #     path: ~/.cache/pip
      #     key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements*.txt') }}
      #     restore-keys: |
      #       ${{ runner.os }}-pip-

      - name: Install dependencies
        working-directory: ./backend
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-dev.txt

      - name: Lint with Ruff
        working-directory: ./backend
        run: |
          ruff check .

      - name: Check formatting with Black
        working-directory: ./backend
        run: |
          black --check .

      - name: Run Pytest
        working-directory: ./backend
        run: |
          pytest

  # ========================================================================
  # Job to build the backend Docker image.
  # ========================================================================
  build-backend:
    name: Build (Backend)
    runs-on: ubuntu-latest
    needs: test-backend # Ensure tests pass before deploying

    # Only run this job if triggered by a push to main or manual dispatch/schedule
    if: gitea.event_name == 'push' || gitea.event_name == 'workflow_dispatch' || gitea.event_name == 'schedule'

    steps:
      # Checks out the repo under $GITHUB_WORKSPACE
      - name: Checkout code
        uses: actions/checkout@v4

      # ------------------------------------------------------------------
      # Login to Container Registry (Using GHCR)
      # ------------------------------------------------------------------
      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ secrets.DOCKER_REGISTRY_USERNAME }} # Uses the username stored in secrets
          password: ${{ secrets.DOCKER_REGISTRY_TOKEN }} # Uses the PAT stored in secrets

      # ------------------------------------------------------------------
      # Set up Docker Buildx for advanced build features
      # ------------------------------------------------------------------
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # ------------------------------------------------------------------
      # Build and Push Docker Image
      # ------------------------------------------------------------------
      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./backend
          file: ./backend/Dockerfile # Explicit path to Dockerfile
          push: true # Push the image after building
          tags: | # Use SHA for version specific, latest for general
            ghcr.io/${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia:${{ gitea.sha }}
            ghcr.io/${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia:latest
          # Pull latest base image updates when building (good for scheduled runs)
          pull: true

  # ========================================================================
  # Job to build the frontend Nginx image.
  # ========================================================================
  build-frontend-web:
    name: Build (Frontend Web)
    runs-on: ubuntu-latest
    # needs: test-frontend
    if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule'
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'

      - name: Install frontend dependencies
        working-directory: ./interfaces/nativeapp
        run: npm ci

      - name: Build Expo web assets
        working-directory: ./interfaces/nativeapp
        run: npx expo export --platform web --output-dir dist

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ secrets.DOCKER_REGISTRY_USERNAME }}
          password: ${{ secrets.DOCKER_REGISTRY_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push frontend nginx image
        uses: docker/build-push-action@v5
        with:
          # Context is the frontend dir where Dockerfile.nginx, nginx.conf and dist/ are
          context: ./interfaces/nativeapp
          file: ./interfaces/nativeapp/Dockerfile.nginx # Path to the Nginx Dockerfile
          push: true
          tags: |
            ghcr.io/${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia-frontend:${{ gitea.sha }}
            ghcr.io/${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia-frontend:latest
          pull: true # Pull base nginx image updates

  # ========================================================================
  # Build Native Android App (Trigger EAS Build)
  # ========================================================================
  build-native-android:
    name: Build Native Android App (EAS)
    runs-on: ubuntu-latest
    # needs: test-frontend # Depends on frontend tests passing
    # Only run for deploy triggers
    if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule'
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'

      - name: Install frontend dependencies
        working-directory: ./interfaces/nativeapp
        run: npm ci
      - name: Install EAS CLI
        run: npm install -g eas-cli
      - name: Trigger EAS Build for Android
        working-directory: ./interfaces/nativeapp
        env:
          EXPO_TOKEN: ${{ secrets.EXPO_TOKEN }} # EAS token for authentication
        run: |
          eas build --platform android --profile production --non-interactive --no-wait

  # ========================================================================
  # Deploy Backend and Frontend Web to Host
  # ========================================================================
  deploy:
    name: Deploy to Host
    runs-on: ubuntu-latest
    needs:
      - build-backend # Wait for backend image build
      - build-frontend-web # Wait for frontend image build
    if: gitea.event_name == 'push' || gitea.event_name == 'workflow_dispatch' || gitea.event_name == 'schedule'
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ secrets.DOCKER_REGISTRY_USERNAME }}
          password: ${{ secrets.DOCKER_REGISTRY_TOKEN }}

      - name: Deploy Locally
        env:
          DB_HOST: ${{ vars.DB_HOST }}
          DB_USER: ${{ vars.DB_USER }}
          DB_NAME: ${{ vars.DB_NAME }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
          REDIS_URL: ${{ vars.REDIS_URL }}
          PEPPER: ${{ secrets.PEPPER }}
          JWT_SECRET_KEY: ${{ secrets.JWT_SECRET_KEY }}
          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
        run: |
          #!/bin/bash -ex

          # Define paths and names
          WORKSPACE_DIR="${{ gitea.workspace }}/backend" # Dir where deploy compose file lives
          COMPOSE_FILE="${WORKSPACE_DIR}/docker-compose.deploy.yml"
          PROJECT_NAME="maia" # Project name used by docker compose
          echo "--- Start Deployment ---"
          echo "Using compose file: ${COMPOSE_FILE}"

          # --- Verify compose file exists ---
          if [ ! -f "${COMPOSE_FILE}" ]; then
            echo "ERROR: Compose file not found at ${COMPOSE_FILE}"
            exit 1
          fi

          # --- Pull specific backend image version ---
          echo "Pulling backend image ${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia:${{ gitea.sha }}..."
          docker pull ghcr.io/${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia:${{ gitea.sha }}
          echo "Backend pull complete."

          # --- Pull specific frontend image version ---
          echo "Pulling frontend image ${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia-frontend:${{ gitea.sha }}..."
          docker pull ghcr.io/${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia-frontend:${{ gitea.sha }}
          echo "Frontend pull complete."

          # --- Pull other images defined in compose ---
          echo "Pulling other compose services for project ${PROJECT_NAME}..."
          cd "${WORKSPACE_DIR}" || exit 1
          docker compose -p "${PROJECT_NAME}" -f "${COMPOSE_FILE##*/}" pull redis db
          echo "Other service pull complete."

          # --- Update Backend image tag in compose file ---
          echo "Updating Backend image tag in ${COMPOSE_FILE##*/}..."
          sed -i "s|image: ghcr.io/${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia:.*|image: ghcr.io/${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia:${{ gitea.sha }}|g" "${COMPOSE_FILE##*/}"
          grep "image: ghcr.io/${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia:" "${COMPOSE_FILE##*/}" || echo "Backend image line not found!"

          # --- Update Frontend image tag in compose file ---
          echo "Updating Frontend image tag in ${COMPOSE_FILE##*/}..."
          sed -i "s|image: ghcr.io/${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia-frontend:.*|image: ghcr.io/${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia-frontend:${{ gitea.sha }}|g" "${COMPOSE_FILE##*/}"
          grep "image: ghcr.io/${{ secrets.DOCKER_REGISTRY_USERNAME }}/maia-frontend:" "${COMPOSE_FILE##*/}" || echo "Frontend image line not found!"

          # --- Restart services using updated compose file ---
          echo "Bringing compose stack down and up for project ${PROJECT_NAME}..."
          docker compose -p "${PROJECT_NAME}" -f "${COMPOSE_FILE##*/}" up -d --force-recreate --remove-orphans
          echo "Docker compose up command finished."
          echo "--- Deployment complete! ---"
156  .github/workflows/deploy.yml  (vendored)
@@ -1,156 +0,0 @@
# .github/workflows/deploy.yml

name: Build and Deploy Backend
on:
  # Triggers the workflow on push events but only for the main branch
  push:
    branches: [ main ]
    paths: # Only run if backend code or Docker config changes
      - 'backend/**'
      - '.github/workflows/deploy.yml'
      - 'backend/docker-compose.yml'

  # Allows running of this workflow manually from the Actions tab
  workflow_dispatch:

  # Ensures the project will never be out of date by running a cron for this job
  # Currently set to every Sunday at 3 AM UTC
  schedule:
    - cron: '0 3 * * 0'

jobs:
  # ========================================================================
  # Job to run unit tests.
  # ========================================================================
  test:
    name: Run Linters and Tests
    runs-on: ubuntu-latest
    steps:
      # Checks out the repo under $GITHUB_WORKSPACE
      - name: Checkout code
        uses: actions/checkout@v4

      # Sets up Python 3.12 environment
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.12'

      # Cache pip dependencies for faster reruns
      - name: Cache pip dependencies
        uses: actions/cache@v3
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements*.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-

      - name: Install dependencies
        working-directory: ./backend
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-dev.txt

      - name: Lint with Ruff
        working-directory: ./backend
        run: |
          ruff check .

      - name: Check formatting with Black
        working-directory: ./backend
        run: |
          black --check .

      - name: Run Pytest
        working-directory: ./backend
        run: |
          pytest

  # ========================================================================
  # Job to build and deploy the Docker image to mara.
  # ========================================================================
  build-and-deploy:
    name: Build and Deploy
    runs-on: ubuntu-latest
    needs: test # Ensure tests pass before deploying

    # Only run this job if triggered by a push to main or manual dispatch/schedule
    # This prevents it running for PRs (eventually)
    if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule'

    steps:
      # Checks out the repo under $GITHUB_WORKSPACE
      - name: Checkout code
        uses: actions/checkout@v4

      # ------------------------------------------------------------------
      # Login to Container Registry (Using GHCR)
      # ------------------------------------------------------------------
      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }} # GitHub username
          password: ${{ secrets.DOCKER_REGISTRY_TOKEN }} # Uses the PAT stored in secrets

      # ------------------------------------------------------------------
      # Set up Docker Buildx for advanced build features
      # ------------------------------------------------------------------
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # ------------------------------------------------------------------
      # Build and Push Docker Image
      # ------------------------------------------------------------------
      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./backend
          file: ./backend/Dockerfile # Explicit path to Dockerfile
          push: true # Push the image after building
          tags: | # Use SHA for version specific, latest for general
            ghcr.io/${{ github.repository_owner }}/maia:${{ github.sha }}
            ghcr.io/${{ github.repository_owner }}/maia:latest
          # Pull latest base image updates when building (good for scheduled runs)
          pull: true
          cache-from: type=gha # Github Actions cache
          cache-to: type=gha,mode=max

      # ------------------------------------------------------------------
      # Deploy to mara via SSH
      # ------------------------------------------------------------------
      - name: Deploy to Server
        uses: appleboy/ssh-action@v1.0.3
        with:
          host: ${{ secrets.SSH_HOST }}
          port: ${{ secrets.SSH_PORT }}
          username: ${{ secrets.SSH_USER }}
          key: ${{ secrets.SSH_PRIVATE_KEY }}
          script: |
            set -e # Exit script on first error
            cd ${{ secrets.DEPLOY_PATH }}
            echo "Logged into server: $(pwd)"

            # Log into GHCR on mara
            echo "Logging into GHCR..."
            echo ${{ secrets.DOCKER_REGISTRY_TOKEN }} | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin
            echo "GHCR login completed."

            # Pull the specific image version built in this workflow
            # Using the Git SHA ensures we deploy exactly what was just built
            echo "Pulling image ${{ github.sha }}..."
            docker pull ghcr.io/${{ github.repository_owner }}/maia:${{ github.sha }}

            # Also pull latest for other services to keep up to date
            docker compose pull redis db

            # Uses sed to update the compose file with the new image tag
            sed -i 's|image: ghcr.io/${{ github.repository_owner }}/maia:.*|image: ghcr.io/${{ github.repository_owner }}/maia:${{ github.sha }}|g' docker-compose.yml
            echo "Updated docker-compose.yml image tag"

            # Restart the services using the new image(s)
            echo "Bringing compose stack down and up with new image..."
            docker compose up -d --force-recreate --remove-orphans api worker db redis
            echo "Deployment complete!"
7  .gitignore  (vendored)
@@ -1,11 +1,13 @@
# backend
backend/env
backend/.env
backend/.env.local
backend/.env.prod.bak
backend/db
backend/redis_data

# frontend
interfaces/nativeapp/node_modules
interfaces/nativeapp/.expo/
interfaces/nativeapp/dist/
interfaces/nativeapp/web-build/
interfaces/nativeapp/expo-env.d.ts
interfaces/nativeapp/*.orig.*
@@ -22,3 +24,4 @@ interfaces/nativeapp/.DS_Store
interfaces/nativeapp/*.pem
interfaces/nativeapp/.env*.local
interfaces/nativeapp/*.tsbuildinfo
interfaces/nativeapp/releases
BIN  MAIA_ICON.xcf  (Normal file)

@@ -1,5 +0,0 @@
POSTGRES_USER = "maia"
POSTGRES_PASSWORD = "maia"
PEPPER = "LsD7%"
JWT_SECRET_KEY="1c8cf3ca6972b365f8108dad247e61abdcb6faff5a6c8ba00cb6fa17396702bf"
GOOGLE_API_KEY="AIzaSyBrte_mETZJce8qE6cRTSz_fHOjdjlShBk"
@@ -63,8 +63,8 @@ version_path_separator = os
# are written from script.py.mako
# output_encoding = utf-8

# sqlalchemy.url = driver://user:pass@localhost/dbname
sqlalchemy.url = postgresql://maia:maia@db:5432/maia
# sqlalchemy.url = postgresql://user:pass@localhost/dbname
# sqlalchemy.url = postgresql://maia:maia@db:5432/maia

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
@@ -2,8 +2,8 @@ import os
import sys
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool
from sqlalchemy import create_engine  # Add create_engine import

from alembic import context

@@ -25,6 +25,29 @@ config = context.config
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# --- Construct DB URL from environment variables ---
# Use environment variables similar to docker-compose
db_user = os.getenv("POSTGRES_USER", "maia")  # Default to 'maia' if not set
db_password = os.getenv("POSTGRES_PASSWORD", "maia")  # Default to 'maia' if not set
db_host = os.getenv("DB_HOST", "db")  # Default to 'db' service name
db_port = os.getenv("DB_PORT", "5432")  # Default to '5432'
db_name = os.getenv("DB_NAME", "maia")  # Default to 'maia'

# Construct the URL, falling back to alembic.ini if needed
url = os.getenv("DB_URL")
if not url:
    # Try constructing from parts if DB_URL isn't set
    url = f"postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
# As a final fallback, use the URL from alembic.ini
config_url = config.get_main_option("sqlalchemy.url")
if not url and config_url:
    url = config_url

# Update the config object so engine_from_config can potentially use it,
# though we'll primarily use the constructed 'url' directly.
config.set_main_option("sqlalchemy.url", url)
# ----------------------------------------------------

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel

@@ -51,9 +74,8 @@ def run_migrations_offline() -> None:
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        url=url,  # Use the constructed URL
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},

@@ -70,11 +92,14 @@ def run_migrations_online() -> None:
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    # Create engine directly using the constructed URL
    connectable = create_engine(url, poolclass=pool.NullPool)
    # Original approach using engine_from_config:
    # connectable = engine_from_config(
    #     config.get_section(config.config_ini_section, {}),
    #     prefix="sqlalchemy.",
    #     poolclass=pool.NullPool,
    # )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
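With the env-based URL construction above, Alembic can be pointed at any database by exporting the same variables before running it. A minimal sketch of such an invocation from the backend directory (not part of the diff; values are placeholders, and the defaults shown mirror those hard-coded in env.py):

    export POSTGRES_USER=maia POSTGRES_PASSWORD=maia
    export DB_HOST=db DB_PORT=5432 DB_NAME=maia
    alembic upgrade head   # applies migrations using the URL constructed in env.py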
@@ -1,30 +0,0 @@
"""Initial migration with existing tables

Revision ID: 69069d6184b3
Revises:
Create Date: 2025-04-21 01:14:33.233195

"""

from typing import Sequence, Union


# revision identifiers, used by Alembic.
revision: str = "69069d6184b3"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
@@ -1,30 +0,0 @@
"""Add todo table

Revision ID: 9a82960db482
Revises: 69069d6184b3
Create Date: 2025-04-21 20:33:27.028529

"""

from typing import Sequence, Union


# revision identifiers, used by Alembic.
revision: str = "9a82960db482"
down_revision: Union[str, None] = "69069d6184b3"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
@@ -1,6 +1,6 @@
# core/celery_app.py
from celery import Celery
from core.config import settings  # Import your settings
from core.config import settings

celery_app = Celery(
    "worker",
@@ -9,9 +9,15 @@ celery_app = Celery(
    include=[
        "modules.auth.tasks",
        "modules.admin.tasks",
    ],  # Add paths to modules containing tasks
    # Add other modules with tasks here, e.g., "modules.some_other_module.tasks"
        "modules.calendar.tasks",  # Add calendar tasks
    ],
)

# Optional: Update Celery configuration directly if needed
# celery_app.conf.update(task_track_started=True)
# Optional: Configure Celery Beat if you need periodic tasks later
# celery_app.conf.beat_schedule = {
#     'check-something-every-5-minutes': {
#         'task': 'your_app.tasks.check_something',
#         'schedule': timedelta(minutes=5),
#     },
# }
celery_app.conf.timezone = "UTC"  # Recommended to use UTC
@@ -6,8 +6,14 @@ DOTENV_PATH = os.path.join(os.path.dirname(__file__), "../.env")


class Settings(BaseSettings):
    # Database settings - reads DB_URL from environment or .env
    DB_URL: str = "postgresql://maia:maia@localhost:5432/maia"
    # Database settings - reads from environment or .env
    DB_PORT: int = 5432
    DB_NAME: str = "maia"
    DB_HOST: str = "localhost"
    DB_USER: str = "maia"
    DB_PASSWORD: str = "maia"

    DB_URL: str = ""

    # Redis settings - reads REDIS_URL from environment or .env, also used for Celery.
    REDIS_URL: str = "redis://localhost:6379/0"
@@ -16,11 +22,12 @@ class Settings(BaseSettings):
    JWT_ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
    REFRESH_TOKEN_EXPIRE_DAYS: int = 7
    PEPPER: str
    JWT_SECRET_KEY: str
    PEPPER: str = "pepper"
    JWT_SECRET_KEY: str = "secret"

    # Other settings
    GOOGLE_API_KEY: str = ""  # Example with a default
    GOOGLE_API_KEY: str = "google_api_key"
    EXPO_PUSH_API_URL: str = "https://exp.host/--/api/v2/push/send"

    class Config:
        # Tell pydantic-settings to load variables from a .env file
@@ -1,4 +1,3 @@
# core/database.py
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session, declarative_base
from typing import Generator
@@ -10,6 +9,8 @@ Base = declarative_base()  # Used for models
_engine = None
_SessionLocal = None

settings.DB_URL = f"postgresql://{settings.DB_USER}:{settings.DB_PASSWORD}@{settings.DB_HOST}:{settings.DB_PORT}/{settings.DB_NAME}"


def get_engine():
    global _engine
100  backend/docker-compose.deploy.yml  (Normal file)
@@ -0,0 +1,100 @@
services:
  # ----- Backend API (Uvicorn/FastAPI/Django etc.) -----
  api:
    image: ghcr.io/c-d-p/maia:44b8760ab245407ac8dcac435cca6bbfad51284b
    container_name: MAIA_API
    restart: unless-stopped
    command: uvicorn main:app --host 0.0.0.0 --port 8000
    environment:
      DB_HOST: ${DB_HOST}
      DB_USER: ${DB_USER}
      DB_NAME: ${DB_NAME}
      DB_PASSWORD: ${DB_PASSWORD}
      REDIS_URL: ${REDIS_URL}
      PEPPER: ${PEPPER}
      JWT_SECRET_KEY: ${JWT_SECRET_KEY}
      GOOGLE_API_KEY: ${GOOGLE_API_KEY}
    expose:
      - "8000"
    depends_on:
      - db
      - redis
    networks:
      - default
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.maia.rule=Host(`maia.depaoli.id.au`)"
      - "traefik.http.routers.maia.tls=true"
      - "traefik.http.routers.maia.entrypoints=secureweb"
      - "traefik.http.routers.maia.tls.certresolver=myresolver"
      - "traefik.http.services.maia.loadbalancer.server.port=8000"
      - "traefik.docker.network=host"

  # ----- Celery Worker -----
  worker:
    image: ghcr.io/c-d-p/maia:44b8760ab245407ac8dcac435cca6bbfad51284b
    container_name: MAIA_Worker
    restart: unless-stopped
    command: celery -A core.celery_app worker --loglevel=info
    environment:
      DB_HOST: ${DB_HOST}
      DB_USER: ${DB_USER}
      DB_NAME: ${DB_NAME}
      DB_PASSWORD: ${DB_PASSWORD}
      REDIS_URL: ${REDIS_URL}
      PEPPER: ${PEPPER}
      JWT_SECRET_KEY: ${JWT_SECRET_KEY}
      GOOGLE_API_KEY: ${GOOGLE_API_KEY}
    depends_on:
      - db
      - redis
    networks:
      - default

  # ----- Database (PostgreSQL) -----
  db:
    image: postgres:15 # Use a specific version
    container_name: MAIA_DB
    volumes:
      - /srv/docker/container/MAIA/db:/var/lib/postgresql/data # Persist data using a named volume
    environment:
      - POSTGRES_USER=${DB_USER}
      - POSTGRES_PASSWORD=${DB_PASSWORD}
      - POSTGRES_DB=${DB_NAME}
    networks:
      - default
    restart: unless-stopped

  # ----- Cache (Redis) -----
  redis:
    image: redis:7 # Use a specific version
    container_name: MAIA_Redis
    volumes:
      - /srv/docker/container/MAIA/redis_data:/data
    networks:
      - default
    restart: unless-stopped

  # ----- Frontend (nginx) ------
  frontend:
    image: ghcr.io/c-d-p/maia-frontend:latest
    container_name: MAIA_FRONTEND
    restart: unless-stopped
    networks:
      - default
    expose:
      - "80"
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.maia-frontend.rule=Host(`app.maia.depaoli.id.au`)"
      - "traefik.http.routers.maia-frontend.tls=true"
      - "traefik.http.routers.maia-frontend.entrypoints=secureweb"
      - "traefik.http.routers.maia-frontend.tls.certresolver=myresolver"
      - "traefik.http.services.maia-frontend.loadbalancer.server.port=80"
      - "traefik.docker.network=host"

# ----- Network Definition -----
networks:
  default: # Define a custom bridge network
    driver: bridge
    name: maia_network
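The deploy job in the Gitea workflow supplies the variables this file references from repository secrets and variables. For a local dry run of the same compose file, the following sketch shows the environment it expects (variable names are taken from the file above; the values are placeholders, not real credentials, and the invocation is assumed rather than part of the diff):

    # hypothetical local check of the deploy compose file (placeholder values)
    export DB_HOST=db DB_USER=maia DB_NAME=maia DB_PASSWORD=changeme
    export REDIS_URL=redis://redis:6379/0
    export PEPPER=changeme JWT_SECRET_KEY=changeme GOOGLE_API_KEY=changeme
    # render the file with substitutions applied to verify nothing is missing
    docker compose -p maia -f backend/docker-compose.deploy.yml config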
@@ -1,4 +1,8 @@
# docker-compose.yml

###################
### DEV COMPOSE ###
###################
services:
  # ----- Backend API (Uvicorn/FastAPI/Django etc.) -----
  api:
@@ -11,16 +15,13 @@ services:
      - .:/app
    ports:
      - "8000:8000"
    environment:
      - DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/maia
      - REDIS_URL=redis://redis:6379/0
    depends_on:
      - db
      - redis
    networks:
      - maia_network
    env_file:
      - ./.env
      - ./.env.local
    restart: unless-stopped

  # ----- Celery Worker -----
@@ -32,14 +33,11 @@ services:
    command: celery -A core.celery_app worker --loglevel=info
    volumes:
      - .:/app
    environment:
      - DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/maia
      - REDIS_URL=redis://redis:6379/0
    depends_on:
      - db
      - redis
    env_file:
      - ./.env
      - ./.env.local
    networks:
      - maia_network
    restart: unless-stopped
@@ -49,13 +47,13 @@ services:
    image: postgres:15 # Use a specific version
    container_name: MAIA-DB
    volumes:
      - postgres_data:/var/lib/postgresql/data # Persist data using a named volume
      - db:/var/lib/postgresql/data # Persist data using a named volume
    environment:
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
      - POSTGRES_DB=maia
      - POSTGRES_USER=${DB_USER}
      - POSTGRES_PASSWORD=${DB_PASSWORD}
      - POSTGRES_DB=${DB_NAME}
    env_file:
      - ./.env
      - ./.env.local
    networks:
      - maia_network
    restart: unless-stopped
@@ -70,10 +68,11 @@ services:
      - maia_network
    restart: unless-stopped

# ----- Volumes Definition -----
volumes:
  postgres_data: # Define the named volume for PostgreSQL
  redis_data: # Define the named volume for Redis
  db: # Named volume for PostgreSQL data
    driver: local
  redis_data: # Named volume for Redis data
    driver: local

# ----- Network Definition -----
networks:
@@ -1,4 +1,3 @@
# main.py
from contextlib import _AsyncGeneratorContextManager, asynccontextmanager
from typing import Any, Callable
from fastapi import FastAPI
@@ -7,16 +6,10 @@ from core.database import get_engine, Base
from modules import router
import logging


# import all models to ensure they are registered before create_all


logging.getLogger("passlib").setLevel(logging.ERROR)  # fix bc package logging is broken


# Create DB tables (remove in production; use migrations instead)
def lifespan_factory() -> Callable[[FastAPI], _AsyncGeneratorContextManager[Any]]:

    @asynccontextmanager
    async def lifespan(app: FastAPI):
        # Base.metadata.drop_all(bind=get_engine())
@@ -29,25 +22,16 @@ def lifespan_factory() -> Callable[[FastAPI], _AsyncGeneratorContextManager[Any]
lifespan = lifespan_factory()
app = FastAPI(lifespan=lifespan)

# Include module router
app.include_router(router)

# CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=[
        "http://localhost:8081",  # Keep for web testing if needed
        "http://192.168.1.9:8081",  # Add your mobile device/emulator origin (adjust port if needed)
        "http://192.168.255.221:8081",
        "https://maia.depaoli.id.au",
    ],
    allow_credentials=True,
    allow_origins=["https://app.maia.depaoli.id.au"],
    allow_methods=["*"],
    allow_headers=["*"],
)


# Health endpoint
@app.get("/api/health")
def health():
    return {"status": "ok"}
@@ -1,26 +1,33 @@
# modules/admin/api.py
from typing import Annotated
from fastapi import APIRouter, Depends  # Import Body
from pydantic import BaseModel  # Import BaseModel
from typing import Annotated, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel
from sqlalchemy.orm import Session
from core.database import get_db
from modules.auth.dependencies import admin_only
from modules.auth.models import User
from modules.notifications.service import send_push_notification
from .tasks import cleardb

router = APIRouter(prefix="/admin", tags=["admin"], dependencies=[Depends(admin_only)])


# Define a Pydantic model for the request body
class ClearDbRequest(BaseModel):
    hard: bool


class SendNotificationRequest(BaseModel):
    username: str
    title: str
    body: str
    data: Optional[dict] = None


@router.get("/")
def read_admin():
    return {"message": "Admin route"}


# Change to POST and use the request body model
@router.post("/cleardb")
def clear_db(payload: ClearDbRequest, db: Annotated[Session, Depends(get_db)]):
    """
@@ -28,6 +35,46 @@ def clear_db(payload: ClearDbRequest, db: Annotated[Session, Depends(get_db)]):
    'hard'=True: Drop and recreate all tables.
    'hard'=False: Delete data from tables except users.
    """
    hard = payload.hard  # Get 'hard' from the payload
    hard = payload.hard
    cleardb.delay(hard)
    return {"message": "Clearing database in the background", "hard": hard}


@router.post("/send-notification", status_code=status.HTTP_200_OK)
async def send_user_notification(
    payload: SendNotificationRequest,
    db: Annotated[Session, Depends(get_db)],
):
    """
    Admin endpoint to send a push notification to a specific user by username.
    """
    target_user = db.query(User).filter(User.username == payload.username).first()

    if not target_user:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"User with username '{payload.username}' not found.",
        )

    if not target_user.expo_push_token:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"User '{payload.username}' does not have a registered push token.",
        )

    success = await send_push_notification(
        push_token=target_user.expo_push_token,
        title=payload.title,
        body=payload.body,
        data=payload.data,
    )

    if not success:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to send push notification via Expo service.",
        )

    return {
        "message": f"Push notification sent successfully to user '{payload.username}'"
    }
@@ -1,4 +1 @@
# modules/admin/services.py


## temp
@@ -18,16 +18,13 @@ def cleardb(hard: bool):
    db = SessionLocal()

    if hard:
        # Drop and recreate all tables
        Base.metadata.drop_all(bind=engine)
        Base.metadata.create_all(bind=engine)
        db.commit()
        return {"message": "Database reset (HARD)"}
    else:
        # Delete data from tables except users
        tables = Base.metadata.tables.keys()
        for table_name in tables:
            # delete data from every table that isn't the users table
            if table_name != "users":
                table = Base.metadata.tables[table_name]
                print(f"Deleting table: {table_name}")
@@ -1,4 +1,3 @@
# modules/auth/api.py
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm
from jose import JWTError
@@ -25,7 +24,7 @@ from sqlalchemy.orm import Session
from typing import Annotated
from core.database import get_db
from datetime import timedelta
from core.config import settings  # Assuming settings is defined in core.config
from core.config import settings
from core.exceptions import unauthorized_exception

router = APIRouter(prefix="/auth", tags=["auth"])
@@ -1,4 +1,3 @@
# modules/auth/dependencies.py
from fastapi import Depends
from modules.auth.security import get_current_user
from modules.auth.schemas import UserRole
@@ -1,6 +1,6 @@
# modules/auth/models.py
from core.database import Base
from sqlalchemy import Column, Integer, String, Enum, DateTime
from sqlalchemy import Column, Integer, String, Enum, DateTime, Text
from sqlalchemy.orm import relationship
from enum import Enum as PyEnum

@@ -18,6 +18,7 @@ class User(Base):
    name = Column(String)
    role = Column(Enum(UserRole), nullable=False, default=UserRole.USER)
    hashed_password = Column(String)
    expo_push_token = Column(Text, nullable=True)
    calendar_events = relationship("CalendarEvent", back_populates="user")
@@ -1,4 +1,3 @@
# modules/auth/schemas.py
from enum import Enum as PyEnum
from pydantic import BaseModel
@@ -29,7 +29,7 @@ password_hasher = PasswordHasher()

def hash_password(password: str) -> str:
    """Hash a password with Argon2 (and optional pepper)."""
    peppered_password = password + settings.PEPPER  # Prepend/append pepper
    peppered_password = password + settings.PEPPER
    return password_hasher.hash(peppered_password)


@@ -47,10 +47,8 @@ def authenticate_user(username: str, password: str, db: Session) -> User | None:
    Authenticate a user by checking username/password against the database.
    Returns User object if valid, None otherwise.
    """
    # Get user from database
    user = db.query(User).filter(User.username == username).first()

    # If user not found or password doesn't match
    if not user or not verify_password(password, user.hashed_password):
        return None

@@ -65,7 +63,6 @@ def create_access_token(data: dict, expires_delta: timedelta | None = None):
        expire = datetime.now(timezone.utc) + timedelta(
            minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES
        )
    # expire = datetime.now(timezone.utc) + timedelta(seconds=5)
    to_encode.update({"exp": expire, "token_type": TokenType.ACCESS})
    return jwt.encode(
        to_encode, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM
@@ -89,22 +86,6 @@ def create_refresh_token(data: dict, expires_delta: timedelta | None = None):
def verify_token(
    token: str, expected_token_type: TokenType, db: Session
) -> TokenData | None:
    """Verify a JWT token and return TokenData if valid.

    Parameters
    ----------
    token: str
        The JWT token to be verified.
    expected_token_type: TokenType
        The expected type of token (access or refresh)
    db: Session
        Database session to fetch user data.

    Returns
    -------
    TokenData | None
        TokenData instance if the token is valid, None otherwise.
    """
    is_blacklisted = (
        db.query(TokenBlacklist).filter(TokenBlacklist.token == token).first()
        is not None
@@ -137,7 +118,6 @@ def get_current_user(
        headers={"WWW-Authenticate": "Bearer"},
    )

    # Check if the token is blacklisted
    is_blacklisted = (
        db.query(TokenBlacklist).filter(TokenBlacklist.token == token).first()
        is not None
@@ -178,7 +158,6 @@ def blacklist_tokens(access_token: str, refresh_token: str, db: Session) -> None
        )
        expires_at = datetime.fromtimestamp(payload.get("exp"))

        # Add the token to the blacklist
        blacklisted_token = TokenBlacklist(token=token, expires_at=expires_at)
        db.add(blacklisted_token)

@@ -191,7 +170,6 @@ def blacklist_token(token: str, db: Session) -> None:
    )
    expires_at = datetime.fromtimestamp(payload.get("exp"))

    # Add the token to the blacklist
    blacklisted_token = TokenBlacklist(token=token, expires_at=expires_at)
    db.add(blacklisted_token)
    db.commit()
@@ -1,4 +1,3 @@
# modules/auth/services.py
from sqlalchemy.orm import Session
from modules.auth.models import User
from modules.auth.schemas import UserResponse
BIN  backend/modules/calendar/__pycache__/tasks.cpython-312.pyc  (Normal file)
@@ -1,4 +1,3 @@
# modules/calendar/api.py
from fastapi import APIRouter, Depends, status
from sqlalchemy.orm import Session
from datetime import datetime

@@ -7,7 +7,7 @@ from sqlalchemy import (
    ForeignKey,
    JSON,
    Boolean,
)  # Add Boolean
)
from sqlalchemy.orm import relationship
from core.database import Base

@@ -18,15 +18,12 @@ class CalendarEvent(Base):
    id = Column(Integer, primary_key=True)
    title = Column(String, nullable=False)
    description = Column(String)
    start = Column(DateTime, nullable=False)
    end = Column(DateTime)
    start = Column(DateTime(timezone=True), nullable=False)
    end = Column(DateTime(timezone=True))
    location = Column(String)
    all_day = Column(Boolean, default=False)  # Add all_day column
    all_day = Column(Boolean, default=False)
    tags = Column(JSON)
    color = Column(String)  # hex code for color
    user_id = Column(
        Integer, ForeignKey("users.id"), nullable=False
    )  # <-- Relationship
    color = Column(String)
    user_id = Column(Integer, ForeignKey("users.id"), nullable=False)

    # Bi-directional relationship (for eager loading)
    user = relationship("User", back_populates="calendar_events")
@@ -1,7 +1,6 @@
# modules/calendar/schemas.py
from datetime import datetime
from pydantic import BaseModel, field_validator  # Add field_validator
from typing import List, Optional  # Add List and Optional
from pydantic import BaseModel, field_validator
from typing import List, Optional


# Base schema for common fields, including tags
@@ -7,7 +7,13 @@ from core.exceptions import not_found_exception
from modules.calendar.schemas import (
    CalendarEventCreate,
    CalendarEventUpdate,
)  # Import schemas
)

# Import the celery app instance instead of the task functions directly
from core.celery_app import celery_app

# Keep task imports if cancel_event_notifications is still called directly and synchronously
from modules.calendar.tasks import cancel_event_notifications


def create_calendar_event(db: Session, user_id: int, event_data: CalendarEventCreate):
@@ -23,6 +29,11 @@ def create_calendar_event(db: Session, user_id: int, event_data: CalendarEventCr
    db.add(event)
    db.commit()
    db.refresh(event)
    # Schedule notifications using send_task
    celery_app.send_task(
        "modules.calendar.tasks.schedule_event_notifications",  # Task name as string
        args=[event.id],
    )
    return event


@@ -114,10 +125,17 @@ def update_calendar_event(

    db.commit()
    db.refresh(event)
    # Re-schedule notifications using send_task
    celery_app.send_task(
        "modules.calendar.tasks.schedule_event_notifications", args=[event.id]
    )
    return event


def delete_calendar_event(db: Session, user_id: int, event_id: int):
    event = get_calendar_event_by_id(db, user_id, event_id)  # Reuse get_by_id for check
    # Cancel any scheduled notifications before deleting
    # Run synchronously here or make cancel_event_notifications an async task
    cancel_event_notifications(event_id)
    db.delete(event)
    db.commit()
233  backend/modules/calendar/tasks.py  (Normal file)
@@ -0,0 +1,233 @@
# backend/modules/calendar/tasks.py
import logging
import asyncio
from datetime import datetime, timedelta, time, timezone

from celery import shared_task
from celery.exceptions import Ignore

from core.celery_app import celery_app
from core.database import get_db
from modules.calendar.models import CalendarEvent
from modules.notifications.service import send_push_notification
from modules.auth.models import User  # Assuming user model is in modules/user/models.py

logger = logging.getLogger(__name__)

# Key prefix for storing scheduled task IDs in Redis (or Celery backend)
SCHEDULED_TASK_KEY_PREFIX = "calendar_event_tasks:"


def get_scheduled_task_key(event_id: int) -> str:
    return f"{SCHEDULED_TASK_KEY_PREFIX}{event_id}"


@shared_task(bind=True)
def schedule_event_notifications(self, event_id: int):
    """Schedules reminder notifications for a calendar event."""
    db_gen = get_db()
    db = next(db_gen)
    try:
        event = db.query(CalendarEvent).filter(CalendarEvent.id == event_id).first()
        if not event:
            logger.warning(
                f"Calendar event {event_id} not found for scheduling notifications."
            )
            raise Ignore()  # Don't retry if event doesn't exist

        user = db.query(User).filter(User.id == event.user_id).first()
        if not user or not user.expo_push_token:
            logger.warning(
                f"User {event.user_id} or their push token not found for event {event_id}. Skipping notification scheduling."
            )
            # Cancel any potentially existing tasks for this event if user/token is now invalid
            cancel_event_notifications(event_id)
            raise Ignore()  # Don't retry if user/token missing

        # Cancel any existing notifications for this event first
        cancel_event_notifications(event_id)  # Run synchronously within this task

        scheduled_task_ids = []
        now_utc = datetime.now(timezone.utc)

        if event.all_day:
            # Schedule one notification at 6:00 AM in the event's original timezone (or UTC if naive)
            event_start_date = event.start.date()
            notification_time_local = datetime.combine(
                event_start_date, time(6, 0), tzinfo=event.start.tzinfo
            )
            # Convert scheduled time to UTC for Celery ETA
            notification_time_utc = notification_time_local.astimezone(timezone.utc)

            if notification_time_utc > now_utc:
                task = send_event_notification.apply_async(
                    args=[event.id, user.expo_push_token, "all_day"],
                    eta=notification_time_utc,
                )
                scheduled_task_ids.append(task.id)
                logger.info(
                    f"Scheduled all-day notification for event {event_id} at {notification_time_utc} (Task ID: {task.id})"
                )
            else:
                logger.info(
                    f"All-day notification time {notification_time_utc} for event {event_id} is in the past. Skipping."
                )

        else:
            # Ensure event start time is timezone-aware (assume UTC if naive)
            event_start_utc = event.start
            if event_start_utc.tzinfo is None:
                event_start_utc = event_start_utc.replace(tzinfo=timezone.utc)
            else:
                event_start_utc = event_start_utc.astimezone(timezone.utc)

            times_before = {
                "1_hour": timedelta(hours=1),
                "30_min": timedelta(minutes=30),
            }

            for label, delta in times_before.items():
                notification_time_utc = event_start_utc - delta
                if notification_time_utc > now_utc:
                    task = send_event_notification.apply_async(
                        args=[event.id, user.expo_push_token, label],
                        eta=notification_time_utc,
                    )
                    scheduled_task_ids.append(task.id)
                    logger.info(
                        f"Scheduled {label} notification for event {event_id} at {notification_time_utc} (Task ID: {task.id})"
                    )
                else:
                    logger.info(
                        f"{label} notification time {notification_time_utc} for event {event_id} is in the past. Skipping."
                    )

        # Store the new task IDs using Celery backend (Redis)
        if scheduled_task_ids:
            key = get_scheduled_task_key(event_id)
            # Store as a simple comma-separated string
            celery_app.backend.set(key, ",".join(scheduled_task_ids))
            logger.debug(f"Stored task IDs for event {event_id}: {scheduled_task_ids}")

    except Exception as e:
        logger.exception(f"Error scheduling notifications for event {event_id}: {e}")
        # Optional: Add retry logic if appropriate
        # self.retry(exc=e, countdown=60)
    finally:
        next(db_gen, None)  # Ensure db session is closed


# Note: This task calls an async function. Ensure your Celery worker
# is configured to handle async tasks (e.g., using gevent, eventlet, or uvicorn worker).
@shared_task(bind=True)
def send_event_notification(
    self, event_id: int, user_push_token: str, notification_type: str
):
    """Sends a single reminder notification for a calendar event."""
    db_gen = get_db()
    db = next(db_gen)
    try:
        event = db.query(CalendarEvent).filter(CalendarEvent.id == event_id).first()
        if not event:
            logger.warning(
                f"Calendar event {event_id} not found for sending {notification_type} notification."
            )
            raise Ignore()  # Don't retry if event doesn't exist

        # Double-check user and token validity at the time of sending
        user = db.query(User).filter(User.id == event.user_id).first()
        if not user or user.expo_push_token != user_push_token:
            logger.warning(
                f"User {event.user_id} token mismatch or user not found for event {event_id} at notification time. Skipping."
            )
            raise Ignore()

        title = f"Upcoming: {event.title}"
        if notification_type == "all_day":
            body = f"Today: {event.title}"
            if event.description:
                body += f" - {event.description[:50]}"  # Add part of description
        elif notification_type == "1_hour":
            local_start_time = event.start.astimezone().strftime(
                "%I:%M %p"
            )  # Convert to local time for display
            body = f"Starts at {local_start_time} (in 1 hour)"
        elif notification_type == "30_min":
            local_start_time = event.start.astimezone().strftime("%I:%M %p")
            body = f"Starts at {local_start_time} (in 30 mins)"
        else:
            body = "Check your calendar for details."  # Fallback

        logger.info(
            f"Sending {notification_type} notification for event {event_id} to token {user_push_token[:10]}..."
        )
        try:
            # Call the async notification service
            success = asyncio.run(
                send_push_notification(
                    push_token=user_push_token,
                    title=title,
                    body=body,
                    data={"eventId": event.id, "type": "calendar_reminder"},
                )
            )
            if not success:
                logger.error(
                    f"Failed to send {notification_type} notification for event {event_id} via service."
                )
                # Optional: self.retry(countdown=60) # Retry sending if failed
            else:
                logger.info(
                    f"Successfully sent {notification_type} notification for event {event_id}."
                )
        except Exception as e:
            logger.exception(
                f"Error calling send_push_notification for event {event_id}: {e}"
            )
            # Optional: self.retry(exc=e, countdown=60)

    except Exception as e:
        logger.exception(
            f"General error sending {notification_type} notification for event {event_id}: {e}"
        )
        # Optional: self.retry(exc=e, countdown=60)
    finally:
        next(db_gen, None)  # Ensure db session is closed


# This is run synchronously when called, or can be called as a task itself
# @shared_task # Uncomment if you want to call this asynchronously e.g., .delay()
def cancel_event_notifications(event_id: int):
    """Cancels all scheduled reminder notifications for a calendar event."""
    key = get_scheduled_task_key(event_id)
    try:
        task_ids_bytes = celery_app.backend.get(key)

        if task_ids_bytes:
            # Decode from bytes (assuming Redis backend)
            task_ids_str = task_ids_bytes.decode("utf-8")
            task_ids = task_ids_str.split(",")
            logger.info(f"Cancelling scheduled tasks for event {event_id}: {task_ids}")
            revoked_count = 0
            for task_id in task_ids:
                if task_id:  # Ensure not empty string
                    try:
                        celery_app.control.revoke(
                            task_id.strip(), terminate=True, signal="SIGKILL"
                        )
                        revoked_count += 1
                    except Exception as revoke_err:
                        logger.error(
                            f"Error revoking task {task_id} for event {event_id}: {revoke_err}"
                        )
            # Delete the key from Redis after attempting revocation
            celery_app.backend.delete(key)
            logger.debug(
                f"Revoked {revoked_count} tasks and removed task ID key {key} from backend for event {event_id}."
            )
        else:
            logger.debug(
                f"No scheduled tasks found in backend to cancel for event {event_id} (key: {key})."
            )
    except Exception as e:
        logger.exception(f"Error cancelling notifications for event {event_id}: {e}")
@@ -1,4 +1,3 @@
# modules/nlp/api.py
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from typing import List
@@ -8,7 +7,6 @@ from core.database import get_db
from modules.auth.dependencies import get_current_user
from modules.auth.models import User

# Import the new service functions and Enum
from modules.nlp.service import (
    process_request,
    ask_ai,
@@ -17,7 +15,6 @@ from modules.nlp.service import (
    MessageSender,
)

# Import the response schema and the new ChatMessage model for response type hinting
from modules.nlp.schemas import ProcessCommandRequest, ProcessCommandResponse
from modules.calendar.service import (
    create_calendar_event,
@@ -28,7 +25,6 @@ from modules.calendar.service import (
from modules.calendar.models import CalendarEvent
from modules.calendar.schemas import CalendarEventCreate, CalendarEventUpdate

# Import TODO services, schemas, and model
from modules.todo import service as todo_service
from modules.todo.models import Todo
from modules.todo.schemas import TodoCreate, TodoUpdate
@@ -38,24 +34,22 @@ from datetime import datetime

class ChatMessageResponse(BaseModel):
    id: int
    sender: MessageSender  # Use the enum directly
    sender: MessageSender
    text: str
    timestamp: datetime

    class Config:
        from_attributes = True  # Allow Pydantic to work with ORM models
        from_attributes = True


router = APIRouter(prefix="/nlp", tags=["nlp"])


# Helper to format calendar events (expects list of CalendarEvent models)
def format_calendar_events(events: List[CalendarEvent]) -> List[str]:
    if not events:
        return ["You have no events matching that criteria."]
    formatted = ["Here are the events:"]
    for event in events:
        # Access attributes directly from the model instance
        start_str = (
            event.start.strftime("%Y-%m-%d %H:%M") if event.start else "No start time"
        )
@@ -65,7 +59,6 @@ def format_calendar_events(events: List[CalendarEvent]) -> List[str]:
    return formatted


# Helper to format TODO items (expects list of Todo models)
def format_todos(todos: List[Todo]) -> List[str]:
    if not todos:
        return ["Your TODO list is empty."]
@@ -80,7 +73,6 @@ def format_todos(todos: List[Todo]) -> List[str]:
    return formatted


# Update the response model for the endpoint
@router.post("/process-command", response_model=ProcessCommandResponse)
def process_command(
    request_data: ProcessCommandRequest,
@@ -92,34 +84,25 @@ def process_command(
    """
    user_input = request_data.user_input

    # --- Save User Message ---
    save_chat_message(
        db, user_id=current_user.id, sender=MessageSender.USER, text=user_input
    )
    # ------------------------

    command_data = process_request(user_input)
    intent = command_data["intent"]
    params = command_data["params"]
    response_text = command_data["response_text"]

    responses = [response_text]  # Start with the initial response
    responses = [response_text]

    # --- Save Initial AI Response ---
    # Save the first response generated by process_request
    save_chat_message(
        db, user_id=current_user.id, sender=MessageSender.AI, text=response_text
    )
# -----------------------------
|
||||
|
||||
if intent == "error":
|
||||
# Don't raise HTTPException here if we want to save the error message
|
||||
# Instead, return the error response directly
|
||||
# save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=response_text) # Already saved above
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
if intent == "clarification_needed" or intent == "unknown":
|
||||
# save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=response_text) # Already saved above
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
try:
|
||||
@@ -127,11 +110,9 @@ def process_command(
|
||||
case "ask_ai":
|
||||
ai_answer = ask_ai(**params)
|
||||
responses.append(ai_answer)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db, user_id=current_user.id, sender=MessageSender.AI, text=ai_answer
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "get_calendar_events":
|
||||
@@ -140,12 +121,10 @@ def process_command(
|
||||
)
|
||||
formatted_responses = format_calendar_events(events)
|
||||
responses.extend(formatted_responses)
|
||||
# --- Save Additional AI Responses ---
|
||||
for resp in formatted_responses:
|
||||
save_chat_message(
|
||||
db, user_id=current_user.id, sender=MessageSender.AI, text=resp
|
||||
)
|
||||
# ----------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "add_calendar_event":
|
||||
@@ -159,20 +138,17 @@ def process_command(
|
||||
title = created_event.title or "Untitled Event"
|
||||
add_response = f"Added: {title} starting at {start_str}."
|
||||
responses.append(add_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=add_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "update_calendar_event":
|
||||
event_id = params.pop("event_id", None)
|
||||
if event_id is None:
|
||||
# Save the error message before raising
|
||||
error_msg = "Event ID is required for update."
|
||||
save_chat_message(
|
||||
db,
|
||||
@@ -188,20 +164,17 @@ def process_command(
|
||||
title = updated_event.title or "Untitled Event"
|
||||
update_response = f"Updated event ID {updated_event.id}: {title}."
|
||||
responses.append(update_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=update_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "delete_calendar_event":
|
||||
event_id = params.get("event_id")
|
||||
if event_id is None:
|
||||
# Save the error message before raising
|
||||
error_msg = "Event ID is required for delete."
|
||||
save_chat_message(
|
||||
db,
|
||||
@@ -213,29 +186,24 @@ def process_command(
|
||||
delete_calendar_event(db, current_user.id, event_id)
|
||||
delete_response = f"Deleted event ID {event_id}."
|
||||
responses.append(delete_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=delete_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
# --- Add TODO Cases ---
|
||||
case "get_todos":
|
||||
todos: List[Todo] = todo_service.get_todos(
|
||||
db, user=current_user, **params
|
||||
)
|
||||
formatted_responses = format_todos(todos)
|
||||
responses.extend(formatted_responses)
|
||||
# --- Save Additional AI Responses ---
|
||||
for resp in formatted_responses:
|
||||
save_chat_message(
|
||||
db, user_id=current_user.id, sender=MessageSender.AI, text=resp
|
||||
)
|
||||
# ----------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "add_todo":
|
||||
@@ -247,14 +215,12 @@ def process_command(
|
||||
f"Added TODO: '{created_todo.task}' (ID: {created_todo.id})."
|
||||
)
|
||||
responses.append(add_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=add_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "update_todo":
|
||||
@@ -279,14 +245,12 @@ def process_command(
|
||||
status = "complete" if params["complete"] else "incomplete"
|
||||
update_response += f" Marked as {status}."
|
||||
responses.append(update_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=update_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "delete_todo":
|
||||
@@ -307,26 +271,21 @@ def process_command(
|
||||
f"Deleted TODO ID {deleted_todo.id}: '{deleted_todo.task}'."
|
||||
)
|
||||
responses.append(delete_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=delete_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
# --- End TODO Cases ---
|
||||
|
||||
case _:
|
||||
print(
|
||||
f"Warning: Unhandled intent '{intent}' reached api.py match statement."
|
||||
)
|
||||
# The initial response_text was already saved
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
except HTTPException as http_exc:
|
||||
# Don't save again if already saved before raising
|
||||
if http_exc.status_code != 400 or ("event_id" not in http_exc.detail.lower()):
|
||||
save_chat_message(
|
||||
db,
|
||||
@@ -340,11 +299,9 @@ def process_command(
|
||||
error_response = (
|
||||
"Sorry, I encountered an error while trying to perform that action."
|
||||
)
|
||||
# --- Save Final Error AI Response ---
|
||||
save_chat_message(
|
||||
db, user_id=current_user.id, sender=MessageSender.AI, text=error_response
|
||||
)
|
||||
# ----------------------------------
|
||||
return ProcessCommandResponse(responses=[error_response])
|
||||
|
||||
|
||||
@@ -355,6 +312,3 @@ def read_chat_history(
|
||||
"""Retrieves the last 50 chat messages for the current user."""
|
||||
history = get_chat_history(db, user_id=current_user.id, limit=50)
|
||||
return history
|
||||
|
||||
|
||||
# -------------------------------------
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
# /home/cdp/code/MAIA/backend/modules/nlp/models.py
|
||||
from sqlalchemy import Column, Integer, Text, DateTime, ForeignKey, Enum as SQLEnum
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
# modules/nlp/schemas.py
|
||||
from pydantic import BaseModel
|
||||
from typing import List
|
||||
|
||||
@@ -9,5 +8,4 @@ class ProcessCommandRequest(BaseModel):
|
||||
|
||||
class ProcessCommandResponse(BaseModel):
|
||||
responses: List[str]
|
||||
# Optional: Keep details if needed for specific frontend logic beyond display
|
||||
# details: dict | None = None
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
# modules/nlp/service.py
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import desc # Import desc for ordering
|
||||
from google import genai
|
||||
|
||||
0
backend/modules/notifications/__init__.py
Normal file
111
backend/modules/notifications/service.py
Normal file
@@ -0,0 +1,111 @@
import httpx
import logging
from typing import Optional, Dict, Any

from core.config import settings

logger = logging.getLogger(__name__)


async def send_push_notification(
    push_token: str, title: str, body: str, data: Optional[Dict[str, Any]] = None
) -> bool:
    """
    Sends a push notification to a specific Expo push token.

    Args:
        push_token: The recipient's Expo push token.
        title: The title of the notification.
        body: The main message content of the notification.
        data: Optional dictionary containing extra data to send with the notification.

    Returns:
        True if the notification was sent successfully (according to Expo API), False otherwise.
    """
    if not push_token:
        logger.warning("Attempted to send notification but no push token provided.")
        return False

    message = {
        "to": push_token,
        "sound": "default",
        "title": title,
        "body": body,
        "priority": "high",
        "channelId": "default",
    }
    if data:
        message["data"] = data

    async with httpx.AsyncClient() as client:
        try:
            response = await client.post(
                settings.EXPO_PUSH_API_URL,
                headers={
                    "Accept": "application/json",
                    "Accept-Encoding": "gzip, deflate",
                    "Content-Type": "application/json",
                },
                json=message,
                timeout=10.0,
            )
            response.raise_for_status()  # Raise exception for 4xx/5xx responses

            response_data = response.json()
            logger.debug(f"Expo push API response: {response_data}")

            # Check for top-level errors first
            if "errors" in response_data:
                error_messages = [
                    err.get("message", "Unknown error")
                    for err in response_data["errors"]
                ]
                logger.error(
                    f"Expo API returned errors for {push_token[:10]}...: {'; '.join(error_messages)}"
                )
                return False

            # Check the status in the data field
            receipt = response_data.get("data")

            # if receipts is a list
            if receipt:
                status = receipt.get("status")

                if status == "ok":
                    logger.info(
                        f"Successfully sent push notification to token: {push_token[:10]}..."
                    )
                    return True
                else:
                    # Log details if the status is not 'ok'
                    error_details = receipt.get("details")
                    error_message = receipt.get("message")
                    logger.error(
                        f"Failed to send push notification to {push_token[:10]}... "
                        f"Expo status: {status}, Message: {error_message}, Details: {error_details}"
                    )
                    return False
            else:
                # Log if 'data' is missing, not a list, or an empty list
                logger.error(
                    f"Unexpected Expo API response format or empty 'data' field for {push_token[:10]}... "
                    f"Response: {response_data}"
                )
                return False

        except httpx.HTTPStatusError as e:
            logger.error(
                f"HTTP error sending push notification to {push_token[:10]}...: {e.response.status_code} - {e.response.text}"
            )
            return False
        except httpx.RequestError as e:
            logger.error(
                f"Network error sending push notification to {push_token[:10]}...: {e}"
            )
            return False
        except Exception as e:
            logger.exception(
                f"Unexpected error sending push notification to {push_token[:10]}...: {e}"
            )
            return False
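
A brief usage sketch of the new service follows. It is illustrative only: the placeholder token and the __main__ wrapper are not part of this diff, and it assumes it runs inside the backend package with settings.EXPO_PUSH_API_URL configured.

# Minimal, hypothetical usage sketch of modules/notifications/service.py
import asyncio

from modules.notifications.service import send_push_notification


async def _demo() -> None:
    ok = await send_push_notification(
        push_token="ExponentPushToken[xxxxxxxxxxxxxxxxxxxxxx]",  # placeholder token
        title="Test",
        body="Hello from MAIA",
        data={"type": "calendar_reminder"},
    )
    print("delivered to Expo:", ok)  # True when Expo returns {"data": {"status": "ok", ...}}


if __name__ == "__main__":
    asyncio.run(_demo())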
@@ -1,2 +1 @@
|
||||
# backend/modules/todo/__init__.py
|
||||
# This file makes the 'todo' directory a Python package.
|
||||
|
||||
@@ -1,17 +1,16 @@
|
||||
# backend/modules/todo/api.py
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.orm import Session
|
||||
from typing import List
|
||||
|
||||
from . import service, schemas
|
||||
from core.database import get_db
|
||||
from modules.auth.dependencies import get_current_user # Corrected import
|
||||
from modules.auth.models import User # Assuming User model is in auth.models
|
||||
from modules.auth.dependencies import get_current_user
|
||||
from modules.auth.models import User
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/todos",
|
||||
tags=["todos"],
|
||||
dependencies=[Depends(get_current_user)], # Corrected dependency
|
||||
dependencies=[Depends(get_current_user)],
|
||||
responses={404: {"description": "Not found"}},
|
||||
)
|
||||
|
||||
@@ -20,7 +19,7 @@ router = APIRouter(
|
||||
def create_todo_endpoint(
|
||||
todo: schemas.TodoCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user), # Corrected dependency
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
return service.create_todo(db=db, todo=todo, user=current_user)
|
||||
|
||||
@@ -30,7 +29,7 @@ def read_todos_endpoint(
|
||||
skip: int = 0,
|
||||
limit: int = 100,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user), # Corrected dependency
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
todos = service.get_todos(db=db, user=current_user, skip=skip, limit=limit)
|
||||
return todos
|
||||
@@ -40,7 +39,7 @@ def read_todos_endpoint(
|
||||
def read_todo_endpoint(
|
||||
todo_id: int,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user), # Corrected dependency
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
db_todo = service.get_todo(db=db, todo_id=todo_id, user=current_user)
|
||||
if db_todo is None:
|
||||
@@ -53,7 +52,7 @@ def update_todo_endpoint(
|
||||
todo_id: int,
|
||||
todo_update: schemas.TodoUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user), # Corrected dependency
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
return service.update_todo(
|
||||
db=db, todo_id=todo_id, todo_update=todo_update, user=current_user
|
||||
@@ -64,6 +63,6 @@ def update_todo_endpoint(
|
||||
def delete_todo_endpoint(
|
||||
todo_id: int,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user), # Corrected dependency
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
return service.delete_todo(db=db, todo_id=todo_id, user=current_user)
|
||||
|
||||
@@ -14,6 +14,4 @@ class Todo(Base):
|
||||
complete = Column(Boolean, default=False)
|
||||
owner_id = Column(Integer, ForeignKey("users.id"))
|
||||
|
||||
owner = relationship(
|
||||
"User"
|
||||
) # Add relationship if needed, assuming User model exists in auth.models
|
||||
owner = relationship("User")
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# modules/user/api.py
|
||||
from typing import Annotated
|
||||
from fastapi import APIRouter, Depends
|
||||
from typing import Annotated, Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.orm import Session
|
||||
from pydantic import BaseModel
|
||||
|
||||
from core.database import get_db
|
||||
from core.exceptions import not_found_exception, forbidden_exception
|
||||
@@ -12,6 +12,41 @@ from modules.auth.models import User
router = APIRouter(prefix="/user", tags=["user"])


# --- Pydantic Schema for Push Token --- #
class PushTokenData(BaseModel):
    token: str
    device_name: Optional[str] = None
    token_type: str  # Expecting 'expo'


@router.post("/push-token", status_code=status.HTTP_200_OK)
def save_push_token(
    token_data: PushTokenData,
    db: Annotated[Session, Depends(get_db)],
    current_user: Annotated[User, Depends(get_current_user)],
):
    """
    Save the Expo push token for the current user.
    Requires user to be logged in.
    """
    if token_data.token_type != "expo":
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid token_type. Only 'expo' is supported.",
        )

    # Update the user's push token
    current_user.expo_push_token = token_data.token
    # Optionally, you could store device_name somewhere if needed, perhaps in a separate table
    # For now, we just update the token on the user model

    db.add(current_user)
    db.commit()
    db.refresh(current_user)

    return {"message": "Push token saved successfully"}


@router.get("/me", response_model=UserResponse)
def me(
    db: Annotated[Session, Depends(get_db)],

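
An illustrative client call for the new endpoint (not part of this diff): it assumes the router is mounted under the /api prefix used elsewhere in this repo and that ACCESS_TOKEN holds a valid bearer token from the auth login flow.

# Hypothetical example of registering a device's Expo push token with the backend.
import httpx

ACCESS_TOKEN = "..."  # obtained from the login endpoint; placeholder

resp = httpx.post(
    "https://maia.depaoli.id.au/api/user/push-token",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    json={
        "token": "ExponentPushToken[xxxxxxxxxxxxxxxxxxxxxx]",  # placeholder token
        "device_name": "Pixel 7",  # optional
        "token_type": "expo",  # anything else is rejected with 400
    },
)
resp.raise_for_status()
print(resp.json())  # {"message": "Push token saved successfully"}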
@@ -1,6 +1,7 @@
|
||||
pytest
|
||||
pytest_mock
|
||||
pytest-cov # For checking test coverage (optional)
|
||||
ruff # Or flake8, pylint etc. for linting
|
||||
black # For code formatting checks
|
||||
testcontainers
|
||||
faker
|
||||
faker
|
||||
|
||||
@@ -14,4 +14,5 @@ python-multipart
|
||||
redis
|
||||
SQLAlchemy
|
||||
starlette
|
||||
uvicorn
|
||||
uvicorn
|
||||
eventlet
|
||||
|
||||
@@ -47,8 +47,12 @@ click-plugins==1.1.1
|
||||
# via celery
|
||||
click-repl==0.3.0
|
||||
# via celery
|
||||
dnspython==2.7.0
|
||||
# via eventlet
|
||||
ecdsa==0.19.1
|
||||
# via python-jose
|
||||
eventlet==0.39.1
|
||||
# via -r requirements.in
|
||||
fastapi==0.115.12
|
||||
# via -r requirements.in
|
||||
gevent==25.4.1
|
||||
@@ -61,6 +65,7 @@ google-genai==1.11.0
|
||||
# via -r requirements.in
|
||||
greenlet==3.2.0
|
||||
# via
|
||||
# eventlet
|
||||
# gevent
|
||||
# sqlalchemy
|
||||
h11==0.14.0
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
from fastapi import status
|
||||
from fastapi.testclient import TestClient
|
||||
from sqlalchemy.orm import Session
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime, timedelta, timezone # Add timezone
|
||||
from pytest_mock import MockerFixture # Import MockerFixture
|
||||
|
||||
from tests.helpers import generators
|
||||
from modules.calendar.models import CalendarEvent # Assuming model exists
|
||||
@@ -10,13 +11,14 @@ from tests.conftest import fake
|
||||
|
||||
# Helper function to create an event payload
|
||||
def create_event_payload(start_offset_days=0, end_offset_days=1):
|
||||
start_time = datetime.utcnow() + timedelta(days=start_offset_days)
|
||||
end_time = datetime.utcnow() + timedelta(days=end_offset_days)
|
||||
# Ensure datetimes are timezone-aware (UTC)
|
||||
start_time = datetime.now(timezone.utc) + timedelta(days=start_offset_days)
|
||||
end_time = datetime.now(timezone.utc) + timedelta(days=end_offset_days)
|
||||
return {
|
||||
"title": fake.sentence(nb_words=3),
|
||||
"description": fake.text(),
|
||||
"start": start_time.isoformat(), # Rename start_time to start
|
||||
"end": end_time.isoformat(), # Rename end_time to end
|
||||
"start": start_time.isoformat().replace("+00:00", "Z"), # Ensure Z suffix
|
||||
"end": end_time.isoformat().replace("+00:00", "Z"), # Ensure Z suffix
|
||||
"all_day": fake.boolean(),
|
||||
}
|
||||
|
||||
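
A quick illustration of the Z-suffix conversion used in the helper above (example timestamp only, not from this diff):

from datetime import datetime, timezone

ts = datetime(2025, 5, 1, 3, 0, tzinfo=timezone.utc)
print(ts.isoformat())                         # 2025-05-01T03:00:00+00:00
print(ts.isoformat().replace("+00:00", "Z"))  # 2025-05-01T03:00:00Z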
@@ -31,13 +33,20 @@ def test_create_event_unauthorized(client: TestClient) -> None:
|
||||
assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
def test_create_event_success(db: Session, client: TestClient) -> None:
|
||||
def test_create_event_success(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None:
|
||||
"""Test creating a calendar event successfully."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock the celery task sending
|
||||
mock_send_task = mocker.patch(
|
||||
"core.celery_app.celery_app.send_task"
|
||||
) # Corrected patch target
|
||||
|
||||
response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
@@ -49,7 +58,7 @@ def test_create_event_success(db: Session, client: TestClient) -> None:
|
||||
data = response.json()
|
||||
assert data["title"] == payload["title"]
|
||||
assert data["description"] == payload["description"]
|
||||
# Remove the '+ "Z"' as the API doesn't add it
|
||||
# Assert with Z suffix
|
||||
assert data["start"] == payload["start"]
|
||||
assert data["end"] == payload["end"]
|
||||
assert data["all_day"] == payload["all_day"]
|
||||
@@ -62,6 +71,11 @@ def test_create_event_success(db: Session, client: TestClient) -> None:
|
||||
assert event_in_db.user_id == user.id
|
||||
assert event_in_db.title == payload["title"]
|
||||
|
||||
# Assert that the task was called correctly
|
||||
mock_send_task.assert_called_once_with(
|
||||
"modules.calendar.tasks.schedule_event_notifications", args=[data["id"]]
|
||||
)
|
||||
|
||||
|
||||
# --- Test Get Events ---
|
||||
|
||||
@@ -72,36 +86,49 @@ def test_get_events_unauthorized(client: TestClient) -> None:
|
||||
assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
def test_get_events_success(db: Session, client: TestClient) -> None:
|
||||
def test_get_events_success(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test getting all calendar events for a user."""
|
||||
user, password = generators.create_user(db)
|
||||
user, password = generators.create_user(
|
||||
db, username="testuser_get_events"
|
||||
) # Unique username
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
# Create a couple of events for the user
|
||||
payload1 = create_event_payload(0, 1)
|
||||
client.post(
|
||||
create_rsp1 = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload1,
|
||||
)
|
||||
assert create_rsp1.status_code == status.HTTP_201_CREATED
|
||||
|
||||
payload2 = create_event_payload(2, 3)
|
||||
client.post(
|
||||
create_rsp2 = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload2,
|
||||
)
|
||||
assert create_rsp2.status_code == status.HTTP_201_CREATED
|
||||
|
||||
# Create an event for another user (should not be returned)
|
||||
other_user, other_password = generators.create_user(db)
|
||||
other_user, other_password = generators.create_user(
|
||||
db, username="otheruser_get_events"
|
||||
) # Unique username
|
||||
other_login_rsp = generators.login(db, other_user.username, other_password)
|
||||
other_access_token = other_login_rsp["access_token"]
|
||||
other_payload = create_event_payload(4, 5)
|
||||
client.post(
|
||||
create_rsp_other = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {other_access_token}"},
|
||||
json=other_payload,
|
||||
)
|
||||
assert create_rsp_other.status_code == status.HTTP_201_CREATED
|
||||
|
||||
response = client.get(
|
||||
"/api/calendar/events", headers={"Authorization": f"Bearer {access_token}"}
|
||||
@@ -115,35 +142,51 @@ def test_get_events_success(db: Session, client: TestClient) -> None:
|
||||
assert data[1]["user_id"] == user.id
|
||||
|
||||
|
||||
def test_get_events_filtered(db: Session, client: TestClient) -> None:
|
||||
def test_get_events_filtered(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test getting filtered calendar events for a user."""
|
||||
user, password = generators.create_user(db)
|
||||
user, password = generators.create_user(
|
||||
db, username="testuser_filter_events"
|
||||
) # Unique username
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
# Create events
|
||||
payload1 = create_event_payload(0, 1) # Today -> Tomorrow
|
||||
client.post(
|
||||
create_rsp1 = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload1,
|
||||
)
|
||||
assert create_rsp1.status_code == status.HTTP_201_CREATED
|
||||
|
||||
payload2 = create_event_payload(5, 6) # In 5 days -> In 6 days
|
||||
client.post(
|
||||
create_rsp2 = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload2,
|
||||
)
|
||||
assert create_rsp2.status_code == status.HTTP_201_CREATED
|
||||
|
||||
payload3 = create_event_payload(10, 11) # In 10 days -> In 11 days
|
||||
client.post(
|
||||
create_rsp3 = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload3,
|
||||
)
|
||||
assert create_rsp3.status_code == status.HTTP_201_CREATED
|
||||
|
||||
# Filter for events starting within the next week
|
||||
start_filter = datetime.utcnow().isoformat()
|
||||
end_filter = (datetime.utcnow() + timedelta(days=7)).isoformat()
|
||||
start_filter = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
|
||||
end_filter = (
|
||||
(datetime.now(timezone.utc) + timedelta(days=7))
|
||||
.isoformat()
|
||||
.replace("+00:00", "Z")
|
||||
)
|
||||
|
||||
response = client.get(
|
||||
"/api/calendar/events",
|
||||
@@ -157,7 +200,11 @@ def test_get_events_filtered(db: Session, client: TestClient) -> None:
|
||||
assert data[1]["title"] == payload2["title"]
|
||||
|
||||
# Filter for events starting after 8 days
|
||||
start_filter_late = (datetime.utcnow() + timedelta(days=8)).isoformat()
|
||||
start_filter_late = (
|
||||
(datetime.now(timezone.utc) + timedelta(days=8))
|
||||
.isoformat()
|
||||
.replace("+00:00", "Z")
|
||||
)
|
||||
response = client.get(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
@@ -172,34 +219,48 @@ def test_get_events_filtered(db: Session, client: TestClient) -> None:
|
||||
# --- Test Get Event By ID ---
|
||||
|
||||
|
||||
def test_get_event_by_id_unauthorized(db: Session, client: TestClient) -> None:
|
||||
def test_get_event_by_id_unauthorized(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test getting a specific event without authentication."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
|
||||
response = client.get(f"/api/calendar/events/{event_id}")
|
||||
assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
def test_get_event_by_id_success(db: Session, client: TestClient) -> None:
|
||||
def test_get_event_by_id_success(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test getting a specific event successfully."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
|
||||
response = client.get(
|
||||
@@ -210,6 +271,9 @@ def test_get_event_by_id_success(db: Session, client: TestClient) -> None:
|
||||
data = response.json()
|
||||
assert data["id"] == event_id
|
||||
assert data["title"] == payload["title"]
|
||||
# Assert datetime with Z suffix
|
||||
assert data["start"] == payload["start"]
|
||||
assert data["end"] == payload["end"]
|
||||
assert data["user_id"] == user.id
|
||||
|
||||
|
||||
@@ -227,20 +291,31 @@ def test_get_event_by_id_not_found(db: Session, client: TestClient) -> None:
|
||||
assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
def test_get_event_by_id_forbidden(db: Session, client: TestClient) -> None:
|
||||
def test_get_event_by_id_forbidden(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test getting another user's event."""
|
||||
user1, password_user1 = generators.create_user(db)
|
||||
user2, password_user2 = generators.create_user(db)
|
||||
user1, password_user1 = generators.create_user(
|
||||
db, username="user1_forbidden_get"
|
||||
) # Unique username
|
||||
user2, password_user2 = generators.create_user(
|
||||
db, username="user2_forbidden_get"
|
||||
) # Unique username
|
||||
|
||||
# Log in as user1 and create an event
|
||||
login_rsp1 = generators.login(db, user1.username, password_user1)
|
||||
access_token1 = login_rsp1["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token1}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
|
||||
# Log in as user2 and try to get user1's event
|
||||
@@ -259,17 +334,24 @@ def test_get_event_by_id_forbidden(db: Session, client: TestClient) -> None:
|
||||
# --- Test Update Event ---
|
||||
|
||||
|
||||
def test_update_event_unauthorized(db: Session, client: TestClient) -> None:
|
||||
def test_update_event_unauthorized(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test updating an event without authentication."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
update_payload = {"title": "Updated Title"}
|
||||
|
||||
@@ -277,12 +359,20 @@ def test_update_event_unauthorized(db: Session, client: TestClient) -> None:
|
||||
assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
def test_update_event_success(db: Session, client: TestClient) -> None:
|
||||
def test_update_event_success(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test updating an event successfully."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch(
|
||||
"core.celery_app.celery_app.send_task", return_value=None
|
||||
) # Mock for creation
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
@@ -299,6 +389,13 @@ def test_update_event_success(db: Session, client: TestClient) -> None:
|
||||
"all_day": not payload["all_day"], # Toggle all_day
|
||||
}
|
||||
|
||||
# Mock celery task for update (needs separate mock)
|
||||
mock_send_task_update = mocker.patch(
|
||||
"modules.calendar.service.celery_app.send_task"
|
||||
)
|
||||
# Mock cancel notifications as well, as it's called synchronously in the service
|
||||
mocker.patch("modules.calendar.tasks.cancel_event_notifications")
|
||||
|
||||
response = client.patch(
|
||||
f"/api/calendar/events/{event_id}",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
@@ -310,7 +407,8 @@ def test_update_event_success(db: Session, client: TestClient) -> None:
|
||||
assert data["title"] == update_payload["title"]
|
||||
assert data["description"] == update_payload["description"]
|
||||
assert data["all_day"] == update_payload["all_day"]
|
||||
assert data["start"] == payload["start"] # Check correct field name 'start'
|
||||
# Assert datetime with Z suffix
|
||||
assert data["start"] == payload["start"]
|
||||
assert data["user_id"] == user.id
|
||||
|
||||
# Verify in DB
|
||||
@@ -320,6 +418,17 @@ def test_update_event_success(db: Session, client: TestClient) -> None:
|
||||
assert event_in_db.description == update_payload["description"]
|
||||
assert event_in_db.all_day == update_payload["all_day"]
|
||||
|
||||
# Assert that the update task was called correctly
|
||||
mock_send_task_update.assert_called_once_with(
|
||||
"modules.calendar.tasks.schedule_event_notifications", args=[event_id]
|
||||
)
|
||||
# Assert cancel was NOT called because update doesn't cancel
|
||||
# mock_cancel_notifications.assert_not_called() # Update: cancel IS called in update path via re-schedule
|
||||
# Actually, schedule_event_notifications calls cancel_event_notifications first.
|
||||
# So we need to mock cancel_event_notifications called *within* schedule_event_notifications
|
||||
# OR mock schedule_event_notifications itself. Let's stick to mocking send_task.
|
||||
# The cancel mock added earlier handles the direct call in the service layer if any.
|
||||
|
||||
|
||||
def test_update_event_not_found(db: Session, client: TestClient) -> None:
|
||||
"""Test updating a non-existent event."""
|
||||
@@ -337,20 +446,31 @@ def test_update_event_not_found(db: Session, client: TestClient) -> None:
|
||||
assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
def test_update_event_forbidden(db: Session, client: TestClient) -> None:
|
||||
def test_update_event_forbidden(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test updating another user's event."""
|
||||
user1, password_user1 = generators.create_user(db)
|
||||
user2, password_user2 = generators.create_user(db)
|
||||
user1, password_user1 = generators.create_user(
|
||||
db, username="user1_forbidden_update"
|
||||
) # Unique username
|
||||
user2, password_user2 = generators.create_user(
|
||||
db, username="user2_forbidden_update"
|
||||
) # Unique username
|
||||
|
||||
# Log in as user1 and create an event
|
||||
login_rsp1 = generators.login(db, user1.username, password_user1)
|
||||
access_token1 = login_rsp1["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token1}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
|
||||
# Log in as user2 and try to update user1's event
|
||||
@@ -371,29 +491,42 @@ def test_update_event_forbidden(db: Session, client: TestClient) -> None:
|
||||
# --- Test Delete Event ---
|
||||
|
||||
|
||||
def test_delete_event_unauthorized(db: Session, client: TestClient) -> None:
|
||||
def test_delete_event_unauthorized(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test deleting an event without authentication."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
|
||||
response = client.delete(f"/api/calendar/events/{event_id}")
|
||||
assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
def test_delete_event_success(db: Session, client: TestClient) -> None:
|
||||
def test_delete_event_success(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None:
|
||||
"""Test deleting an event successfully."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock the celery task sending for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
@@ -408,12 +541,20 @@ def test_delete_event_success(db: Session, client: TestClient) -> None:
|
||||
event_in_db = db.query(CalendarEvent).filter(CalendarEvent.id == event_id).first()
|
||||
assert event_in_db is not None
|
||||
|
||||
# Mock the cancel_event_notifications function to prevent Redis call
|
||||
mock_cancel_notifications = mocker.patch(
|
||||
"modules.calendar.service.cancel_event_notifications" # Target the function as used in service.py
|
||||
)
|
||||
|
||||
response = client.delete(
|
||||
f"/api/calendar/events/{event_id}",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
)
|
||||
assert response.status_code == status.HTTP_204_NO_CONTENT
|
||||
|
||||
# Assert that cancel_event_notifications was called
|
||||
mock_cancel_notifications.assert_called_once_with(event_id)
|
||||
|
||||
# Verify event is deleted from DB
|
||||
event_in_db = db.query(CalendarEvent).filter(CalendarEvent.id == event_id).first()
|
||||
assert event_in_db is None
|
||||
@@ -441,20 +582,31 @@ def test_delete_event_not_found(db: Session, client: TestClient) -> None:
|
||||
assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
def test_delete_event_forbidden(db: Session, client: TestClient) -> None:
|
||||
def test_delete_event_forbidden(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test deleting another user's event."""
|
||||
user1, password_user1 = generators.create_user(db)
|
||||
user2, password_user2 = generators.create_user(db)
|
||||
user1, password_user1 = generators.create_user(
|
||||
db, username="user1_forbidden_delete"
|
||||
) # Unique username
|
||||
user2, password_user2 = generators.create_user(
|
||||
db, username="user2_forbidden_delete"
|
||||
) # Unique username
|
||||
|
||||
# Log in as user1 and create an event
|
||||
login_rsp1 = generators.login(db, user1.username, password_user1)
|
||||
access_token1 = login_rsp1["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token1}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
|
||||
# Log in as user2 and try to delete user1's event
|
||||
|
||||
2
interfaces/nativeapp/.env.cam
Normal file
@@ -0,0 +1,2 @@
|
||||
EXPO_PUBLIC_API_URL='https://maia.depaoli.id.au/api'
|
||||
EXPO_PROJECT_ID='au.com.seedeep.maia'
|
||||
@@ -1,5 +1,5 @@
|
||||
// App.tsx
|
||||
import React, { useCallback } from 'react'; // Removed useEffect, useState as they are implicitly used by useFonts
|
||||
import React, { useCallback, useEffect } from 'react'; // Add useEffect
|
||||
import { Platform, View } from 'react-native';
|
||||
import { Provider as PaperProvider } from 'react-native-paper';
|
||||
import { NavigationContainer, DarkTheme as NavigationDarkTheme } from '@react-navigation/native'; // Import NavigationDarkTheme
|
||||
@@ -8,10 +8,14 @@ import { StatusBar } from 'expo-status-bar';
|
||||
import * as SplashScreen from 'expo-splash-screen';
|
||||
import { useFonts } from 'expo-font';
|
||||
|
||||
import { AuthProvider } from './src/contexts/AuthContext';
|
||||
import { AuthProvider, useAuth } from './src/contexts/AuthContext'; // Import useAuth
|
||||
import RootNavigator from './src/navigation/RootNavigator';
|
||||
import theme from './src/constants/theme'; // This is the Paper theme
|
||||
// Removed CombinedDarkTheme import as we'll use NavigationDarkTheme directly for NavigationContainer
|
||||
import theme from './src/constants/theme';
|
||||
import {
|
||||
registerForPushNotificationsAsync,
|
||||
sendPushTokenToBackend,
|
||||
setupNotificationHandlers
|
||||
} from './src/services/notificationService'; // Import notification functions
|
||||
|
||||
// Keep the splash screen visible while we fetch resources
|
||||
SplashScreen.preventAutoHideAsync();
|
||||
@@ -30,6 +34,43 @@ const navigationTheme = {
  },
};

// Wrapper component to handle notification logic after auth state is known
function AppContent() {
  const { user } = useAuth(); // Get user state

  useEffect(() => {
    // Setup notification handlers (listeners)
    const cleanupNotificationHandlers = setupNotificationHandlers();

    // Register for push notifications only if user is logged in
    const registerAndSendToken = async () => {
      if (user) { // Only register if logged in
        console.log('[App] User logged in, attempting to register for push notifications...');
        const token = await registerForPushNotificationsAsync();
        if (token) {
          console.log('[App] Push token obtained, sending to backend...');
          await sendPushTokenToBackend(token);
        } else {
          console.log('[App] Could not get push token.');
        }
      } else {
        console.log('[App] User not logged in, skipping push notification registration.');
        // Optionally: If you need to clear the token on the backend when logged out,
        // you might need a separate API call here or handle it server-side based on user activity.
      }
    };

    registerAndSendToken();

    // Cleanup listeners on component unmount
    return () => {
      cleanupNotificationHandlers();
    };
  }, [user]); // Re-run when user logs in or out

  return <RootNavigator />;
}

export default function App() {
  const [fontsLoaded, fontError] = useFonts({
    'Inter-Regular': require('./src/assets/fonts/Inter-Regular.ttf'),
@@ -63,7 +104,8 @@ export default function App() {
|
||||
<PaperProvider theme={theme}>
|
||||
{/* NavigationContainer uses the simplified navigationTheme */}
|
||||
<NavigationContainer theme={navigationTheme}>
|
||||
<RootNavigator />
|
||||
{/* Use AppContent which contains RootNavigator and notification logic */}
|
||||
<AppContent />
|
||||
</NavigationContainer>
|
||||
<StatusBar
|
||||
style="light" // Assuming dark theme
|
||||
|
||||
16
interfaces/nativeapp/Dockerfile.nginx
Normal file
@@ -0,0 +1,16 @@
# ./frontend/Dockerfile.nginx

FROM nginx:1.28-alpine

COPY nginx.conf /etc/nginx/conf.d/default.conf

# Remove default Nginx welcome page
RUN rm /usr/share/nginx/html/*

# Copy the pre-built Expo web output
COPY dist/ /usr/share/nginx/html

EXPOSE 80

# Start Nginx in the foreground
CMD ["nginx", "-g", "daemon off;"]
@@ -1,14 +1,14 @@
|
||||
{
|
||||
"expo": {
|
||||
"name": "webapp",
|
||||
"name": "MAIA",
|
||||
"slug": "webapp",
|
||||
"version": "1.0.0",
|
||||
"orientation": "portrait",
|
||||
"icon": "./assets/icon.png",
|
||||
"userInterfaceStyle": "light",
|
||||
"icon": "./src/assets/MAIA_ICON.png",
|
||||
"userInterfaceStyle": "dark",
|
||||
"newArchEnabled": true,
|
||||
"splash": {
|
||||
"image": "./assets/splash-icon.png",
|
||||
"image": "./src/assets/MAIA_ICON.png",
|
||||
"resizeMode": "contain",
|
||||
"backgroundColor": "#ffffff"
|
||||
},
|
||||
@@ -17,10 +17,12 @@
|
||||
},
|
||||
"android": {
|
||||
"adaptiveIcon": {
|
||||
"foregroundImage": "./assets/adaptive-icon.png",
|
||||
"foregroundImage": "./src/assets/MAIA_ICON.png",
|
||||
"backgroundColor": "#ffffff"
|
||||
},
|
||||
"softwareKeyboardLayoutMode": "resize"
|
||||
"softwareKeyboardLayoutMode": "resize",
|
||||
"package": "au.com.seedeep.maia",
|
||||
"googleServicesFile": "./google-services.json"
|
||||
},
|
||||
"web": {
|
||||
"favicon": "./assets/favicon.png"
|
||||
@@ -28,6 +30,12 @@
|
||||
"plugins": [
|
||||
"expo-secure-store",
|
||||
"expo-font"
|
||||
]
|
||||
],
|
||||
"extra": {
|
||||
"eas": {
|
||||
"projectId": "4d7d70ce-a4d8-4307-8827-8ef713b95b78"
|
||||
}
|
||||
},
|
||||
"owner": "cdp202"
|
||||
}
|
||||
}
|
||||
|
||||
1331
interfaces/nativeapp/dist/_expo/static/js/web/index-0ef639b7ebb0f592fe73fd94db0b4205.js
vendored
Normal file
|
After Width: | Height: | Size: 653 B |
BIN
interfaces/nativeapp/dist/assets/node_modules/@react-navigation/elements/lib/module/assets/back-icon.35ba0eaec5a4f5ed12ca16fabeae451d.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 207 B |
BIN
interfaces/nativeapp/dist/assets/node_modules/@react-navigation/elements/lib/module/assets/clear-icon.c94f6478e7ae0cdd9f15de1fcb9e5e55.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 220 B |
|
After Width: | Height: | Size: 334 B |
|
After Width: | Height: | Size: 502 B |
|
After Width: | Height: | Size: 645 B |
BIN
interfaces/nativeapp/dist/assets/node_modules/@react-navigation/elements/lib/module/assets/close-icon.808e1b1b9b53114ec2838071a7e6daa7.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 141 B |
|
After Width: | Height: | Size: 201 B |
|
After Width: | Height: | Size: 266 B |
|
After Width: | Height: | Size: 332 B |
BIN
interfaces/nativeapp/dist/assets/node_modules/@react-navigation/elements/lib/module/assets/search-icon.286d67d3f74808a60a78d3ebf1a5fb57.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 928 B |
BIN
interfaces/nativeapp/dist/assets/src/assets/MAIA_ICON.55dab8d84a31f13e7cdf223a69c97d91.png
vendored
Normal file
|
After Width: | Height: | Size: 71 KiB |
BIN
interfaces/nativeapp/dist/assets/src/assets/fonts/Inter-Bold.8b04b3bd9435341377d7f4b4d68b6ecc.ttf
vendored
Normal file
BIN
interfaces/nativeapp/dist/assets/src/assets/fonts/Inter-Light.65ec965bd90e1a297cdb3be407420abc.ttf
vendored
Normal file
BIN
interfaces/nativeapp/dist/assets/src/assets/fonts/Inter-Medium.4591e900425d177e6ba268d165bf12e8.ttf
vendored
Normal file
BIN
interfaces/nativeapp/dist/assets/src/assets/fonts/Inter-Regular.e48c1217adab2a0e44f8df400d33c325.ttf
vendored
Normal file
BIN
interfaces/nativeapp/dist/assets/src/assets/fonts/Inter-Thin.1e9e30c74648950a240427636b6c1992.ttf
vendored
Normal file
BIN
interfaces/nativeapp/dist/favicon.ico
vendored
Normal file
|
After Width: | Height: | Size: 14 KiB |
37
interfaces/nativeapp/dist/index.html
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta httpEquiv="X-UA-Compatible" content="IE=edge" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no" />
|
||||
<title>MAIA</title>
|
||||
<!-- The `react-native-web` recommended style reset: https://necolas.github.io/react-native-web/docs/setup/#root-element -->
|
||||
<style id="expo-reset">
|
||||
/* These styles make the body full-height */
|
||||
html,
|
||||
body {
|
||||
height: 100%;
|
||||
}
|
||||
/* These styles disable body scrolling if you are using <ScrollView> */
|
||||
body {
|
||||
overflow: hidden;
|
||||
}
|
||||
/* These styles make the root element full-height */
|
||||
#root {
|
||||
display: flex;
|
||||
height: 100%;
|
||||
flex: 1;
|
||||
}
|
||||
</style>
|
||||
<link rel="shortcut icon" href="/favicon.ico" /></head>
|
||||
|
||||
<body>
|
||||
<!-- Use static rendering with Expo Router to support running without JavaScript. -->
|
||||
<noscript>
|
||||
You need to enable JavaScript to run this app.
|
||||
</noscript>
|
||||
<!-- The root element for your Expo app. -->
|
||||
<div id="root"></div>
|
||||
<script src="/_expo/static/js/web/index-0ef639b7ebb0f592fe73fd94db0b4205.js" defer></script>
|
||||
</body>
|
||||
</html>
|
||||
1
interfaces/nativeapp/dist/metadata.json
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":0,"bundler":"metro","fileMetadata":{}}
|
||||
21
interfaces/nativeapp/eas.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"cli": {
|
||||
"version": ">= 16.3.2",
|
||||
"appVersionSource": "remote"
|
||||
},
|
||||
"build": {
|
||||
"development": {
|
||||
"developmentClient": true,
|
||||
"distribution": "internal"
|
||||
},
|
||||
"preview": {
|
||||
"distribution": "internal"
|
||||
},
|
||||
"production": {
|
||||
"autoIncrement": true
|
||||
}
|
||||
},
|
||||
"submit": {
|
||||
"production": {}
|
||||
}
|
||||
}
|
||||
29
interfaces/nativeapp/google-services.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"project_info": {
|
||||
"project_number": "190108602323",
|
||||
"project_id": "maia-4ddcf",
|
||||
"storage_bucket": "maia-4ddcf.firebasestorage.app"
|
||||
},
|
||||
"client": [
|
||||
{
|
||||
"client_info": {
|
||||
"mobilesdk_app_id": "1:190108602323:android:dd073dd13774d87d64a926",
|
||||
"android_client_info": {
|
||||
"package_name": "au.com.seedeep.maia"
|
||||
}
|
||||
},
|
||||
"oauth_client": [],
|
||||
"api_key": [
|
||||
{
|
||||
"current_key": "AIzaSyBrKtXnwNq_fX3B5ak3kKWFZ4V87-llsEo"
|
||||
}
|
||||
],
|
||||
"services": {
|
||||
"appinvite_service": {
|
||||
"other_platform_oauth_client": []
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"configuration_version": "1"
|
||||
}
|
||||
13
interfaces/nativeapp/maia-firebase-private-key.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"type": "service_account",
|
||||
"project_id": "maia-4ddcf",
|
||||
"private_key_id": "8ea1d5b1110f712c1ea863442a267e8b35b2aca7",
|
||||
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDkpd2/2cXAhhtg\n8ogpg6zp4LRQ4+YrHbnMRI4nccHxf8/YGgfi5hEs6OXDLT4bb9FbHMIsq8h6pJXe\nWnkdNaEaAqeebQ83pT7bQKsTDCx/YXenJ31rrwTzq4cjcBhwd04fIfH1bu7vd7ru\nJHFlsf7/Zb93yahfCV0yyP22FIeskIhqUutWY7RTpm6zUFlKs8QKIKiWVOTJiKvo\nNAcUK4BDmeDRKF/2wdFjgkXl7R6Ev9UzWf2+gE19RJY8ml25lGzG+fWLnnhx092x\naClGim3G0FRhQr5iyN++2Q82stWyRS7R85jRb8s/b3LT0knVrPrAAQasBHVcSSfp\n6MO4flp7AgMBAAECggEAGqyE9ZQ0vzSF7iXtH5a2beRidMtZdy81FTDsOorJWuCT\nwTysLdq0Jz6WS1I0XCQL0urEdkymCzS3LST12yP+AthLcK59Z3r2HcLqEkNJz6Rx\nvoTbW1wkIj8g+U/i8f/hE720ifLimfooSw7iUcBVpLrcft9+LnQbtMiA3KSBfW54\nmzYLWanXgBhKVMiGyR3FpnzDoLcO3xbItsLhaF/DlNN5FTvDCNQCQQwrlzkTTC+Q\npBf/Va+UMljIOYWaNfhgwzsiO86KpmKyWiVd+lhnrZfj/KEZjX+e8InSYd/D3dqn\nwnXY84pwRi2THCY0Hs2iDiX9uEnnq6fwh1I4B2xUIQKBgQD4msFXpl6+CU0iepmz\n2xpvo9AFX/VoQYoDz73DnCjcbFxldX6lWy8gEryQCPbB3Ir48xO+/OdVS2xCiSlx\nj+RqlIGf7oPHxEAJyJpiu93n/Zug/EJovjX5PxyD6Ye6ECr23yQfK20YRM/mdlJp\nm/0cZ7jEkXQLermDK1BAtUGd2wKBgQDrcyG47mjwZj9vG/Besl0VX+OTvlxrd2Gx\nAC7e27xkgNViSd8gZTGna0+Kp+sk6iP9h1KAqbFqpQejGPPvxtLkDuFbffjOWNoa\nKd9ERBxf0MEP2/dWiyusDDm+FvhSYAnKfHmtEQc+DMJ+5bNujDuRRcfrXxnmNEdt\n/WcpZ8bn4QKBgA8LXnPtb4JUkcRqYu7NbZYf9bC9k95RSQbeBX/W7WoZbKX/LEDZ\necqZF6wnvrcQn6BdJW7DY0R4If8MyeNDb/E7N3T0PClUqQNujlk3QUCOymI9oc8w\n45dHyHP7J+mMnOz/p/Hy8NEtKN+rfWVCuViEtlu+6aTgMmXLszmXPndNAoGAXh6Z\n/mkffeoBtZK/lbtLRn4cZTUVkMgaPz1Jf0DroGl342CQV0zceoaFN3JEp28JkBGG\nQ3SSPYVW9jXFXbZnG09verlyuln+ZbMTUyC/DvZOFt7hkrDzdkU01+4quhM2FsGH\nik1iTcWgAkYkYi6gqUPx1P8hRUrkuu0vTff0JUECgYBUf3Jhoh6XqLMMdnQvEj1Z\ndcrzdKFoSCB9sVuBqaEFu5sHQwc3HIodXGW1LT0eA7N0UAs4AZViNxCfMKCYoH13\nUIP2+EGy+a2RNkoezEANG0wwRa49yot8aDYQRNvKORIdkD10RIVORb0RJPldTpGP\nl9FKkEe5IAsEbwyn3pNmSQ==\n-----END PRIVATE KEY-----\n",
|
||||
"client_email": "firebase-adminsdk-fbsvc@maia-4ddcf.iam.gserviceaccount.com",
|
||||
"client_id": "100360447602089015870",
|
||||
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
||||
"token_uri": "https://oauth2.googleapis.com/token",
|
||||
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
||||
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/firebase-adminsdk-fbsvc%40maia-4ddcf.iam.gserviceaccount.com",
|
||||
"universe_domain": "googleapis.com"
|
||||
}
|
||||