Compare commits
2c911d2ef4 ... 44b8760ab2
10 commits
| Author | SHA1 | Date |
|---|---|---|
|  | 44b8760ab2 |  |
|  | 12b3f10d4d |  |
|  | eeb29ccc74 |  |
|  | d018c27935 |  |
|  | 8e2da8c5dc |  |
|  | 8c13906f2b |  |
|  | 62d6b8bdfd |  |
|  | 04d9136b96 |  |
|  | 22a4fc50a5 |  |
|  | 10e5a3c489 |  |
.gitignore (vendored), 3 changes
@@ -1,6 +1,8 @@
# backend
backend/env
backend/.env
backend/db
backend/redis_data

# frontend
interfaces/nativeapp/node_modules
@@ -22,3 +24,4 @@ interfaces/nativeapp/.DS_Store
interfaces/nativeapp/*.pem
interfaces/nativeapp/.env*.local
interfaces/nativeapp/*.tsbuildinfo
interfaces/nativeapp/releases
MAIA_ICON.xcf (new binary file). Binary file not shown.
@@ -1,5 +1,10 @@
POSTGRES_USER = "maia"
POSTGRES_PASSWORD = "maia"
DB_HOST = "db"
DB_USER = "maia"
DB_PASSWORD = "maia"
DB_NAME = "maia"

REDIS_URL = "redis://redis:6379"

PEPPER = "LsD7%"
JWT_SECRET_KEY="1c8cf3ca6972b365f8108dad247e61abdcb6faff5a6c8ba00cb6fa17396702bf"
GOOGLE_API_KEY="AIzaSyBrte_mETZJce8qE6cRTSz_fHOjdjlShBk"
Binary file not shown.
@@ -63,8 +63,8 @@ version_path_separator = os
# are written from script.py.mako
# output_encoding = utf-8

# sqlalchemy.url = driver://user:pass@localhost/dbname
sqlalchemy.url = postgresql://maia:maia@db:5432/maia
# sqlalchemy.url = postgresql://user:pass@localhost/dbname
# sqlalchemy.url = postgresql://maia:maia@db:5432/maia

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
@@ -2,8 +2,8 @@ import os
import sys
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool
from sqlalchemy import create_engine # Add create_engine import

from alembic import context

@@ -25,6 +25,29 @@ config = context.config
if config.config_file_name is not None:
fileConfig(config.config_file_name)

# --- Construct DB URL from environment variables ---
# Use environment variables similar to docker-compose
db_user = os.getenv("POSTGRES_USER", "maia") # Default to 'maia' if not set
db_password = os.getenv("POSTGRES_PASSWORD", "maia") # Default to 'maia' if not set
db_host = os.getenv("DB_HOST", "db") # Default to 'db' service name
db_port = os.getenv("DB_PORT", "5432") # Default to '5432'
db_name = os.getenv("DB_NAME", "maia") # Default to 'maia'

# Construct the URL, falling back to alembic.ini if needed
url = os.getenv("DB_URL")
if not url:
# Try constructing from parts if DB_URL isn't set
url = f"postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
# As a final fallback, use the URL from alembic.ini
config_url = config.get_main_option("sqlalchemy.url")
if not url and config_url:
url = config_url

# Update the config object so engine_from_config can potentially use it,
# though we'll primarily use the constructed 'url' directly.
config.set_main_option("sqlalchemy.url", url)
# ----------------------------------------------------

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
@@ -51,9 +74,8 @@ def run_migrations_offline() -> None:
script output.

"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
url=url, # Use the constructed URL
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
@@ -70,11 +92,14 @@ def run_migrations_online() -> None:
and associate a connection with the context.

"""
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
# Create engine directly using the constructed URL
connectable = create_engine(url, poolclass=pool.NullPool)
# Original approach using engine_from_config:
# connectable = engine_from_config(
#     config.get_section(config.config_ini_section, {}),
#     prefix="sqlalchemy.",
#     poolclass=pool.NullPool,
# )

with connectable.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)
@@ -1,30 +0,0 @@
"""Initial migration with existing tables

Revision ID: 69069d6184b3
Revises:
Create Date: 2025-04-21 01:14:33.233195

"""

from typing import Sequence, Union

# revision identifiers, used by Alembic.
revision: str = "69069d6184b3"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###

def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
@@ -1,30 +0,0 @@
"""Add todo table

Revision ID: 9a82960db482
Revises: 69069d6184b3
Create Date: 2025-04-21 20:33:27.028529

"""

from typing import Sequence, Union

# revision identifiers, used by Alembic.
revision: str = "9a82960db482"
down_revision: Union[str, None] = "69069d6184b3"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###

def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,6 +1,6 @@
# core/celery_app.py
from celery import Celery
from core.config import settings # Import your settings
from core.config import settings

celery_app = Celery(
"worker",
@@ -9,9 +9,15 @@ celery_app = Celery(
include=[
"modules.auth.tasks",
"modules.admin.tasks",
], # Add paths to modules containing tasks
# Add other modules with tasks here, e.g., "modules.some_other_module.tasks"
"modules.calendar.tasks", # Add calendar tasks
],
)

# Optional: Update Celery configuration directly if needed
# celery_app.conf.update(task_track_started=True)
# Optional: Configure Celery Beat if you need periodic tasks later
# celery_app.conf.beat_schedule = {
#     'check-something-every-5-minutes': {
#         'task': 'your_app.tasks.check_something',
#         'schedule': timedelta(minutes=5),
#     },
# }
celery_app.conf.timezone = "UTC" # Recommended to use UTC
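Note: the include list above registers task modules by dotted path, which is what lets other modules in this diff dispatch work by task name instead of importing the task function. A minimal sketch of that pattern, not taken from the repository; event_id is a placeholder and the task name is the one added to the include list:

```python
# Sketch only: dispatching a registered task by name, mirroring how
# modules/calendar/service.py calls into modules.calendar.tasks below.
from core.celery_app import celery_app


def schedule_reminders(event_id: int) -> None:
    # send_task() resolves the task by its registered name on the worker side,
    # so this caller never imports modules.calendar.tasks (avoids circular imports).
    celery_app.send_task(
        "modules.calendar.tasks.schedule_event_notifications",
        args=[event_id],
    )
```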
@@ -6,8 +6,14 @@ DOTENV_PATH = os.path.join(os.path.dirname(__file__), "../.env")

class Settings(BaseSettings):
# Database settings - reads DB_URL from environment or .env
DB_URL: str = "postgresql://maia:maia@localhost:5432/maia"
# Database settings - reads from environment or .env
DB_PORT: int = 5432
DB_NAME: str = "maia"
DB_HOST: str
DB_USER: str
DB_PASSWORD: str

DB_URL: str = ""

# Redis settings - reads REDIS_URL from environment or .env, also used for Celery.
REDIS_URL: str = "redis://localhost:6379/0"
@@ -20,7 +26,8 @@ class Settings(BaseSettings):
JWT_SECRET_KEY: str

# Other settings
GOOGLE_API_KEY: str = "" # Example with a default
GOOGLE_API_KEY: str
EXPO_PUSH_API_URL: str = "https://exp.host/--/api/v2/push/send"

class Config:
# Tell pydantic-settings to load variables from a .env file
@@ -1,4 +1,3 @@
# core/database.py
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session, declarative_base
from typing import Generator
@@ -10,6 +9,8 @@ Base = declarative_base() # Used for models
_engine = None
_SessionLocal = None

settings.DB_URL = f"postgresql://{settings.DB_USER}:{settings.DB_PASSWORD}@{settings.DB_HOST}:{settings.DB_PORT}/{settings.DB_NAME}"

def get_engine():
global _engine
@@ -1,4 +1,8 @@
# docker-compose.yml

###################
### DEV COMPOSE ###
###################
services:
# ----- Backend API (Uvicorn/FastAPI/Django etc.) -----
api:
@@ -11,9 +15,6 @@ services:
- .:/app
ports:
- "8000:8000"
environment:
- DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/maia
- REDIS_URL=redis://redis:6379/0
depends_on:
- db
- redis
@@ -32,9 +33,6 @@ services:
command: celery -A core.celery_app worker --loglevel=info
volumes:
- .:/app
environment:
- DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/maia
- REDIS_URL=redis://redis:6379/0
depends_on:
- db
- redis
@@ -49,11 +47,11 @@ services:
image: postgres:15 # Use a specific version
container_name: MAIA-DB
volumes:
- postgres_data:/var/lib/postgresql/data # Persist data using a named volume
- db:/var/lib/postgresql/data # Persist data using a named volume
environment:
- POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- POSTGRES_DB=maia
- POSTGRES_USER=${DB_USER}
- POSTGRES_PASSWORD=${DB_PASSWORD}
- POSTGRES_DB=${DB_NAME}
env_file:
- ./.env
networks:
@@ -70,10 +68,11 @@ services:
- maia_network
restart: unless-stopped

# ----- Volumes Definition -----
volumes:
postgres_data: # Define the named volume for PostgreSQL
redis_data: # Define the named volume for Redis
db: # Named volume for PostgreSQL data
driver: local
redis_data: # Named volume for Redis data
driver: local

# ----- Network Definition -----
networks:
@@ -1,4 +1,3 @@
# main.py
from contextlib import _AsyncGeneratorContextManager, asynccontextmanager
from typing import Any, Callable
from fastapi import FastAPI
@@ -7,16 +6,10 @@ from core.database import get_engine, Base
from modules import router
import logging

# import all models to ensure they are registered before create_all

logging.getLogger("passlib").setLevel(logging.ERROR) # fix bc package logging is broken

# Create DB tables (remove in production; use migrations instead)
def lifespan_factory() -> Callable[[FastAPI], _AsyncGeneratorContextManager[Any]]:

@asynccontextmanager
async def lifespan(app: FastAPI):
# Base.metadata.drop_all(bind=get_engine())
@@ -29,25 +22,16 @@ def lifespan_factory() -> Callable[[FastAPI], _AsyncGeneratorContextManager[Any]
lifespan = lifespan_factory()
app = FastAPI(lifespan=lifespan)

# Include module router
app.include_router(router)

# CORS
app.add_middleware(
CORSMiddleware,
allow_origins=[
"http://localhost:8081", # Keep for web testing if needed
"http://192.168.1.9:8081", # Add your mobile device/emulator origin (adjust port if needed)
"http://192.168.255.221:8081",
"https://maia.depaoli.id.au",
],
allow_credentials=True,
allow_origins=["http://localhost:8081", "exp://*", "https://maia.depaoli.id.au"],
allow_methods=["*"],
allow_headers=["*"],
)

# Health endpoint
@app.get("/api/health")
def health():
return {"status": "ok"}
Binary file not shown.
Binary file not shown.
@@ -1,26 +1,33 @@
# modules/admin/api.py
from typing import Annotated
from fastapi import APIRouter, Depends # Import Body
from pydantic import BaseModel # Import BaseModel
from typing import Annotated, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel
from sqlalchemy.orm import Session
from core.database import get_db
from modules.auth.dependencies import admin_only
from modules.auth.models import User
from modules.notifications.service import send_push_notification
from .tasks import cleardb

router = APIRouter(prefix="/admin", tags=["admin"], dependencies=[Depends(admin_only)])

# Define a Pydantic model for the request body
class ClearDbRequest(BaseModel):
hard: bool

class SendNotificationRequest(BaseModel):
username: str
title: str
body: str
data: Optional[dict] = None

@router.get("/")
def read_admin():
return {"message": "Admin route"}

# Change to POST and use the request body model
@router.post("/cleardb")
def clear_db(payload: ClearDbRequest, db: Annotated[Session, Depends(get_db)]):
"""
@@ -28,6 +35,46 @@ def clear_db(payload: ClearDbRequest, db: Annotated[Session, Depends(get_db)]):
'hard'=True: Drop and recreate all tables.
'hard'=False: Delete data from tables except users.
"""
hard = payload.hard # Get 'hard' from the payload
hard = payload.hard
cleardb.delay(hard)
return {"message": "Clearing database in the background", "hard": hard}

@router.post("/send-notification", status_code=status.HTTP_200_OK)
async def send_user_notification(
payload: SendNotificationRequest,
db: Annotated[Session, Depends(get_db)],
):
"""
Admin endpoint to send a push notification to a specific user by username.
"""
target_user = db.query(User).filter(User.username == payload.username).first()

if not target_user:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"User with username '{payload.username}' not found.",
)

if not target_user.expo_push_token:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"User '{payload.username}' does not have a registered push token.",
)

success = await send_push_notification(
push_token=target_user.expo_push_token,
title=payload.title,
body=payload.body,
data=payload.data,
)

if not success:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to send push notification via Expo service.",
)

return {
"message": f"Push notification sent successfully to user '{payload.username}'"
}
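For reference, a hedged sketch of calling the new send-notification endpoint from a script. The base URL, admin JWT, and username are placeholders, and the /api prefix is inferred from the health endpoint and the calendar tests elsewhere in this compare; adjust to match how the module router is actually mounted.

```python
# Sketch only: exercising POST /admin/send-notification with httpx.
# BASE_URL, ADMIN_TOKEN, and the username are hypothetical placeholders.
import httpx

BASE_URL = "http://localhost:8000"
ADMIN_TOKEN = "<admin JWT>"

payload = {
    "username": "some_user",      # must have a registered expo_push_token
    "title": "Hello",
    "body": "Test push from the admin endpoint",
    "data": {"source": "admin"},  # optional extra data
}
resp = httpx.post(
    f"{BASE_URL}/api/admin/send-notification",
    json=payload,
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
print(resp.status_code, resp.json())
```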
@@ -1,4 +1 @@
# modules/admin/services.py

## temp
@@ -18,16 +18,13 @@ def cleardb(hard: bool):
db = SessionLocal()

if hard:
# Drop and recreate all tables
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
db.commit()
return {"message": "Database reset (HARD)"}
else:
# Delete data from tables except users
tables = Base.metadata.tables.keys()
for table_name in tables:
# delete all tables that isn't the users table
if table_name != "users":
table = Base.metadata.tables[table_name]
print(f"Deleting table: {table_name}")
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,4 +1,3 @@
# modules/auth/api.py
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm
from jose import JWTError
@@ -25,7 +24,7 @@ from sqlalchemy.orm import Session
from typing import Annotated
from core.database import get_db
from datetime import timedelta
from core.config import settings # Assuming settings is defined in core.config
from core.config import settings
from core.exceptions import unauthorized_exception

router = APIRouter(prefix="/auth", tags=["auth"])
@@ -1,4 +1,3 @@
# modules/auth/dependencies.py
from fastapi import Depends
from modules.auth.security import get_current_user
from modules.auth.schemas import UserRole
@@ -1,6 +1,6 @@
# modules/auth/models.py
from core.database import Base
from sqlalchemy import Column, Integer, String, Enum, DateTime
from sqlalchemy import Column, Integer, String, Enum, DateTime, Text
from sqlalchemy.orm import relationship
from enum import Enum as PyEnum

@@ -18,6 +18,7 @@ class User(Base):
name = Column(String)
role = Column(Enum(UserRole), nullable=False, default=UserRole.USER)
hashed_password = Column(String)
expo_push_token = Column(Text, nullable=True)
calendar_events = relationship("CalendarEvent", back_populates="user")
@@ -1,4 +1,3 @@
# modules/auth/schemas.py
from enum import Enum as PyEnum
from pydantic import BaseModel
@@ -29,7 +29,7 @@ password_hasher = PasswordHasher()

def hash_password(password: str) -> str:
"""Hash a password with Argon2 (and optional pepper)."""
peppered_password = password + settings.PEPPER # Prepend/append pepper
peppered_password = password + settings.PEPPER
return password_hasher.hash(peppered_password)

@@ -47,10 +47,8 @@ def authenticate_user(username: str, password: str, db: Session) -> User | None:
Authenticate a user by checking username/password against the database.
Returns User object if valid, None otherwise.
"""
# Get user from database
user = db.query(User).filter(User.username == username).first()

# If user not found or password doesn't match
if not user or not verify_password(password, user.hashed_password):
return None

@@ -65,7 +63,6 @@ def create_access_token(data: dict, expires_delta: timedelta | None = None):
expire = datetime.now(timezone.utc) + timedelta(
minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES
)
# expire = datetime.now(timezone.utc) + timedelta(seconds=5)
to_encode.update({"exp": expire, "token_type": TokenType.ACCESS})
return jwt.encode(
to_encode, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM
@@ -89,22 +86,6 @@ def create_refresh_token(data: dict, expires_delta: timedelta | None = None):
def verify_token(
token: str, expected_token_type: TokenType, db: Session
) -> TokenData | None:
"""Verify a JWT token and return TokenData if valid.

Parameters
----------
token: str
The JWT token to be verified.
expected_token_type: TokenType
The expected type of token (access or refresh)
db: Session
Database session to fetch user data.

Returns
-------
TokenData | None
TokenData instance if the token is valid, None otherwise.
"""
is_blacklisted = (
db.query(TokenBlacklist).filter(TokenBlacklist.token == token).first()
is not None
@@ -137,7 +118,6 @@ def get_current_user(
headers={"WWW-Authenticate": "Bearer"},
)

# Check if the token is blacklisted
is_blacklisted = (
db.query(TokenBlacklist).filter(TokenBlacklist.token == token).first()
is not None
@@ -178,7 +158,6 @@ def blacklist_tokens(access_token: str, refresh_token: str, db: Session) -> None
)
expires_at = datetime.fromtimestamp(payload.get("exp"))

# Add the token to the blacklist
blacklisted_token = TokenBlacklist(token=token, expires_at=expires_at)
db.add(blacklisted_token)

@@ -191,7 +170,6 @@ def blacklist_token(token: str, db: Session) -> None:
)
expires_at = datetime.fromtimestamp(payload.get("exp"))

# Add the token to the blacklist
blacklisted_token = TokenBlacklist(token=token, expires_at=expires_at)
db.add(blacklisted_token)
db.commit()
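hash_password above appends settings.PEPPER before hashing with Argon2. Its verify counterpart is not visible in these hunks; a minimal sketch of what verify_password presumably looks like, assuming argon2-cffi's PasswordHasher and the same pepper scheme (not the repository's actual code):

```python
# Sketch only: a plausible verify_password counterpart, assuming argon2-cffi.
from argon2 import PasswordHasher
from argon2.exceptions import VerifyMismatchError

from core.config import settings

password_hasher = PasswordHasher()


def verify_password(password: str, hashed_password: str) -> bool:
    """Return True if the peppered password matches the stored Argon2 hash."""
    try:
        return password_hasher.verify(hashed_password, password + settings.PEPPER)
    except VerifyMismatchError:
        return False
```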
@@ -1,4 +1,3 @@
# modules/auth/services.py
from sqlalchemy.orm import Session
from modules.auth.models import User
from modules.auth.schemas import UserResponse
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
backend/modules/calendar/__pycache__/tasks.cpython-312.pyc (new binary file). Binary file not shown.
@@ -1,4 +1,3 @@
# modules/calendar/api.py
from fastapi import APIRouter, Depends, status
from sqlalchemy.orm import Session
from datetime import datetime
@@ -7,7 +7,7 @@ from sqlalchemy import (
ForeignKey,
JSON,
Boolean,
) # Add Boolean
)
from sqlalchemy.orm import relationship
from core.database import Base

@@ -18,15 +18,12 @@ class CalendarEvent(Base):
id = Column(Integer, primary_key=True)
title = Column(String, nullable=False)
description = Column(String)
start = Column(DateTime, nullable=False)
end = Column(DateTime)
start = Column(DateTime(timezone=True), nullable=False)
end = Column(DateTime(timezone=True))
location = Column(String)
all_day = Column(Boolean, default=False) # Add all_day column
all_day = Column(Boolean, default=False)
tags = Column(JSON)
color = Column(String) # hex code for color
user_id = Column(
Integer, ForeignKey("users.id"), nullable=False
) # <-- Relationship
color = Column(String)
user_id = Column(Integer, ForeignKey("users.id"), nullable=False)

# Bi-directional relationship (for eager loading)
user = relationship("User", back_populates="calendar_events")
@@ -1,7 +1,6 @@
# modules/calendar/schemas.py
from datetime import datetime
from pydantic import BaseModel, field_validator # Add field_validator
from typing import List, Optional # Add List and Optional
from pydantic import BaseModel, field_validator
from typing import List, Optional

# Base schema for common fields, including tags
@@ -7,7 +7,13 @@ from core.exceptions import not_found_exception
from modules.calendar.schemas import (
CalendarEventCreate,
CalendarEventUpdate,
) # Import schemas
)

# Import the celery app instance instead of the task functions directly
from core.celery_app import celery_app

# Keep task imports if cancel_event_notifications is still called directly and synchronously
from modules.calendar.tasks import cancel_event_notifications

def create_calendar_event(db: Session, user_id: int, event_data: CalendarEventCreate):
@@ -23,6 +29,11 @@ def create_calendar_event(db: Session, user_id: int, event_data: CalendarEventCr
db.add(event)
db.commit()
db.refresh(event)
# Schedule notifications using send_task
celery_app.send_task(
"modules.calendar.tasks.schedule_event_notifications", # Task name as string
args=[event.id],
)
return event

@@ -114,10 +125,17 @@ def update_calendar_event(

db.commit()
db.refresh(event)
# Re-schedule notifications using send_task
celery_app.send_task(
"modules.calendar.tasks.schedule_event_notifications", args=[event.id]
)
return event

def delete_calendar_event(db: Session, user_id: int, event_id: int):
event = get_calendar_event_by_id(db, user_id, event_id) # Reuse get_by_id for check
# Cancel any scheduled notifications before deleting
# Run synchronously here or make cancel_event_notifications an async task
cancel_event_notifications(event_id)
db.delete(event)
db.commit()
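create_calendar_event and update_calendar_event above only enqueue schedule_event_notifications by name; the scheduling itself lives in the new backend/modules/calendar/tasks.py later in this compare, which queues per-event reminders with an ETA, stores the task IDs in the Celery result backend, and revokes them when an event changes or is deleted. A condensed sketch of that ETA-plus-revoke pattern, simplified from the full task (single reminder, no error handling):

```python
# Sketch only: condensed from backend/modules/calendar/tasks.py shown later.
from datetime import datetime, timedelta, timezone

from core.celery_app import celery_app
from modules.calendar.tasks import send_event_notification

KEY_PREFIX = "calendar_event_tasks:"  # same prefix the real task module uses


def schedule_30_min_reminder(event_id: int, push_token: str, start_utc: datetime) -> None:
    eta = start_utc - timedelta(minutes=30)
    if eta > datetime.now(timezone.utc):
        # Queue the reminder to run at a specific time (ETA) on the worker.
        task = send_event_notification.apply_async(
            args=[event_id, push_token, "30_min"], eta=eta
        )
        # Remember the task id so the reminder can be revoked later.
        celery_app.backend.set(f"{KEY_PREFIX}{event_id}", task.id)


def cancel_reminder(event_id: int) -> None:
    task_id = celery_app.backend.get(f"{KEY_PREFIX}{event_id}")
    if task_id:
        celery_app.control.revoke(task_id.decode("utf-8"), terminate=True)
        celery_app.backend.delete(f"{KEY_PREFIX}{event_id}")
```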
backend/modules/calendar/tasks.py (new file, 233 lines)
@@ -0,0 +1,233 @@
|
||||
# backend/modules/calendar/tasks.py
|
||||
import logging
|
||||
import asyncio
|
||||
from datetime import datetime, timedelta, time, timezone
|
||||
|
||||
from celery import shared_task
|
||||
from celery.exceptions import Ignore
|
||||
|
||||
from core.celery_app import celery_app
|
||||
from core.database import get_db
|
||||
from modules.calendar.models import CalendarEvent
|
||||
from modules.notifications.service import send_push_notification
|
||||
from modules.auth.models import User # Assuming user model is in modules/user/models.py
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Key prefix for storing scheduled task IDs in Redis (or Celery backend)
|
||||
SCHEDULED_TASK_KEY_PREFIX = "calendar_event_tasks:"
|
||||
|
||||
|
||||
def get_scheduled_task_key(event_id: int) -> str:
|
||||
return f"{SCHEDULED_TASK_KEY_PREFIX}{event_id}"
|
||||
|
||||
|
||||
@shared_task(bind=True)
|
||||
def schedule_event_notifications(self, event_id: int):
|
||||
"""Schedules reminder notifications for a calendar event."""
|
||||
db_gen = get_db()
|
||||
db = next(db_gen)
|
||||
try:
|
||||
event = db.query(CalendarEvent).filter(CalendarEvent.id == event_id).first()
|
||||
if not event:
|
||||
logger.warning(
|
||||
f"Calendar event {event_id} not found for scheduling notifications."
|
||||
)
|
||||
raise Ignore() # Don't retry if event doesn't exist
|
||||
|
||||
user = db.query(User).filter(User.id == event.user_id).first()
|
||||
if not user or not user.expo_push_token:
|
||||
logger.warning(
|
||||
f"User {event.user_id} or their push token not found for event {event_id}. Skipping notification scheduling."
|
||||
)
|
||||
# Cancel any potentially existing tasks for this event if user/token is now invalid
|
||||
cancel_event_notifications(event_id)
|
||||
raise Ignore() # Don't retry if user/token missing
|
||||
|
||||
# Cancel any existing notifications for this event first
|
||||
cancel_event_notifications(event_id) # Run synchronously within this task
|
||||
|
||||
scheduled_task_ids = []
|
||||
now_utc = datetime.now(timezone.utc)
|
||||
|
||||
if event.all_day:
|
||||
# Schedule one notification at 6:00 AM in the event's original timezone (or UTC if naive)
|
||||
event_start_date = event.start.date()
|
||||
notification_time_local = datetime.combine(
|
||||
event_start_date, time(6, 0), tzinfo=event.start.tzinfo
|
||||
)
|
||||
# Convert scheduled time to UTC for Celery ETA
|
||||
notification_time_utc = notification_time_local.astimezone(timezone.utc)
|
||||
|
||||
if notification_time_utc > now_utc:
|
||||
task = send_event_notification.apply_async(
|
||||
args=[event.id, user.expo_push_token, "all_day"],
|
||||
eta=notification_time_utc,
|
||||
)
|
||||
scheduled_task_ids.append(task.id)
|
||||
logger.info(
|
||||
f"Scheduled all-day notification for event {event_id} at {notification_time_utc} (Task ID: {task.id})"
|
||||
)
|
||||
else:
|
||||
logger.info(
|
||||
f"All-day notification time {notification_time_utc} for event {event_id} is in the past. Skipping."
|
||||
)
|
||||
|
||||
else:
|
||||
# Ensure event start time is timezone-aware (assume UTC if naive)
|
||||
event_start_utc = event.start
|
||||
if event_start_utc.tzinfo is None:
|
||||
event_start_utc = event_start_utc.replace(tzinfo=timezone.utc)
|
||||
else:
|
||||
event_start_utc = event_start_utc.astimezone(timezone.utc)
|
||||
|
||||
times_before = {
|
||||
"1_hour": timedelta(hours=1),
|
||||
"30_min": timedelta(minutes=30),
|
||||
}
|
||||
|
||||
for label, delta in times_before.items():
|
||||
notification_time_utc = event_start_utc - delta
|
||||
if notification_time_utc > now_utc:
|
||||
task = send_event_notification.apply_async(
|
||||
args=[event.id, user.expo_push_token, label],
|
||||
eta=notification_time_utc,
|
||||
)
|
||||
scheduled_task_ids.append(task.id)
|
||||
logger.info(
|
||||
f"Scheduled {label} notification for event {event_id} at {notification_time_utc} (Task ID: {task.id})"
|
||||
)
|
||||
else:
|
||||
logger.info(
|
||||
f"{label} notification time {notification_time_utc} for event {event_id} is in the past. Skipping."
|
||||
)
|
||||
|
||||
# Store the new task IDs using Celery backend (Redis)
|
||||
if scheduled_task_ids:
|
||||
key = get_scheduled_task_key(event_id)
|
||||
# Store as a simple comma-separated string
|
||||
celery_app.backend.set(key, ",".join(scheduled_task_ids))
|
||||
logger.debug(f"Stored task IDs for event {event_id}: {scheduled_task_ids}")
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f"Error scheduling notifications for event {event_id}: {e}")
|
||||
# Optional: Add retry logic if appropriate
|
||||
# self.retry(exc=e, countdown=60)
|
||||
finally:
|
||||
next(db_gen, None) # Ensure db session is closed
|
||||
|
||||
|
||||
# Note: This task calls an async function. Ensure your Celery worker
|
||||
# is configured to handle async tasks (e.g., using gevent, eventlet, or uvicorn worker).
|
||||
@shared_task(bind=True)
|
||||
def send_event_notification(
|
||||
self, event_id: int, user_push_token: str, notification_type: str
|
||||
):
|
||||
"""Sends a single reminder notification for a calendar event."""
|
||||
db_gen = get_db()
|
||||
db = next(db_gen)
|
||||
try:
|
||||
event = db.query(CalendarEvent).filter(CalendarEvent.id == event_id).first()
|
||||
if not event:
|
||||
logger.warning(
|
||||
f"Calendar event {event_id} not found for sending {notification_type} notification."
|
||||
)
|
||||
raise Ignore() # Don't retry if event doesn't exist
|
||||
|
||||
# Double-check user and token validity at the time of sending
|
||||
user = db.query(User).filter(User.id == event.user_id).first()
|
||||
if not user or user.expo_push_token != user_push_token:
|
||||
logger.warning(
|
||||
f"User {event.user_id} token mismatch or user not found for event {event_id} at notification time. Skipping."
|
||||
)
|
||||
raise Ignore()
|
||||
|
||||
title = f"Upcoming: {event.title}"
|
||||
if notification_type == "all_day":
|
||||
body = f"Today: {event.title}"
|
||||
if event.description:
|
||||
body += f" - {event.description[:50]}" # Add part of description
|
||||
elif notification_type == "1_hour":
|
||||
local_start_time = event.start.astimezone().strftime(
|
||||
"%I:%M %p"
|
||||
) # Convert to local time for display
|
||||
body = f"Starts at {local_start_time} (in 1 hour)"
|
||||
elif notification_type == "30_min":
|
||||
local_start_time = event.start.astimezone().strftime("%I:%M %p")
|
||||
body = f"Starts at {local_start_time} (in 30 mins)"
|
||||
else:
|
||||
body = "Check your calendar for details." # Fallback
|
||||
|
||||
logger.info(
|
||||
f"Sending {notification_type} notification for event {event_id} to token {user_push_token[:10]}..."
|
||||
)
|
||||
try:
|
||||
# Call the async notification service
|
||||
success = asyncio.run(
|
||||
send_push_notification(
|
||||
push_token=user_push_token,
|
||||
title=title,
|
||||
body=body,
|
||||
data={"eventId": event.id, "type": "calendar_reminder"},
|
||||
)
|
||||
)
|
||||
if not success:
|
||||
logger.error(
|
||||
f"Failed to send {notification_type} notification for event {event_id} via service."
|
||||
)
|
||||
# Optional: self.retry(countdown=60) # Retry sending if failed
|
||||
else:
|
||||
logger.info(
|
||||
f"Successfully sent {notification_type} notification for event {event_id}."
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
f"Error calling send_push_notification for event {event_id}: {e}"
|
||||
)
|
||||
# Optional: self.retry(exc=e, countdown=60)
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
f"General error sending {notification_type} notification for event {event_id}: {e}"
|
||||
)
|
||||
# Optional: self.retry(exc=e, countdown=60)
|
||||
finally:
|
||||
next(db_gen, None) # Ensure db session is closed
|
||||
|
||||
|
||||
# This is run synchronously when called, or can be called as a task itself
|
||||
# @shared_task # Uncomment if you want to call this asynchronously e.g., .delay()
|
||||
def cancel_event_notifications(event_id: int):
|
||||
"""Cancels all scheduled reminder notifications for a calendar event."""
|
||||
key = get_scheduled_task_key(event_id)
|
||||
try:
|
||||
task_ids_bytes = celery_app.backend.get(key)
|
||||
|
||||
if task_ids_bytes:
|
||||
# Decode from bytes (assuming Redis backend)
|
||||
task_ids_str = task_ids_bytes.decode("utf-8")
|
||||
task_ids = task_ids_str.split(",")
|
||||
logger.info(f"Cancelling scheduled tasks for event {event_id}: {task_ids}")
|
||||
revoked_count = 0
|
||||
for task_id in task_ids:
|
||||
if task_id: # Ensure not empty string
|
||||
try:
|
||||
celery_app.control.revoke(
|
||||
task_id.strip(), terminate=True, signal="SIGKILL"
|
||||
)
|
||||
revoked_count += 1
|
||||
except Exception as revoke_err:
|
||||
logger.error(
|
||||
f"Error revoking task {task_id} for event {event_id}: {revoke_err}"
|
||||
)
|
||||
# Delete the key from Redis after attempting revocation
|
||||
celery_app.backend.delete(key)
|
||||
logger.debug(
|
||||
f"Revoked {revoked_count} tasks and removed task ID key {key} from backend for event {event_id}."
|
||||
)
|
||||
else:
|
||||
logger.debug(
|
||||
f"No scheduled tasks found in backend to cancel for event {event_id} (key: {key})."
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(f"Error cancelling notifications for event {event_id}: {e}")
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,4 +1,3 @@
|
||||
# modules/nlp/api.py
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.orm import Session
|
||||
from typing import List
|
||||
@@ -8,7 +7,6 @@ from core.database import get_db
|
||||
from modules.auth.dependencies import get_current_user
|
||||
from modules.auth.models import User
|
||||
|
||||
# Import the new service functions and Enum
|
||||
from modules.nlp.service import (
|
||||
process_request,
|
||||
ask_ai,
|
||||
@@ -17,7 +15,6 @@ from modules.nlp.service import (
|
||||
MessageSender,
|
||||
)
|
||||
|
||||
# Import the response schema and the new ChatMessage model for response type hinting
|
||||
from modules.nlp.schemas import ProcessCommandRequest, ProcessCommandResponse
|
||||
from modules.calendar.service import (
|
||||
create_calendar_event,
|
||||
@@ -28,7 +25,6 @@ from modules.calendar.service import (
|
||||
from modules.calendar.models import CalendarEvent
|
||||
from modules.calendar.schemas import CalendarEventCreate, CalendarEventUpdate
|
||||
|
||||
# Import TODO services, schemas, and model
|
||||
from modules.todo import service as todo_service
|
||||
from modules.todo.models import Todo
|
||||
from modules.todo.schemas import TodoCreate, TodoUpdate
|
||||
@@ -38,24 +34,22 @@ from datetime import datetime
|
||||
|
||||
class ChatMessageResponse(BaseModel):
|
||||
id: int
|
||||
sender: MessageSender # Use the enum directly
|
||||
sender: MessageSender
|
||||
text: str
|
||||
timestamp: datetime
|
||||
|
||||
class Config:
|
||||
from_attributes = True # Allow Pydantic to work with ORM models
|
||||
from_attributes = True
|
||||
|
||||
|
||||
router = APIRouter(prefix="/nlp", tags=["nlp"])
|
||||
|
||||
|
||||
# Helper to format calendar events (expects list of CalendarEvent models)
|
||||
def format_calendar_events(events: List[CalendarEvent]) -> List[str]:
|
||||
if not events:
|
||||
return ["You have no events matching that criteria."]
|
||||
formatted = ["Here are the events:"]
|
||||
for event in events:
|
||||
# Access attributes directly from the model instance
|
||||
start_str = (
|
||||
event.start.strftime("%Y-%m-%d %H:%M") if event.start else "No start time"
|
||||
)
|
||||
@@ -65,7 +59,6 @@ def format_calendar_events(events: List[CalendarEvent]) -> List[str]:
|
||||
return formatted
|
||||
|
||||
|
||||
# Helper to format TODO items (expects list of Todo models)
|
||||
def format_todos(todos: List[Todo]) -> List[str]:
|
||||
if not todos:
|
||||
return ["Your TODO list is empty."]
|
||||
@@ -80,7 +73,6 @@ def format_todos(todos: List[Todo]) -> List[str]:
|
||||
return formatted
|
||||
|
||||
|
||||
# Update the response model for the endpoint
|
||||
@router.post("/process-command", response_model=ProcessCommandResponse)
|
||||
def process_command(
|
||||
request_data: ProcessCommandRequest,
|
||||
@@ -92,34 +84,25 @@ def process_command(
|
||||
"""
|
||||
user_input = request_data.user_input
|
||||
|
||||
# --- Save User Message ---
|
||||
save_chat_message(
|
||||
db, user_id=current_user.id, sender=MessageSender.USER, text=user_input
|
||||
)
|
||||
# ------------------------
|
||||
|
||||
command_data = process_request(user_input)
|
||||
intent = command_data["intent"]
|
||||
params = command_data["params"]
|
||||
response_text = command_data["response_text"]
|
||||
|
||||
responses = [response_text] # Start with the initial response
|
||||
responses = [response_text]
|
||||
|
||||
# --- Save Initial AI Response ---
|
||||
# Save the first response generated by process_request
|
||||
save_chat_message(
|
||||
db, user_id=current_user.id, sender=MessageSender.AI, text=response_text
|
||||
)
|
||||
# -----------------------------
|
||||
|
||||
if intent == "error":
|
||||
# Don't raise HTTPException here if we want to save the error message
|
||||
# Instead, return the error response directly
|
||||
# save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=response_text) # Already saved above
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
if intent == "clarification_needed" or intent == "unknown":
|
||||
# save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=response_text) # Already saved above
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
try:
|
||||
@@ -127,11 +110,9 @@ def process_command(
|
||||
case "ask_ai":
|
||||
ai_answer = ask_ai(**params)
|
||||
responses.append(ai_answer)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db, user_id=current_user.id, sender=MessageSender.AI, text=ai_answer
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "get_calendar_events":
|
||||
@@ -140,12 +121,10 @@ def process_command(
|
||||
)
|
||||
formatted_responses = format_calendar_events(events)
|
||||
responses.extend(formatted_responses)
|
||||
# --- Save Additional AI Responses ---
|
||||
for resp in formatted_responses:
|
||||
save_chat_message(
|
||||
db, user_id=current_user.id, sender=MessageSender.AI, text=resp
|
||||
)
|
||||
# ----------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "add_calendar_event":
|
||||
@@ -159,20 +138,17 @@ def process_command(
|
||||
title = created_event.title or "Untitled Event"
|
||||
add_response = f"Added: {title} starting at {start_str}."
|
||||
responses.append(add_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=add_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "update_calendar_event":
|
||||
event_id = params.pop("event_id", None)
|
||||
if event_id is None:
|
||||
# Save the error message before raising
|
||||
error_msg = "Event ID is required for update."
|
||||
save_chat_message(
|
||||
db,
|
||||
@@ -188,20 +164,17 @@ def process_command(
|
||||
title = updated_event.title or "Untitled Event"
|
||||
update_response = f"Updated event ID {updated_event.id}: {title}."
|
||||
responses.append(update_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=update_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "delete_calendar_event":
|
||||
event_id = params.get("event_id")
|
||||
if event_id is None:
|
||||
# Save the error message before raising
|
||||
error_msg = "Event ID is required for delete."
|
||||
save_chat_message(
|
||||
db,
|
||||
@@ -213,29 +186,24 @@ def process_command(
|
||||
delete_calendar_event(db, current_user.id, event_id)
|
||||
delete_response = f"Deleted event ID {event_id}."
|
||||
responses.append(delete_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=delete_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
# --- Add TODO Cases ---
|
||||
case "get_todos":
|
||||
todos: List[Todo] = todo_service.get_todos(
|
||||
db, user=current_user, **params
|
||||
)
|
||||
formatted_responses = format_todos(todos)
|
||||
responses.extend(formatted_responses)
|
||||
# --- Save Additional AI Responses ---
|
||||
for resp in formatted_responses:
|
||||
save_chat_message(
|
||||
db, user_id=current_user.id, sender=MessageSender.AI, text=resp
|
||||
)
|
||||
# ----------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "add_todo":
|
||||
@@ -247,14 +215,12 @@ def process_command(
|
||||
f"Added TODO: '{created_todo.task}' (ID: {created_todo.id})."
|
||||
)
|
||||
responses.append(add_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=add_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "update_todo":
|
||||
@@ -279,14 +245,12 @@ def process_command(
|
||||
status = "complete" if params["complete"] else "incomplete"
|
||||
update_response += f" Marked as {status}."
|
||||
responses.append(update_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=update_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "delete_todo":
|
||||
@@ -307,26 +271,21 @@ def process_command(
|
||||
f"Deleted TODO ID {deleted_todo.id}: '{deleted_todo.task}'."
|
||||
)
|
||||
responses.append(delete_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=delete_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
# --- End TODO Cases ---
|
||||
|
||||
case _:
|
||||
print(
|
||||
f"Warning: Unhandled intent '{intent}' reached api.py match statement."
|
||||
)
|
||||
# The initial response_text was already saved
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
except HTTPException as http_exc:
|
||||
# Don't save again if already saved before raising
|
||||
if http_exc.status_code != 400 or ("event_id" not in http_exc.detail.lower()):
|
||||
save_chat_message(
|
||||
db,
|
||||
@@ -340,11 +299,9 @@ def process_command(
|
||||
error_response = (
|
||||
"Sorry, I encountered an error while trying to perform that action."
|
||||
)
|
||||
# --- Save Final Error AI Response ---
|
||||
save_chat_message(
|
||||
db, user_id=current_user.id, sender=MessageSender.AI, text=error_response
|
||||
)
|
||||
# ----------------------------------
|
||||
return ProcessCommandResponse(responses=[error_response])
|
||||
|
||||
|
||||
@@ -355,6 +312,3 @@ def read_chat_history(
|
||||
"""Retrieves the last 50 chat messages for the current user."""
|
||||
history = get_chat_history(db, user_id=current_user.id, limit=50)
|
||||
return history
|
||||
|
||||
|
||||
# -------------------------------------
|
||||
|
||||
@@ -1,4 +1,3 @@
# /home/cdp/code/MAIA/backend/modules/nlp/models.py
from sqlalchemy import Column, Integer, Text, DateTime, ForeignKey, Enum as SQLEnum
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
@@ -1,4 +1,3 @@
# modules/nlp/schemas.py
from pydantic import BaseModel
from typing import List

@@ -9,5 +8,4 @@ class ProcessCommandRequest(BaseModel):

class ProcessCommandResponse(BaseModel):
responses: List[str]
# Optional: Keep details if needed for specific frontend logic beyond display
# details: dict | None = None
@@ -1,5 +1,3 @@
# modules/nlp/service.py

from sqlalchemy.orm import Session
from sqlalchemy import desc # Import desc for ordering
from google import genai
backend/modules/notifications/__init__.py (new file, 0 lines)
Binary file not shown.
Binary file not shown.
backend/modules/notifications/service.py (new file, 111 lines)
@@ -0,0 +1,111 @@
|
||||
import httpx
|
||||
import logging
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
from core.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def send_push_notification(
|
||||
push_token: str, title: str, body: str, data: Optional[Dict[str, Any]] = None
|
||||
) -> bool:
|
||||
"""
|
||||
Sends a push notification to a specific Expo push token.
|
||||
|
||||
Args:
|
||||
push_token: The recipient's Expo push token.
|
||||
title: The title of the notification.
|
||||
body: The main message content of the notification.
|
||||
data: Optional dictionary containing extra data to send with the notification.
|
||||
|
||||
Returns:
|
||||
True if the notification was sent successfully (according to Expo API), False otherwise.
|
||||
"""
|
||||
if not push_token:
|
||||
logger.warning("Attempted to send notification but no push token provided.")
|
||||
return False
|
||||
|
||||
message = {
|
||||
"to": push_token,
|
||||
"sound": "default",
|
||||
"title": title,
|
||||
"body": body,
|
||||
"priority": "high",
|
||||
"channelId": "default",
|
||||
}
|
||||
if data:
|
||||
message["data"] = data
|
||||
|
||||
async with httpx.AsyncClient() as client:
|
||||
try:
|
||||
response = await client.post(
|
||||
settings.EXPO_PUSH_API_URL,
|
||||
headers={
|
||||
"Accept": "application/json",
|
||||
"Accept-Encoding": "gzip, deflate",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
json=message,
|
||||
timeout=10.0,
|
||||
)
|
||||
response.raise_for_status() # Raise exception for 4xx/5xx responses
|
||||
|
||||
response_data = response.json()
|
||||
logger.debug(f"Expo push API response: {response_data}")
|
||||
|
||||
# Check for top-level errors first
|
||||
if "errors" in response_data:
|
||||
error_messages = [
|
||||
err.get("message", "Unknown error")
|
||||
for err in response_data["errors"]
|
||||
]
|
||||
logger.error(
|
||||
f"Expo API returned errors for {push_token[:10]}...: {'; '.join(error_messages)}"
|
||||
)
|
||||
return False
|
||||
|
||||
# Check the status in the data field
|
||||
receipt = response_data.get("data")
|
||||
|
||||
# if receipts is a list
|
||||
if receipt:
|
||||
status = receipt.get("status")
|
||||
|
||||
if status == "ok":
|
||||
logger.info(
|
||||
f"Successfully sent push notification to token: {push_token[:10]}..."
|
||||
)
|
||||
return True
|
||||
else:
|
||||
# Log details if the status is not 'ok'
|
||||
error_details = receipt.get("details")
|
||||
error_message = receipt.get("message")
|
||||
logger.error(
|
||||
f"Failed to send push notification to {push_token[:10]}... "
|
||||
f"Expo status: {status}, Message: {error_message}, Details: {error_details}"
|
||||
)
|
||||
return False
|
||||
else:
|
||||
# Log if 'data' is missing, not a list, or an empty list
|
||||
logger.error(
|
||||
f"Unexpected Expo API response format or empty 'data' field for {push_token[:10]}... "
|
||||
f"Response: {response_data}"
|
||||
)
|
||||
return False
|
||||
|
||||
except httpx.HTTPStatusError as e:
|
||||
logger.error(
|
||||
f"HTTP error sending push notification to {push_token[:10]}...: {e.response.status_code} - {e.response.text}"
|
||||
)
|
||||
return False
|
||||
except httpx.RequestError as e:
|
||||
logger.error(
|
||||
f"Network error sending push notification to {push_token[:10]}...: {e}"
|
||||
)
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
f"Unexpected error sending push notification to {push_token[:10]}...: {e}"
|
||||
)
|
||||
return False
|
||||
@@ -1,2 +1 @@
# backend/modules/todo/__init__.py
# This file makes the 'todo' directory a Python package.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,17 +1,16 @@
|
||||
# backend/modules/todo/api.py
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.orm import Session
|
||||
from typing import List
|
||||
|
||||
from . import service, schemas
|
||||
from core.database import get_db
|
||||
from modules.auth.dependencies import get_current_user # Corrected import
|
||||
from modules.auth.models import User # Assuming User model is in auth.models
|
||||
from modules.auth.dependencies import get_current_user
|
||||
from modules.auth.models import User
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/todos",
|
||||
tags=["todos"],
|
||||
dependencies=[Depends(get_current_user)], # Corrected dependency
|
||||
dependencies=[Depends(get_current_user)],
|
||||
responses={404: {"description": "Not found"}},
|
||||
)
|
||||
|
||||
@@ -20,7 +19,7 @@ router = APIRouter(
|
||||
def create_todo_endpoint(
|
||||
todo: schemas.TodoCreate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user), # Corrected dependency
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
return service.create_todo(db=db, todo=todo, user=current_user)
|
||||
|
||||
@@ -30,7 +29,7 @@ def read_todos_endpoint(
|
||||
skip: int = 0,
|
||||
limit: int = 100,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user), # Corrected dependency
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
todos = service.get_todos(db=db, user=current_user, skip=skip, limit=limit)
|
||||
return todos
|
||||
@@ -40,7 +39,7 @@ def read_todos_endpoint(
|
||||
def read_todo_endpoint(
|
||||
todo_id: int,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user), # Corrected dependency
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
db_todo = service.get_todo(db=db, todo_id=todo_id, user=current_user)
|
||||
if db_todo is None:
|
||||
@@ -53,7 +52,7 @@ def update_todo_endpoint(
|
||||
todo_id: int,
|
||||
todo_update: schemas.TodoUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user), # Corrected dependency
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
return service.update_todo(
|
||||
db=db, todo_id=todo_id, todo_update=todo_update, user=current_user
|
||||
@@ -64,6 +63,6 @@ def update_todo_endpoint(
|
||||
def delete_todo_endpoint(
|
||||
todo_id: int,
|
||||
db: Session = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user), # Corrected dependency
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
return service.delete_todo(db=db, todo_id=todo_id, user=current_user)
|
||||
|
||||
@@ -14,6 +14,4 @@ class Todo(Base):
complete = Column(Boolean, default=False)
owner_id = Column(Integer, ForeignKey("users.id"))

owner = relationship(
"User"
) # Add relationship if needed, assuming User model exists in auth.models
owner = relationship("User")
Binary file not shown.
@@ -1,7 +1,7 @@
# modules/user/api.py
from typing import Annotated
from fastapi import APIRouter, Depends
from typing import Annotated, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from pydantic import BaseModel

from core.database import get_db
from core.exceptions import not_found_exception, forbidden_exception
@@ -12,6 +12,41 @@ from modules.auth.models import User
router = APIRouter(prefix="/user", tags=["user"])

# --- Pydantic Schema for Push Token --- #
class PushTokenData(BaseModel):
token: str
device_name: Optional[str] = None
token_type: str # Expecting 'expo'

@router.post("/push-token", status_code=status.HTTP_200_OK)
def save_push_token(
token_data: PushTokenData,
db: Annotated[Session, Depends(get_db)],
current_user: Annotated[User, Depends(get_current_user)],
):
"""
Save the Expo push token for the current user.
Requires user to be logged in.
"""
if token_data.token_type != "expo":
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid token_type. Only 'expo' is supported.",
)

# Update the user's push token
current_user.expo_push_token = token_data.token
# Optionally, you could store device_name somewhere if needed, perhaps in a separate table
# For now, we just update the token on the user model

db.add(current_user)
db.commit()
db.refresh(current_user)

return {"message": "Push token saved successfully"}

@router.get("/me", response_model=UserResponse)
def me(
db: Annotated[Session, Depends(get_db)],
@@ -1,4 +1,5 @@
pytest
pytest_mock
pytest-cov # For checking test coverage (optional)
ruff # Or flake8, pylint etc. for linting
black # For code formatting checks
@@ -15,3 +15,4 @@ redis
SQLAlchemy
starlette
uvicorn
eventlet
@@ -47,8 +47,12 @@ click-plugins==1.1.1
# via celery
click-repl==0.3.0
# via celery
dnspython==2.7.0
# via eventlet
ecdsa==0.19.1
# via python-jose
eventlet==0.39.1
# via -r requirements.in
fastapi==0.115.12
# via -r requirements.in
gevent==25.4.1
@@ -61,6 +65,7 @@ google-genai==1.11.0
# via -r requirements.in
greenlet==3.2.0
# via
# eventlet
# gevent
# sqlalchemy
h11==0.14.0
Binary file not shown.
@@ -1,7 +1,8 @@
from fastapi import status
from fastapi.testclient import TestClient
from sqlalchemy.orm import Session
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone  # Add timezone
from pytest_mock import MockerFixture  # Import MockerFixture

from tests.helpers import generators
from modules.calendar.models import CalendarEvent  # Assuming model exists
@@ -10,13 +11,14 @@ from tests.conftest import fake

# Helper function to create an event payload
def create_event_payload(start_offset_days=0, end_offset_days=1):
    start_time = datetime.utcnow() + timedelta(days=start_offset_days)
    end_time = datetime.utcnow() + timedelta(days=end_offset_days)
    # Ensure datetimes are timezone-aware (UTC)
    start_time = datetime.now(timezone.utc) + timedelta(days=start_offset_days)
    end_time = datetime.now(timezone.utc) + timedelta(days=end_offset_days)
    return {
        "title": fake.sentence(nb_words=3),
        "description": fake.text(),
        "start": start_time.isoformat(),  # Rename start_time to start
        "end": end_time.isoformat(),  # Rename end_time to end
        "start": start_time.isoformat().replace("+00:00", "Z"),  # Ensure Z suffix
        "end": end_time.isoformat().replace("+00:00", "Z"),  # Ensure Z suffix
        "all_day": fake.boolean(),
    }

@@ -31,13 +33,20 @@ def test_create_event_unauthorized(client: TestClient) -> None:
|
||||
assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
def test_create_event_success(db: Session, client: TestClient) -> None:
|
||||
def test_create_event_success(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None:
|
||||
"""Test creating a calendar event successfully."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock the celery task sending
|
||||
mock_send_task = mocker.patch(
|
||||
"core.celery_app.celery_app.send_task"
|
||||
) # Corrected patch target
|
||||
|
||||
response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
@@ -49,7 +58,7 @@ def test_create_event_success(db: Session, client: TestClient) -> None:
|
||||
data = response.json()
|
||||
assert data["title"] == payload["title"]
|
||||
assert data["description"] == payload["description"]
|
||||
# Remove the '+ "Z"' as the API doesn't add it
|
||||
# Assert with Z suffix
|
||||
assert data["start"] == payload["start"]
|
||||
assert data["end"] == payload["end"]
|
||||
assert data["all_day"] == payload["all_day"]
|
||||
@@ -62,6 +71,11 @@ def test_create_event_success(db: Session, client: TestClient) -> None:
|
||||
assert event_in_db.user_id == user.id
|
||||
assert event_in_db.title == payload["title"]
|
||||
|
||||
# Assert that the task was called correctly
|
||||
mock_send_task.assert_called_once_with(
|
||||
"modules.calendar.tasks.schedule_event_notifications", args=[data["id"]]
|
||||
)
|
||||
|
||||
|
||||
# --- Test Get Events ---
|
||||
|
||||
@@ -72,36 +86,49 @@ def test_get_events_unauthorized(client: TestClient) -> None:
|
||||
assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
def test_get_events_success(db: Session, client: TestClient) -> None:
|
||||
def test_get_events_success(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test getting all calendar events for a user."""
|
||||
user, password = generators.create_user(db)
|
||||
user, password = generators.create_user(
|
||||
db, username="testuser_get_events"
|
||||
) # Unique username
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
# Create a couple of events for the user
|
||||
payload1 = create_event_payload(0, 1)
|
||||
client.post(
|
||||
create_rsp1 = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload1,
|
||||
)
|
||||
assert create_rsp1.status_code == status.HTTP_201_CREATED
|
||||
|
||||
payload2 = create_event_payload(2, 3)
|
||||
client.post(
|
||||
create_rsp2 = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload2,
|
||||
)
|
||||
assert create_rsp2.status_code == status.HTTP_201_CREATED
|
||||
|
||||
# Create an event for another user (should not be returned)
|
||||
other_user, other_password = generators.create_user(db)
|
||||
other_user, other_password = generators.create_user(
|
||||
db, username="otheruser_get_events"
|
||||
) # Unique username
|
||||
other_login_rsp = generators.login(db, other_user.username, other_password)
|
||||
other_access_token = other_login_rsp["access_token"]
|
||||
other_payload = create_event_payload(4, 5)
|
||||
client.post(
|
||||
create_rsp_other = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {other_access_token}"},
|
||||
json=other_payload,
|
||||
)
|
||||
assert create_rsp_other.status_code == status.HTTP_201_CREATED
|
||||
|
||||
response = client.get(
|
||||
"/api/calendar/events", headers={"Authorization": f"Bearer {access_token}"}
|
||||
@@ -115,35 +142,51 @@ def test_get_events_success(db: Session, client: TestClient) -> None:
|
||||
assert data[1]["user_id"] == user.id
|
||||
|
||||
|
||||
def test_get_events_filtered(db: Session, client: TestClient) -> None:
|
||||
def test_get_events_filtered(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test getting filtered calendar events for a user."""
|
||||
user, password = generators.create_user(db)
|
||||
user, password = generators.create_user(
|
||||
db, username="testuser_filter_events"
|
||||
) # Unique username
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
# Create events
|
||||
payload1 = create_event_payload(0, 1) # Today -> Tomorrow
|
||||
client.post(
|
||||
create_rsp1 = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload1,
|
||||
)
|
||||
assert create_rsp1.status_code == status.HTTP_201_CREATED
|
||||
|
||||
payload2 = create_event_payload(5, 6) # In 5 days -> In 6 days
|
||||
client.post(
|
||||
create_rsp2 = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload2,
|
||||
)
|
||||
assert create_rsp2.status_code == status.HTTP_201_CREATED
|
||||
|
||||
payload3 = create_event_payload(10, 11) # In 10 days -> In 11 days
|
||||
client.post(
|
||||
create_rsp3 = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload3,
|
||||
)
|
||||
assert create_rsp3.status_code == status.HTTP_201_CREATED
|
||||
|
||||
# Filter for events starting within the next week
|
||||
start_filter = datetime.utcnow().isoformat()
|
||||
end_filter = (datetime.utcnow() + timedelta(days=7)).isoformat()
|
||||
start_filter = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
|
||||
end_filter = (
|
||||
(datetime.now(timezone.utc) + timedelta(days=7))
|
||||
.isoformat()
|
||||
.replace("+00:00", "Z")
|
||||
)
|
||||
|
||||
response = client.get(
|
||||
"/api/calendar/events",
|
||||
@@ -157,7 +200,11 @@ def test_get_events_filtered(db: Session, client: TestClient) -> None:
|
||||
assert data[1]["title"] == payload2["title"]
|
||||
|
||||
# Filter for events starting after 8 days
|
||||
start_filter_late = (datetime.utcnow() + timedelta(days=8)).isoformat()
|
||||
start_filter_late = (
|
||||
(datetime.now(timezone.utc) + timedelta(days=8))
|
||||
.isoformat()
|
||||
.replace("+00:00", "Z")
|
||||
)
|
||||
response = client.get(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
@@ -172,34 +219,48 @@ def test_get_events_filtered(db: Session, client: TestClient) -> None:
|
||||
# --- Test Get Event By ID ---
|
||||
|
||||
|
||||
def test_get_event_by_id_unauthorized(db: Session, client: TestClient) -> None:
|
||||
def test_get_event_by_id_unauthorized(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test getting a specific event without authentication."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
|
||||
response = client.get(f"/api/calendar/events/{event_id}")
|
||||
assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
def test_get_event_by_id_success(db: Session, client: TestClient) -> None:
|
||||
def test_get_event_by_id_success(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test getting a specific event successfully."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
|
||||
response = client.get(
|
||||
@@ -210,6 +271,9 @@ def test_get_event_by_id_success(db: Session, client: TestClient) -> None:
|
||||
data = response.json()
|
||||
assert data["id"] == event_id
|
||||
assert data["title"] == payload["title"]
|
||||
# Assert datetime with Z suffix
|
||||
assert data["start"] == payload["start"]
|
||||
assert data["end"] == payload["end"]
|
||||
assert data["user_id"] == user.id
|
||||
|
||||
|
||||
@@ -227,20 +291,31 @@ def test_get_event_by_id_not_found(db: Session, client: TestClient) -> None:
|
||||
assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
def test_get_event_by_id_forbidden(db: Session, client: TestClient) -> None:
|
||||
def test_get_event_by_id_forbidden(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test getting another user's event."""
|
||||
user1, password_user1 = generators.create_user(db)
|
||||
user2, password_user2 = generators.create_user(db)
|
||||
user1, password_user1 = generators.create_user(
|
||||
db, username="user1_forbidden_get"
|
||||
) # Unique username
|
||||
user2, password_user2 = generators.create_user(
|
||||
db, username="user2_forbidden_get"
|
||||
) # Unique username
|
||||
|
||||
# Log in as user1 and create an event
|
||||
login_rsp1 = generators.login(db, user1.username, password_user1)
|
||||
access_token1 = login_rsp1["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token1}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
|
||||
# Log in as user2 and try to get user1's event
|
||||
@@ -259,17 +334,24 @@ def test_get_event_by_id_forbidden(db: Session, client: TestClient) -> None:
|
||||
# --- Test Update Event ---
|
||||
|
||||
|
||||
def test_update_event_unauthorized(db: Session, client: TestClient) -> None:
|
||||
def test_update_event_unauthorized(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test updating an event without authentication."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
update_payload = {"title": "Updated Title"}
|
||||
|
||||
@@ -277,12 +359,20 @@ def test_update_event_unauthorized(db: Session, client: TestClient) -> None:
|
||||
assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
def test_update_event_success(db: Session, client: TestClient) -> None:
|
||||
def test_update_event_success(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test updating an event successfully."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch(
|
||||
"core.celery_app.celery_app.send_task", return_value=None
|
||||
) # Mock for creation
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
@@ -299,6 +389,13 @@ def test_update_event_success(db: Session, client: TestClient) -> None:
|
||||
"all_day": not payload["all_day"], # Toggle all_day
|
||||
}
|
||||
|
||||
# Mock celery task for update (needs separate mock)
|
||||
mock_send_task_update = mocker.patch(
|
||||
"modules.calendar.service.celery_app.send_task"
|
||||
)
|
||||
# Mock cancel notifications as well, as it's called synchronously in the service
|
||||
mocker.patch("modules.calendar.tasks.cancel_event_notifications")
|
||||
|
||||
response = client.patch(
|
||||
f"/api/calendar/events/{event_id}",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
@@ -310,7 +407,8 @@ def test_update_event_success(db: Session, client: TestClient) -> None:
|
||||
assert data["title"] == update_payload["title"]
|
||||
assert data["description"] == update_payload["description"]
|
||||
assert data["all_day"] == update_payload["all_day"]
|
||||
assert data["start"] == payload["start"] # Check correct field name 'start'
|
||||
# Assert datetime with Z suffix
|
||||
assert data["start"] == payload["start"]
|
||||
assert data["user_id"] == user.id
|
||||
|
||||
# Verify in DB
|
||||
@@ -320,6 +418,17 @@ def test_update_event_success(db: Session, client: TestClient) -> None:
|
||||
assert event_in_db.description == update_payload["description"]
|
||||
assert event_in_db.all_day == update_payload["all_day"]
|
||||
|
||||
    # Assert that the update task was re-scheduled correctly
    mock_send_task_update.assert_called_once_with(
        "modules.calendar.tasks.schedule_event_notifications", args=[event_id]
    )
    # Note: schedule_event_notifications cancels any existing notifications itself,
    # so mocking send_task is sufficient here; the cancel mock added earlier covers
    # the direct call made in the service layer, if any.


def test_update_event_not_found(db: Session, client: TestClient) -> None:
|
||||
"""Test updating a non-existent event."""
|
||||
@@ -337,20 +446,31 @@ def test_update_event_not_found(db: Session, client: TestClient) -> None:
|
||||
assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
def test_update_event_forbidden(db: Session, client: TestClient) -> None:
|
||||
def test_update_event_forbidden(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test updating another user's event."""
|
||||
user1, password_user1 = generators.create_user(db)
|
||||
user2, password_user2 = generators.create_user(db)
|
||||
user1, password_user1 = generators.create_user(
|
||||
db, username="user1_forbidden_update"
|
||||
) # Unique username
|
||||
user2, password_user2 = generators.create_user(
|
||||
db, username="user2_forbidden_update"
|
||||
) # Unique username
|
||||
|
||||
# Log in as user1 and create an event
|
||||
login_rsp1 = generators.login(db, user1.username, password_user1)
|
||||
access_token1 = login_rsp1["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token1}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
|
||||
# Log in as user2 and try to update user1's event
|
||||
@@ -371,29 +491,42 @@ def test_update_event_forbidden(db: Session, client: TestClient) -> None:
|
||||
# --- Test Delete Event ---
|
||||
|
||||
|
||||
def test_delete_event_unauthorized(db: Session, client: TestClient) -> None:
|
||||
def test_delete_event_unauthorized(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test deleting an event without authentication."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
|
||||
response = client.delete(f"/api/calendar/events/{event_id}")
|
||||
assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
def test_delete_event_success(db: Session, client: TestClient) -> None:
|
||||
def test_delete_event_success(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None:
|
||||
"""Test deleting an event successfully."""
|
||||
user, password = generators.create_user(db)
|
||||
login_rsp = generators.login(db, user.username, password)
|
||||
access_token = login_rsp["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock the celery task sending for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
@@ -408,12 +541,20 @@ def test_delete_event_success(db: Session, client: TestClient) -> None:
|
||||
event_in_db = db.query(CalendarEvent).filter(CalendarEvent.id == event_id).first()
|
||||
assert event_in_db is not None
|
||||
|
||||
# Mock the cancel_event_notifications function to prevent Redis call
|
||||
mock_cancel_notifications = mocker.patch(
|
||||
"modules.calendar.service.cancel_event_notifications" # Target the function as used in service.py
|
||||
)
|
||||
|
||||
response = client.delete(
|
||||
f"/api/calendar/events/{event_id}",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
)
|
||||
assert response.status_code == status.HTTP_204_NO_CONTENT
|
||||
|
||||
# Assert that cancel_event_notifications was called
|
||||
mock_cancel_notifications.assert_called_once_with(event_id)
|
||||
|
||||
# Verify event is deleted from DB
|
||||
event_in_db = db.query(CalendarEvent).filter(CalendarEvent.id == event_id).first()
|
||||
assert event_in_db is None
|
||||
@@ -441,20 +582,31 @@ def test_delete_event_not_found(db: Session, client: TestClient) -> None:
|
||||
assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
def test_delete_event_forbidden(db: Session, client: TestClient) -> None:
|
||||
def test_delete_event_forbidden(
|
||||
db: Session, client: TestClient, mocker: MockerFixture
|
||||
) -> None: # Add mocker
|
||||
"""Test deleting another user's event."""
|
||||
user1, password_user1 = generators.create_user(db)
|
||||
user2, password_user2 = generators.create_user(db)
|
||||
user1, password_user1 = generators.create_user(
|
||||
db, username="user1_forbidden_delete"
|
||||
) # Unique username
|
||||
user2, password_user2 = generators.create_user(
|
||||
db, username="user2_forbidden_delete"
|
||||
) # Unique username
|
||||
|
||||
# Log in as user1 and create an event
|
||||
login_rsp1 = generators.login(db, user1.username, password_user1)
|
||||
access_token1 = login_rsp1["access_token"]
|
||||
payload = create_event_payload()
|
||||
|
||||
# Mock celery task for creation
|
||||
mocker.patch("core.celery_app.celery_app.send_task")
|
||||
|
||||
create_response = client.post(
|
||||
"/api/calendar/events",
|
||||
headers={"Authorization": f"Bearer {access_token1}"},
|
||||
json=payload,
|
||||
)
|
||||
assert create_response.status_code == status.HTTP_201_CREATED
|
||||
event_id = create_response.json()["id"]
|
||||
|
||||
# Log in as user2 and try to delete user1's event
|
||||
|
||||
2 interfaces/nativeapp/.env.cam Normal file
@@ -0,0 +1,2 @@
EXPO_PUBLIC_API_URL='https://maia.depaoli.id.au/api'
EXPO_PROJECT_ID='au.com.seedeep.maia'
@@ -1,5 +1,5 @@
// App.tsx
import React, { useCallback } from 'react'; // Removed useEffect, useState as they are implicitly used by useFonts
import React, { useCallback, useEffect } from 'react'; // Add useEffect
import { Platform, View } from 'react-native';
import { Provider as PaperProvider } from 'react-native-paper';
import { NavigationContainer, DarkTheme as NavigationDarkTheme } from '@react-navigation/native'; // Import NavigationDarkTheme
@@ -8,10 +8,14 @@ import { StatusBar } from 'expo-status-bar';
import * as SplashScreen from 'expo-splash-screen';
import { useFonts } from 'expo-font';

import { AuthProvider } from './src/contexts/AuthContext';
import { AuthProvider, useAuth } from './src/contexts/AuthContext'; // Import useAuth
import RootNavigator from './src/navigation/RootNavigator';
import theme from './src/constants/theme'; // This is the Paper theme
// Removed CombinedDarkTheme import as we'll use NavigationDarkTheme directly for NavigationContainer
import theme from './src/constants/theme';
import {
  registerForPushNotificationsAsync,
  sendPushTokenToBackend,
  setupNotificationHandlers
} from './src/services/notificationService'; // Import notification functions

// Keep the splash screen visible while we fetch resources
SplashScreen.preventAutoHideAsync();
@@ -30,6 +34,43 @@ const navigationTheme = {
  },
};

// Wrapper component to handle notification logic after auth state is known
function AppContent() {
  const { user } = useAuth(); // Get user state

  useEffect(() => {
    // Setup notification handlers (listeners)
    const cleanupNotificationHandlers = setupNotificationHandlers();

    // Register for push notifications only if user is logged in
    const registerAndSendToken = async () => {
      if (user) { // Only register if logged in
        console.log('[App] User logged in, attempting to register for push notifications...');
        const token = await registerForPushNotificationsAsync();
        if (token) {
          console.log('[App] Push token obtained, sending to backend...');
          await sendPushTokenToBackend(token);
        } else {
          console.log('[App] Could not get push token.');
        }
      } else {
        console.log('[App] User not logged in, skipping push notification registration.');
        // Optionally: If you need to clear the token on the backend when logged out,
        // you might need a separate API call here or handle it server-side based on user activity.
      }
    };

    registerAndSendToken();

    // Cleanup listeners on component unmount
    return () => {
      cleanupNotificationHandlers();
    };
  }, [user]); // Re-run when user logs in or out

  return <RootNavigator />;
}

export default function App() {
  const [fontsLoaded, fontError] = useFonts({
    'Inter-Regular': require('./src/assets/fonts/Inter-Regular.ttf'),
@@ -63,7 +104,8 @@ export default function App() {
      <PaperProvider theme={theme}>
        {/* NavigationContainer uses the simplified navigationTheme */}
        <NavigationContainer theme={navigationTheme}>
          <RootNavigator />
          {/* Use AppContent which contains RootNavigator and notification logic */}
          <AppContent />
        </NavigationContainer>
        <StatusBar
          style="light" // Assuming dark theme

@@ -1,14 +1,14 @@
{
  "expo": {
    "name": "webapp",
    "name": "MAIA",
    "slug": "webapp",
    "version": "1.0.0",
    "orientation": "portrait",
    "icon": "./assets/icon.png",
    "userInterfaceStyle": "light",
    "icon": "./src/assets/MAIA_ICON.png",
    "userInterfaceStyle": "dark",
    "newArchEnabled": true,
    "splash": {
      "image": "./assets/splash-icon.png",
      "image": "./src/assets/MAIA_ICON.png",
      "resizeMode": "contain",
      "backgroundColor": "#ffffff"
    },
@@ -17,10 +17,12 @@
    },
    "android": {
      "adaptiveIcon": {
        "foregroundImage": "./assets/adaptive-icon.png",
        "foregroundImage": "./src/assets/MAIA_ICON.png",
        "backgroundColor": "#ffffff"
      },
      "softwareKeyboardLayoutMode": "resize"
      "softwareKeyboardLayoutMode": "resize",
      "package": "au.com.seedeep.maia",
      "googleServicesFile": "./google-services.json"
    },
    "web": {
      "favicon": "./assets/favicon.png"
@@ -28,6 +30,12 @@
    "plugins": [
      "expo-secure-store",
      "expo-font"
    ]
    ],
    "extra": {
      "eas": {
        "projectId": "4d7d70ce-a4d8-4307-8827-8ef713b95b78"
      }
    },
    "owner": "cdp202"
  }
}

21 interfaces/nativeapp/eas.json Normal file
@@ -0,0 +1,21 @@
{
  "cli": {
    "version": ">= 16.3.2",
    "appVersionSource": "remote"
  },
  "build": {
    "development": {
      "developmentClient": true,
      "distribution": "internal"
    },
    "preview": {
      "distribution": "internal"
    },
    "production": {
      "autoIncrement": true
    }
  },
  "submit": {
    "production": {}
  }
}
29 interfaces/nativeapp/google-services.json Normal file
@@ -0,0 +1,29 @@
{
  "project_info": {
    "project_number": "190108602323",
    "project_id": "maia-4ddcf",
    "storage_bucket": "maia-4ddcf.firebasestorage.app"
  },
  "client": [
    {
      "client_info": {
        "mobilesdk_app_id": "1:190108602323:android:dd073dd13774d87d64a926",
        "android_client_info": {
          "package_name": "au.com.seedeep.maia"
        }
      },
      "oauth_client": [],
      "api_key": [
        {
          "current_key": "AIzaSyBrKtXnwNq_fX3B5ak3kKWFZ4V87-llsEo"
        }
      ],
      "services": {
        "appinvite_service": {
          "other_platform_oauth_client": []
        }
      }
    }
  ],
  "configuration_version": "1"
}
13 interfaces/nativeapp/maia-firebase-private-key.json Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"type": "service_account",
|
||||
"project_id": "maia-4ddcf",
|
||||
"private_key_id": "8ea1d5b1110f712c1ea863442a267e8b35b2aca7",
|
||||
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDkpd2/2cXAhhtg\n8ogpg6zp4LRQ4+YrHbnMRI4nccHxf8/YGgfi5hEs6OXDLT4bb9FbHMIsq8h6pJXe\nWnkdNaEaAqeebQ83pT7bQKsTDCx/YXenJ31rrwTzq4cjcBhwd04fIfH1bu7vd7ru\nJHFlsf7/Zb93yahfCV0yyP22FIeskIhqUutWY7RTpm6zUFlKs8QKIKiWVOTJiKvo\nNAcUK4BDmeDRKF/2wdFjgkXl7R6Ev9UzWf2+gE19RJY8ml25lGzG+fWLnnhx092x\naClGim3G0FRhQr5iyN++2Q82stWyRS7R85jRb8s/b3LT0knVrPrAAQasBHVcSSfp\n6MO4flp7AgMBAAECggEAGqyE9ZQ0vzSF7iXtH5a2beRidMtZdy81FTDsOorJWuCT\nwTysLdq0Jz6WS1I0XCQL0urEdkymCzS3LST12yP+AthLcK59Z3r2HcLqEkNJz6Rx\nvoTbW1wkIj8g+U/i8f/hE720ifLimfooSw7iUcBVpLrcft9+LnQbtMiA3KSBfW54\nmzYLWanXgBhKVMiGyR3FpnzDoLcO3xbItsLhaF/DlNN5FTvDCNQCQQwrlzkTTC+Q\npBf/Va+UMljIOYWaNfhgwzsiO86KpmKyWiVd+lhnrZfj/KEZjX+e8InSYd/D3dqn\nwnXY84pwRi2THCY0Hs2iDiX9uEnnq6fwh1I4B2xUIQKBgQD4msFXpl6+CU0iepmz\n2xpvo9AFX/VoQYoDz73DnCjcbFxldX6lWy8gEryQCPbB3Ir48xO+/OdVS2xCiSlx\nj+RqlIGf7oPHxEAJyJpiu93n/Zug/EJovjX5PxyD6Ye6ECr23yQfK20YRM/mdlJp\nm/0cZ7jEkXQLermDK1BAtUGd2wKBgQDrcyG47mjwZj9vG/Besl0VX+OTvlxrd2Gx\nAC7e27xkgNViSd8gZTGna0+Kp+sk6iP9h1KAqbFqpQejGPPvxtLkDuFbffjOWNoa\nKd9ERBxf0MEP2/dWiyusDDm+FvhSYAnKfHmtEQc+DMJ+5bNujDuRRcfrXxnmNEdt\n/WcpZ8bn4QKBgA8LXnPtb4JUkcRqYu7NbZYf9bC9k95RSQbeBX/W7WoZbKX/LEDZ\necqZF6wnvrcQn6BdJW7DY0R4If8MyeNDb/E7N3T0PClUqQNujlk3QUCOymI9oc8w\n45dHyHP7J+mMnOz/p/Hy8NEtKN+rfWVCuViEtlu+6aTgMmXLszmXPndNAoGAXh6Z\n/mkffeoBtZK/lbtLRn4cZTUVkMgaPz1Jf0DroGl342CQV0zceoaFN3JEp28JkBGG\nQ3SSPYVW9jXFXbZnG09verlyuln+ZbMTUyC/DvZOFt7hkrDzdkU01+4quhM2FsGH\nik1iTcWgAkYkYi6gqUPx1P8hRUrkuu0vTff0JUECgYBUf3Jhoh6XqLMMdnQvEj1Z\ndcrzdKFoSCB9sVuBqaEFu5sHQwc3HIodXGW1LT0eA7N0UAs4AZViNxCfMKCYoH13\nUIP2+EGy+a2RNkoezEANG0wwRa49yot8aDYQRNvKORIdkD10RIVORb0RJPldTpGP\nl9FKkEe5IAsEbwyn3pNmSQ==\n-----END PRIVATE KEY-----\n",
|
||||
"client_email": "firebase-adminsdk-fbsvc@maia-4ddcf.iam.gserviceaccount.com",
|
||||
"client_id": "100360447602089015870",
|
||||
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
||||
"token_uri": "https://oauth2.googleapis.com/token",
|
||||
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
||||
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/firebase-adminsdk-fbsvc%40maia-4ddcf.iam.gserviceaccount.com",
|
||||
"universe_domain": "googleapis.com"
|
||||
}
|
||||
495 interfaces/nativeapp/package-lock.json generated
@@ -18,7 +18,10 @@
|
||||
"axios": "^1.8.4",
|
||||
"date-fns": "^4.1.0",
|
||||
"expo": "^52.0.46",
|
||||
"expo-dev-client": "~5.0.20",
|
||||
"expo-device": "~7.0.3",
|
||||
"expo-font": "~13.0.4",
|
||||
"expo-notifications": "~0.29.14",
|
||||
"expo-secure-store": "~14.0.1",
|
||||
"expo-splash-screen": "~0.29.24",
|
||||
"expo-status-bar": "~2.0.1",
|
||||
@@ -2681,6 +2684,11 @@
|
||||
"js-yaml": "bin/js-yaml.js"
|
||||
}
|
||||
},
|
||||
"node_modules/@ide/backoff": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@ide/backoff/-/backoff-1.0.0.tgz",
|
||||
"integrity": "sha512-F0YfUDjvT+Mtt/R4xdl2X0EYCHMMiJqNLdxHD++jDT5ydEFIyqbCHh51Qx2E211dgZprPKhV7sHmnXKpLuvc5g=="
|
||||
},
|
||||
"node_modules/@isaacs/cliui": {
|
||||
"version": "8.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
|
||||
@@ -3746,6 +3754,21 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/ajv": {
|
||||
"version": "8.11.0",
|
||||
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz",
|
||||
"integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==",
|
||||
"dependencies": {
|
||||
"fast-deep-equal": "^3.1.1",
|
||||
"json-schema-traverse": "^1.0.0",
|
||||
"require-from-string": "^2.0.2",
|
||||
"uri-js": "^4.2.2"
|
||||
},
|
||||
"funding": {
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/epoberezkin"
|
||||
}
|
||||
},
|
||||
"node_modules/anser": {
|
||||
"version": "1.4.10",
|
||||
"resolved": "https://registry.npmjs.org/anser/-/anser-1.4.10.tgz",
|
||||
@@ -3852,6 +3875,18 @@
|
||||
"resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz",
|
||||
"integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA=="
|
||||
},
|
||||
"node_modules/assert": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/assert/-/assert-2.1.0.tgz",
|
||||
"integrity": "sha512-eLHpSK/Y4nhMJ07gDaAzoX/XAKS8PSaojml3M0DM4JpV1LAi5JOJ/p6H/XWrl8L+DzVEvVCW1z3vWAaB9oTsQw==",
|
||||
"dependencies": {
|
||||
"call-bind": "^1.0.2",
|
||||
"is-nan": "^1.3.2",
|
||||
"object-is": "^1.1.5",
|
||||
"object.assign": "^4.1.4",
|
||||
"util": "^0.12.5"
|
||||
}
|
||||
},
|
||||
"node_modules/ast-types": {
|
||||
"version": "0.15.2",
|
||||
"resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.15.2.tgz",
|
||||
@@ -3890,6 +3925,20 @@
|
||||
"node": ">= 4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/available-typed-arrays": {
|
||||
"version": "1.0.7",
|
||||
"resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz",
|
||||
"integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==",
|
||||
"dependencies": {
|
||||
"possible-typed-array-names": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.8.4",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.8.4.tgz",
|
||||
@@ -4095,6 +4144,11 @@
|
||||
"@babel/core": "^7.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/badgin": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/badgin/-/badgin-1.2.3.tgz",
|
||||
"integrity": "sha512-NQGA7LcfCpSzIbGRbkgjgdWkjy7HI+Th5VLxTJfW5EeaAf3fnS+xWQaQOCYiny+q6QSvxqoSO04vCx+4u++EJw=="
|
||||
},
|
||||
"node_modules/balanced-match": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
||||
@@ -4314,6 +4368,23 @@
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
|
||||
"integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="
|
||||
},
|
||||
"node_modules/call-bind": {
|
||||
"version": "1.0.8",
|
||||
"resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz",
|
||||
"integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==",
|
||||
"dependencies": {
|
||||
"call-bind-apply-helpers": "^1.0.0",
|
||||
"es-define-property": "^1.0.0",
|
||||
"get-intrinsic": "^1.2.4",
|
||||
"set-function-length": "^1.2.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/call-bind-apply-helpers": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
|
||||
@@ -4326,6 +4397,21 @@
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/call-bound": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
|
||||
"integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
|
||||
"dependencies": {
|
||||
"call-bind-apply-helpers": "^1.0.2",
|
||||
"get-intrinsic": "^1.3.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/caller-callsite": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/caller-callsite/-/caller-callsite-2.0.0.tgz",
|
||||
@@ -4866,6 +4952,22 @@
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/define-data-property": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
|
||||
"integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
|
||||
"dependencies": {
|
||||
"es-define-property": "^1.0.0",
|
||||
"es-errors": "^1.3.0",
|
||||
"gopd": "^1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/define-lazy-prop": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz",
|
||||
@@ -4874,6 +4976,22 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/define-properties": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz",
|
||||
"integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==",
|
||||
"dependencies": {
|
||||
"define-data-property": "^1.0.1",
|
||||
"has-property-descriptors": "^1.0.0",
|
||||
"object-keys": "^1.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/del": {
|
||||
"version": "6.1.1",
|
||||
"resolved": "https://registry.npmjs.org/del/-/del-6.1.1.tgz",
|
||||
@@ -5278,6 +5396,14 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/expo-application": {
|
||||
"version": "6.0.2",
|
||||
"resolved": "https://registry.npmjs.org/expo-application/-/expo-application-6.0.2.tgz",
|
||||
"integrity": "sha512-qcj6kGq3mc7x5yIb5KxESurFTJCoEKwNEL34RdPEvTB/xhl7SeVZlu05sZBqxB1V4Ryzq/LsCb7NHNfBbb3L7A==",
|
||||
"peerDependencies": {
|
||||
"expo": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-asset": {
|
||||
"version": "11.0.5",
|
||||
"resolved": "https://registry.npmjs.org/expo-asset/-/expo-asset-11.0.5.tgz",
|
||||
@@ -5307,6 +5433,90 @@
|
||||
"react-native": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-dev-client": {
|
||||
"version": "5.0.20",
|
||||
"resolved": "https://registry.npmjs.org/expo-dev-client/-/expo-dev-client-5.0.20.tgz",
|
||||
"integrity": "sha512-bLNkHdU7V3I4UefgJbJnIDUBUL0LxIal/xYEx9BbgDd3B7wgQKY//+BpPIxBOKCQ22lkyiHY8y9tLhO903sAgg==",
|
||||
"dependencies": {
|
||||
"expo-dev-launcher": "5.0.35",
|
||||
"expo-dev-menu": "6.0.25",
|
||||
"expo-dev-menu-interface": "1.9.3",
|
||||
"expo-manifests": "~0.15.8",
|
||||
"expo-updates-interface": "~1.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"expo": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-dev-launcher": {
|
||||
"version": "5.0.35",
|
||||
"resolved": "https://registry.npmjs.org/expo-dev-launcher/-/expo-dev-launcher-5.0.35.tgz",
|
||||
"integrity": "sha512-hEQr0ZREnUMxZ6wtQgfK1lzYnbb0zar3HqYZhmANzXmE6UEPbQ4GByLzhpfz/d+xxdBVQZsrHdtiV28KPG2sog==",
|
||||
"dependencies": {
|
||||
"ajv": "8.11.0",
|
||||
"expo-dev-menu": "6.0.25",
|
||||
"expo-manifests": "~0.15.8",
|
||||
"resolve-from": "^5.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"expo": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-dev-menu": {
|
||||
"version": "6.0.25",
|
||||
"resolved": "https://registry.npmjs.org/expo-dev-menu/-/expo-dev-menu-6.0.25.tgz",
|
||||
"integrity": "sha512-K2m4z/I+CPWbMtHlDzU68lHaQs52De0v5gbsjAmA5ig8FrYh4MKZvPxSVANaiKENzgmtglu8qaFh7ua9Gt2TfA==",
|
||||
"dependencies": {
|
||||
"expo-dev-menu-interface": "1.9.3"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"expo": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-dev-menu-interface": {
|
||||
"version": "1.9.3",
|
||||
"resolved": "https://registry.npmjs.org/expo-dev-menu-interface/-/expo-dev-menu-interface-1.9.3.tgz",
|
||||
"integrity": "sha512-KY/dWTBE1l47i9V366JN5rC6YIdOc9hz8yAmZzkl5DrPia5l3M2WIjtnpHC9zUkNjiSiG2urYoOAq4H/uLdmyg==",
|
||||
"peerDependencies": {
|
||||
"expo": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-device": {
|
||||
"version": "7.0.3",
|
||||
"resolved": "https://registry.npmjs.org/expo-device/-/expo-device-7.0.3.tgz",
|
||||
"integrity": "sha512-uNGhDYmpDj/3GySWZmRiYSt52Phdim11p0pXfgpCq/nMks0+UPZwl3D0vin5N8/gpVe5yzb13GYuFxiVoDyniw==",
|
||||
"dependencies": {
|
||||
"ua-parser-js": "^0.7.33"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"expo": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-device/node_modules/ua-parser-js": {
|
||||
"version": "0.7.40",
|
||||
"resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.40.tgz",
|
||||
"integrity": "sha512-us1E3K+3jJppDBa3Tl0L3MOJiGhe1C6P0+nIvQAFYbxlMAx0h81eOwLmU57xgqToduDDPx3y5QsdjPfDu+FgOQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/ua-parser-js"
|
||||
},
|
||||
{
|
||||
"type": "paypal",
|
||||
"url": "https://paypal.me/faisalman"
|
||||
},
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/faisalman"
|
||||
}
|
||||
],
|
||||
"bin": {
|
||||
"ua-parser-js": "script/cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-file-system": {
|
||||
"version": "18.0.12",
|
||||
"resolved": "https://registry.npmjs.org/expo-file-system/-/expo-file-system-18.0.12.tgz",
|
||||
@@ -5331,6 +5541,11 @@
|
||||
"react": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-json-utils": {
|
||||
"version": "0.14.0",
|
||||
"resolved": "https://registry.npmjs.org/expo-json-utils/-/expo-json-utils-0.14.0.tgz",
|
||||
"integrity": "sha512-xjGfK9dL0B1wLnOqNkX0jM9p48Y0I5xEPzHude28LY67UmamUyAACkqhZGaPClyPNfdzczk7Ej6WaRMT3HfXvw=="
|
||||
},
|
||||
"node_modules/expo-keep-awake": {
|
||||
"version": "14.0.3",
|
||||
"resolved": "https://registry.npmjs.org/expo-keep-awake/-/expo-keep-awake-14.0.3.tgz",
|
||||
@@ -5340,6 +5555,18 @@
|
||||
"react": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-manifests": {
|
||||
"version": "0.15.8",
|
||||
"resolved": "https://registry.npmjs.org/expo-manifests/-/expo-manifests-0.15.8.tgz",
|
||||
"integrity": "sha512-VuIyaMfRfLZeETNsRohqhy1l7iZ7I+HKMPfZXVL2Yn17TT0WkOhZoq1DzYwPbOHPgp1Uk6phNa86EyaHrD2DLw==",
|
||||
"dependencies": {
|
||||
"@expo/config": "~10.0.11",
|
||||
"expo-json-utils": "~0.14.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"expo": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-modules-autolinking": {
|
||||
"version": "2.0.8",
|
||||
"resolved": "https://registry.npmjs.org/expo-modules-autolinking/-/expo-modules-autolinking-2.0.8.tgz",
|
||||
@@ -5399,6 +5626,25 @@
|
||||
"invariant": "^2.2.4"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-notifications": {
|
||||
"version": "0.29.14",
|
||||
"resolved": "https://registry.npmjs.org/expo-notifications/-/expo-notifications-0.29.14.tgz",
|
||||
"integrity": "sha512-AVduNx9mKOgcAqBfrXS1OHC9VAQZrDQLbVbcorMjPDGXW7m0Q5Q+BG6FYM/saVviF2eO8fhQRsTT40yYv5/bhQ==",
|
||||
"dependencies": {
|
||||
"@expo/image-utils": "^0.6.5",
|
||||
"@ide/backoff": "^1.0.0",
|
||||
"abort-controller": "^3.0.0",
|
||||
"assert": "^2.0.0",
|
||||
"badgin": "^1.1.5",
|
||||
"expo-application": "~6.0.2",
|
||||
"expo-constants": "~17.0.8"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"expo": "*",
|
||||
"react": "*",
|
||||
"react-native": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-secure-store": {
|
||||
"version": "14.0.1",
|
||||
"resolved": "https://registry.npmjs.org/expo-secure-store/-/expo-secure-store-14.0.1.tgz",
|
||||
@@ -5427,6 +5673,14 @@
|
||||
"react-native": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/expo-updates-interface": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/expo-updates-interface/-/expo-updates-interface-1.0.0.tgz",
|
||||
"integrity": "sha512-93oWtvULJOj+Pp+N/lpTcFfuREX1wNeHtp7Lwn8EbzYYmdn37MvZU3TPW2tYYCZuhzmKEXnUblYcruYoDu7IrQ==",
|
||||
"peerDependencies": {
|
||||
"expo": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/exponential-backoff": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.2.tgz",
|
||||
@@ -5624,6 +5878,20 @@
|
||||
"resolved": "https://registry.npmjs.org/fontfaceobserver/-/fontfaceobserver-2.3.0.tgz",
|
||||
"integrity": "sha512-6FPvD/IVyT4ZlNe7Wcn5Fb/4ChigpucKYSvD6a+0iMoLn2inpo711eyIcKjmDtE5XNcgAkSH9uN/nfAeZzHEfg=="
|
||||
},
|
||||
"node_modules/for-each": {
|
||||
"version": "0.3.5",
|
||||
"resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz",
|
||||
"integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==",
|
||||
"dependencies": {
|
||||
"is-callable": "^1.2.7"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/foreground-child": {
|
||||
"version": "3.3.1",
|
||||
"resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
|
||||
@@ -5900,6 +6168,17 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/has-property-descriptors": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
|
||||
"integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
|
||||
"dependencies": {
|
||||
"es-define-property": "^1.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/has-symbols": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
|
||||
@@ -6156,6 +6435,21 @@
|
||||
"node": ">= 0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/is-arguments": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.2.0.tgz",
|
||||
"integrity": "sha512-7bVbi0huj/wrIAOzb8U1aszg9kdi3KN/CyU19CTI7tAoZYEZoL9yCDXpbXN+uPsuWnP02cyug1gleqq+TU+YCA==",
|
||||
"dependencies": {
|
||||
"call-bound": "^1.0.2",
|
||||
"has-tostringtag": "^1.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-arrayish": {
|
||||
"version": "0.2.1",
|
||||
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
|
||||
@@ -6166,6 +6460,17 @@
|
||||
"resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz",
|
||||
"integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w=="
|
||||
},
|
||||
"node_modules/is-callable": {
|
||||
"version": "1.2.7",
|
||||
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz",
|
||||
"integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-core-module": {
|
||||
"version": "2.16.1",
|
||||
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
|
||||
@@ -6218,6 +6523,23 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/is-generator-function": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz",
|
||||
"integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==",
|
||||
"dependencies": {
|
||||
"call-bound": "^1.0.3",
|
||||
"get-proto": "^1.0.0",
|
||||
"has-tostringtag": "^1.0.2",
|
||||
"safe-regex-test": "^1.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-glob": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
|
||||
@@ -6229,6 +6551,21 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-nan": {
|
||||
"version": "1.3.2",
|
||||
"resolved": "https://registry.npmjs.org/is-nan/-/is-nan-1.3.2.tgz",
|
||||
"integrity": "sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==",
|
||||
"dependencies": {
|
||||
"call-bind": "^1.0.0",
|
||||
"define-properties": "^1.1.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-number": {
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
|
||||
@@ -6272,6 +6609,23 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-regex": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz",
|
||||
"integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==",
|
||||
"dependencies": {
|
||||
"call-bound": "^1.0.2",
|
||||
"gopd": "^1.2.0",
|
||||
"has-tostringtag": "^1.0.2",
|
||||
"hasown": "^2.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-stream": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz",
|
||||
@@ -6280,6 +6634,20 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-typed-array": {
|
||||
"version": "1.1.15",
|
||||
"resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz",
|
||||
"integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==",
|
||||
"dependencies": {
|
||||
"which-typed-array": "^1.1.16"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-wsl": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz",
|
||||
@@ -6596,6 +6964,11 @@
|
||||
"resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz",
|
||||
"integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw=="
|
||||
},
|
||||
"node_modules/json-schema-traverse": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
|
||||
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="
|
||||
},
|
||||
"node_modules/json5": {
|
||||
"version": "2.2.3",
|
||||
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
|
||||
@@ -7819,6 +8192,48 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/object-is": {
|
||||
"version": "1.1.6",
|
||||
"resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.6.tgz",
|
||||
"integrity": "sha512-F8cZ+KfGlSGi09lJT7/Nd6KJZ9ygtvYC0/UYYLI9nmQKLMnydpB9yvbv9K1uSkEu7FU9vYPmVwLg328tX+ot3Q==",
|
||||
"dependencies": {
|
||||
"call-bind": "^1.0.7",
|
||||
"define-properties": "^1.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/object-keys": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
|
||||
"integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/object.assign": {
|
||||
"version": "4.1.7",
|
||||
"resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz",
|
||||
"integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==",
|
||||
"dependencies": {
|
||||
"call-bind": "^1.0.8",
|
||||
"call-bound": "^1.0.3",
|
||||
"define-properties": "^1.2.1",
|
||||
"es-object-atoms": "^1.0.0",
|
||||
"has-symbols": "^1.1.0",
|
||||
"object-keys": "^1.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/on-finished": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz",
|
||||
@@ -8258,6 +8673,14 @@
|
||||
"node": ">=4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/possible-typed-array-names": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz",
|
||||
"integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/postcss": {
|
||||
"version": "8.4.49",
|
||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.49.tgz",
|
||||
@@ -9204,6 +9627,22 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"node_modules/safe-regex-test": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz",
|
||||
"integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==",
|
||||
"dependencies": {
|
||||
"call-bound": "^1.0.2",
|
||||
"es-errors": "^1.3.0",
|
||||
"is-regex": "^1.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/sax": {
|
||||
"version": "1.4.1",
|
||||
"resolved": "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz",
|
||||
@@ -9393,6 +9832,22 @@
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/set-function-length": {
|
||||
"version": "1.2.2",
|
||||
"resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
|
||||
"integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
|
||||
"dependencies": {
|
||||
"define-data-property": "^1.1.4",
|
||||
"es-errors": "^1.3.0",
|
||||
"function-bind": "^1.1.2",
|
||||
"get-intrinsic": "^1.2.4",
|
||||
"gopd": "^1.0.1",
|
||||
"has-property-descriptors": "^1.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/setimmediate": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz",
|
||||
@@ -10308,6 +10763,14 @@
|
||||
"browserslist": ">= 4.21.0"
|
||||
}
|
||||
},
|
||||
"node_modules/uri-js": {
|
||||
"version": "4.4.1",
|
||||
"resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
|
||||
"integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
|
||||
"dependencies": {
|
||||
"punycode": "^2.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/use-latest-callback": {
|
||||
"version": "0.2.3",
|
||||
"resolved": "https://registry.npmjs.org/use-latest-callback/-/use-latest-callback-0.2.3.tgz",
|
||||
@@ -10324,6 +10787,18 @@
|
||||
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/util": {
|
||||
"version": "0.12.5",
|
||||
"resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz",
|
||||
"integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==",
|
||||
"dependencies": {
|
||||
"inherits": "^2.0.3",
|
||||
"is-arguments": "^1.0.4",
|
||||
"is-generator-function": "^1.0.7",
|
||||
"is-typed-array": "^1.1.3",
|
||||
"which-typed-array": "^1.1.2"
|
||||
}
|
||||
},
|
||||
"node_modules/utils-merge": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
|
||||
@@ -10444,6 +10919,26 @@
|
||||
"node": ">= 8"
|
||||
}
|
||||
},
|
||||
"node_modules/which-typed-array": {
|
||||
"version": "1.1.19",
|
||||
"resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz",
|
||||
"integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==",
|
||||
"dependencies": {
|
||||
"available-typed-arrays": "^1.0.7",
|
||||
"call-bind": "^1.0.8",
|
||||
"call-bound": "^1.0.4",
|
||||
"for-each": "^0.3.5",
|
||||
"get-proto": "^1.0.1",
|
||||
"gopd": "^1.2.0",
|
||||
"has-tostringtag": "^1.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/wonka": {
|
||||
"version": "6.3.5",
|
||||
"resolved": "https://registry.npmjs.org/wonka/-/wonka-6.3.5.tgz",
|
||||
|
||||
@@ -4,8 +4,8 @@
"main": "index.ts",
"scripts": {
"start": "expo start",
"android": "expo start --android",
"ios": "expo start --ios",
"android": "expo run:android",
"ios": "expo run:ios",
"web": "expo start --web"
},
"dependencies": {
@@ -33,7 +33,10 @@
"react-native-safe-area-context": "4.12.0",
"react-native-screens": "~4.4.0",
"react-native-vector-icons": "^10.2.0",
"react-native-web": "~0.19.13"
"react-native-web": "~0.19.13",
"expo-dev-client": "~5.0.20",
"expo-notifications": "~0.29.14",
"expo-device": "~7.0.3"
},
"devDependencies": {
"@babel/core": "^7.25.2",

@@ -1,17 +1,13 @@
// src/api/client.ts
import axios, { AxiosError } from 'axios'; // Import AxiosError
import axios, { AxiosError } from 'axios';
import { Platform } from 'react-native';
import * as SecureStore from 'expo-secure-store';
import AsyncStorage from '@react-native-async-storage/async-storage';

// const API_BASE_URL = process.env.EXPO_PUBLIC_API_URL || 'http://192.168.255.221:8000/api'; // Use your machine's IP
// const API_BASE_URL = process.env.EXPO_PUBLIC_API_URL || 'http://192.168.1.9:8000/api'; // Use your machine's IP
const API_BASE_URL = process.env.EXPO_PUBLIC_API_URL || 'https://maia.depaoli.id.au/api'; // Use your machine's IP
const ACCESS_TOKEN_KEY = 'maia_access_token'; // Renamed for clarity
const REFRESH_TOKEN_KEY = 'maia_refresh_token'; // Key for refresh token

console.log("Using API Base URL:", API_BASE_URL);
const API_BASE_URL = process.env.EXPO_PUBLIC_API_URL || 'https://maia.depaoli.id.au/api';
const ACCESS_TOKEN_KEY = 'maia_access_token';
const REFRESH_TOKEN_KEY = 'maia_refresh_token';

// Helper functions for storage
const storeToken = async (key: string, token: string): Promise<void> => {
@@ -34,7 +30,7 @@ const deleteToken = async (key: string): Promise<void> => {
if (Platform.OS === 'web') {
await AsyncStorage.removeItem(key);
} else {
await SecureStore.deleteItemAsync(key).catch(() => {}); // Ignore delete error
await SecureStore.deleteItemAsync(key).catch(() => {});
}
};

@@ -165,6 +161,7 @@ apiClient.interceptors.response.use(

} // End of 401 handling
} else if (error.request) {
console.log("Using API Base URL:", API_BASE_URL);
console.error('[API Client] Network Error or No Response:', error.message);
if (error.message.toLowerCase().includes('network error') && Platform.OS === 'web') {
console.warn('[API Client] Hint: A "Network Error" on web often masks a CORS issue. Check browser console & backend CORS config.');
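For reference, the hunk above keeps api/client.ts's platform split for token storage: SecureStore on native, AsyncStorage on web. A minimal sketch of the matching read helper under the same assumption (the real getToken in client.ts sits outside this hunk and may differ):

// Hypothetical sketch only; client.ts defines its own reader outside this hunk.
import { Platform } from 'react-native';
import * as SecureStore from 'expo-secure-store';
import AsyncStorage from '@react-native-async-storage/async-storage';

const getToken = async (key: string): Promise<string | null> => {
  if (Platform.OS === 'web') {
    // Web builds fall back to AsyncStorage (backed by localStorage).
    return AsyncStorage.getItem(key);
  }
  // Native builds read from the device keychain/keystore.
  return SecureStore.getItemAsync(key).catch(() => null);
};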
Binary file not shown.
Before: 247 KiB | After: 71 KiB
@@ -24,6 +24,7 @@ interface AuthContextData {
user: UserData | null; // Add user data to context
login: (username: string, password: string) => Promise<void>;
logout: () => Promise<void>;
register: (username: string, password: string, name: string) => Promise<void>; // Add register function
}

const AuthContext = createContext<AuthContextData>({
@@ -32,6 +33,7 @@ const AuthContext = createContext<AuthContextData>({
user: null, // Initialize user as null
login: async () => { throw new Error('AuthContext not initialized'); },
logout: async () => { throw new Error('AuthContext not initialized'); },
register: async () => { throw new Error('AuthContext not initialized'); }, // Add register initializer
});

interface AuthProviderProps {
@@ -145,6 +147,33 @@ export const AuthProvider: React.FC<AuthProviderProps> = ({ children }) => {
}
}, [fetchUserData]); // Added fetchUserData dependency

const register = useCallback(async (username: string, password: string, name: string) => {
console.log("[AuthContext] register: Function called with:", username, name);
try {
// Call the backend register endpoint
const response = await apiClient.post('/auth/register', {
username,
password,
name,
}, {
headers: {
'accept': 'application/json',
'Content-Type': 'application/json',
},
});

console.log('[AuthContext] register: Registration successful:', response.data);
// Optionally, you could automatically log the user in here
// For now, we'll just let the user log in manually after registering
// Or display a success message and navigate back to login

} catch (error: any) {
console.error("[AuthContext] register: Caught Error Object:", error);
// Rethrow the error so the UI can handle it (e.g., display specific messages)
throw error;
}
}, []); // No dependencies needed for register itself

const logout = useCallback(async () => {
console.log('[AuthContext] logout: Logging out.');
const refreshToken = await getRefreshToken();
@@ -171,7 +200,8 @@ export const AuthProvider: React.FC<AuthProviderProps> = ({ children }) => {
user: userState, // Provide user state
login,
logout,
}), [isAuthenticatedState, isLoading, userState, login, logout]); // Added userState dependency
register, // Add register to context value
}), [isAuthenticatedState, isLoading, userState, login, logout, register]); // Added register dependency

// ... (rest of the component: Provider, useAuth, AuthLoadingScreen) ...
return (
@@ -3,9 +3,9 @@ import React from 'react';
import { createNativeStackNavigator } from '@react-navigation/native-stack';

import LoginScreen from '../screens/LoginScreen';
// Import SignUpScreen, ForgotPasswordScreen etc. if you have them
import RegisterScreen from '../screens/RegisterScreen'; // Import the new screen

import { AuthStackParamList } from '../types/navigation';
import { AuthStackParamList } from '../types/navigation'; // Import from the new types file

const Stack = createNativeStackNavigator<AuthStackParamList>();

@@ -13,8 +13,7 @@ const AuthNavigator = () => {
return (
<Stack.Navigator screenOptions={{ headerShown: false }}>
<Stack.Screen name="Login" component={LoginScreen} />
{/* Add other auth screens here */}
{/* <Stack.Screen name="SignUp" component={SignUpScreen} /> */}
<Stack.Screen name="Register" component={RegisterScreen} />
</Stack.Navigator>
);
};
@@ -1,52 +1,99 @@
import React, { useState } from 'react';
import { View, StyleSheet } from 'react-native';
import { Button, Checkbox, Text, ActivityIndicator, Snackbar } from 'react-native-paper';
import { Button, Checkbox, Text, ActivityIndicator, Snackbar, TextInput, Divider, useTheme } from 'react-native-paper'; // Added TextInput, Divider, useTheme
import { clearDatabase } from '../api/admin';
// Remove useNavigation import if no longer needed elsewhere in this file
// import { useNavigation } from '@react-navigation/native';
import { useAuth } from '../contexts/AuthContext'; // Import useAuth
import apiClient from '../api/client'; // Import apiClient
import { useAuth } from '../contexts/AuthContext';

const AdminScreen = () => {
const [isHardClear, setIsHardClear] = useState(false);
const [isLoading, setIsLoading] = useState(false);
const [snackbarVisible, setSnackbarVisible] = useState(false);
const [snackbarMessage, setSnackbarMessage] = useState('');
// const navigation = useNavigation(); // Remove if not used elsewhere
const { logout } = useAuth(); // Get the logout function from context
const theme = useTheme(); // Get theme for styling if needed

// --- State for Clear DB ---
const [isHardClear, setIsHardClear] = useState(false);
const [isClearingDb, setIsClearingDb] = useState(false); // Renamed from isLoading
const [clearDbSnackbarVisible, setClearDbSnackbarVisible] = useState(false); // Renamed
const [clearDbSnackbarMessage, setClearDbSnackbarMessage] = useState(''); // Renamed

// --- State for Send Notification ---
const [username, setUsername] = useState('');
const [title, setTitle] = useState('');
const [body, setBody] = useState('');
const [isSendingNotification, setIsSendingNotification] = useState(false); // New loading state
const [notificationError, setNotificationError] = useState<string | null>(null); // New error state
const [notificationSuccess, setNotificationSuccess] = useState<string | null>(null); // New success state

const { logout } = useAuth();

// --- Clear DB Handler ---
const handleClearDb = async () => {
setIsLoading(true);
setSnackbarVisible(false);
setIsClearingDb(true); // Use renamed state
setClearDbSnackbarVisible(false);
try {
const response = await clearDatabase(isHardClear);
setSnackbarMessage(response.message || 'Database cleared successfully.');
setSnackbarVisible(true);
setClearDbSnackbarMessage(response.message || 'Database cleared successfully.');
setClearDbSnackbarVisible(true);

// If hard clear was successful, trigger the logout process from AuthContext
if (isHardClear) {
console.log('Hard clear successful, calling logout...');
await logout(); // Call the logout function from AuthContext
// The RootNavigator will automatically switch to the AuthFlow
// No need to manually navigate or set loading to false here
return; // Exit early
await logout();
return;
}

} catch (error: any) {
console.error("Error clearing database:", error);
setSnackbarMessage(error.response?.data?.detail || 'Failed to clear database.');
setSnackbarVisible(true);
setClearDbSnackbarMessage(error.response?.data?.detail || 'Failed to clear database.');
setClearDbSnackbarVisible(true);
} finally {
// Only set loading to false if it wasn't a hard clear (as logout handles navigation)
if (!isHardClear) {
setIsLoading(false);
setIsClearingDb(false); // Use renamed state
}
}
};

// --- Send Notification Handler ---
const handleSendNotification = async () => {
if (!username || !title || !body) {
setNotificationError('Username, Title, and Body are required.');
setNotificationSuccess(null);
return;
}

setIsSendingNotification(true);
setNotificationError(null);
setNotificationSuccess(null);

try {
const response = await apiClient.post('/admin/send-notification', {
username,
title,
body,
// data: {} // Add optional data payload if needed
});

if (response.status === 200) {
setNotificationSuccess(response.data.message || 'Notification sent successfully!');
// Clear fields after success
setUsername('');
setTitle('');
setBody('');
} else {
setNotificationError(response.data?.detail || 'Failed to send notification.');
}
} catch (err: any) {
console.error("Error sending notification:", err.response?.data || err.message);
setNotificationError(err.response?.data?.detail || 'An error occurred while sending the notification.');
} finally {
setIsSendingNotification(false);
}
};

return (
<View style={styles.container}>
<Text variant="headlineMedium" style={styles.title}>Admin Controls</Text>

{/* --- Clear Database Section --- */}
<Text variant="titleMedium" style={styles.sectionTitle}>Clear Database</Text>
<View style={styles.checkboxContainer}>
<Checkbox
status={isHardClear ? 'checked' : 'unchecked'}
@@ -54,24 +101,68 @@ const AdminScreen = () => {
/>
<Text onPress={() => setIsHardClear(!isHardClear)}>Hard Clear (Delete all data)</Text>
</View>

<Button
mode="contained"
onPress={handleClearDb}
disabled={isLoading}
disabled={isClearingDb} // Use renamed state
style={styles.button}
buttonColor="red" // Make it look dangerous
buttonColor="red"
>
{isLoading ? <ActivityIndicator animating={true} color="white" /> : 'Clear Database'}
{isClearingDb ? <ActivityIndicator animating={true} color="white" /> : 'Clear Database'}
</Button>

<Snackbar
visible={snackbarVisible}
onDismiss={() => setSnackbarVisible(false)}
visible={clearDbSnackbarVisible} // Use renamed state
onDismiss={() => setClearDbSnackbarVisible(false)}
duration={Snackbar.DURATION_SHORT}
>
{snackbarMessage}
{clearDbSnackbarMessage} {/* Use renamed state */}
</Snackbar>

<Divider style={styles.divider} />

{/* --- Send Notification Section --- */}
<Text variant="titleMedium" style={styles.sectionTitle}>Send Push Notification</Text>

{notificationError && <Text style={[styles.message, { color: theme.colors.error }]}>{notificationError}</Text>}
{notificationSuccess && <Text style={[styles.message, { color: theme.colors.primary }]}>{notificationSuccess}</Text>}

<TextInput
label="Username"
value={username}
onChangeText={setUsername}
mode="outlined"
style={styles.input}
autoCapitalize="none"
disabled={isSendingNotification}
/>
<TextInput
label="Notification Title"
value={title}
onChangeText={setTitle}
mode="outlined"
style={styles.input}
disabled={isSendingNotification}
/>
<TextInput
label="Notification Body"
value={body}
onChangeText={setBody}
mode="outlined"
style={styles.input}
multiline
numberOfLines={3}
disabled={isSendingNotification}
/>
<Button
mode="contained"
onPress={handleSendNotification}
loading={isSendingNotification}
disabled={isSendingNotification}
style={styles.button}
>
{isSendingNotification ? 'Sending...' : 'Send Notification'}
</Button>

</View>
);
};
@@ -80,19 +171,37 @@ const styles = StyleSheet.create({
container: {
flex: 1,
padding: 20,
justifyContent: 'center',
alignItems: 'center',
// Removed justifyContent and alignItems to allow scrolling if content overflows
},
title: {
marginBottom: 30,
marginBottom: 20, // Reduced margin
textAlign: 'center',
},
sectionTitle: {
marginBottom: 15,
marginTop: 10, // Add some space before the title
textAlign: 'center',
},
checkboxContainer: {
flexDirection: 'row',
alignItems: 'center',
marginBottom: 20,
marginBottom: 10, // Reduced margin
justifyContent: 'center', // Center checkbox
},
button: {
marginTop: 10,
marginBottom: 10, // Add margin below button
},
input: {
marginBottom: 15,
},
message: {
marginBottom: 15,
textAlign: 'center',
fontWeight: 'bold',
},
divider: {
marginVertical: 30, // Add vertical space around the divider
},
});
@@ -143,13 +143,6 @@ const EventFormScreen = () => {
const handleStartDateConfirm = (date: Date) => {
setStartDate(date);
setWebStartDateInput(formatForWebInput(date)); // Update web input state
// Optional: Auto-set end date if it's before start date or null
if (!endDate || endDate < date) {
const newEndDate = new Date(date);
newEndDate.setHours(date.getHours() + 1); // Default to 1 hour later
setEndDate(newEndDate);
setWebEndDateInput(formatForWebInput(newEndDate)); // Update web input state
}
validateForm({ start: date }); // Validate after setting
hideStartDatePicker();
};
@@ -189,13 +182,6 @@ const EventFormScreen = () => {
if (isValid(parsedDate) && text.length >= 15) { // Basic length check for 'yyyy-MM-dd HH:mm'
if (type === 'start') {
setStartDate(parsedDate);
// Optional: Auto-set end date
if (!endDate || endDate < parsedDate) {
const newEndDate = new Date(parsedDate);
newEndDate.setHours(parsedDate.getHours() + 1);
setEndDate(newEndDate);
setWebEndDateInput(formatForWebInput(newEndDate)); // Update other web input too
}
validateForm({ start: parsedDate }); // Validate with the actual Date
} else {
setEndDate(parsedDate);
@@ -3,8 +3,12 @@ import React, { useState } from 'react';
import { View, StyleSheet, KeyboardAvoidingView, Platform } from 'react-native';
import { TextInput, Button, Text, useTheme, HelperText, ActivityIndicator, Avatar } from 'react-native-paper';
import { useAuth } from '../contexts/AuthContext';
import { NativeStackScreenProps } from '@react-navigation/native-stack';
import { AuthStackParamList } from '../types/navigation'; // Import from the new types file

const LoginScreen = () => {
type LoginScreenProps = NativeStackScreenProps<AuthStackParamList, 'Login'>;

const LoginScreen: React.FC<LoginScreenProps> = ({ navigation }) => {
const theme = useTheme();
const { login } = useAuth();
const [username, setUsername] = useState('');
@@ -116,6 +120,7 @@ const LoginScreen = () => {
{isLoading ? (
<ActivityIndicator animating={true} color={theme.colors.primary} style={styles.loadingContainer}/>
) : (
<>
<Button
mode="contained"
onPress={handleLogin}
@@ -125,9 +130,21 @@ const LoginScreen = () => {
>
Login
</Button>

{/* Add Register Button */}
<Button
mode="outlined" // Use outlined for secondary action
onPress={() => navigation.navigate('Register')} // Navigate to Register screen
style={styles.button} // Reuse button style or create a new one
disabled={isLoading}
icon="account-plus-outline"
>
Register
</Button>
</>
)}

{/* TODO: Add Register here */}
{/* TODO: Add Register here - REMOVED */}
</KeyboardAvoidingView>
);
};
interfaces/nativeapp/src/screens/RegisterScreen.tsx (new file, 171 lines)
@@ -0,0 +1,171 @@
// src/screens/RegisterScreen.tsx
import React, { useState } from 'react';
import { View, StyleSheet, KeyboardAvoidingView, Platform, Alert } from 'react-native';
import { TextInput, Button, Text, useTheme, HelperText, ActivityIndicator, Avatar } from 'react-native-paper';
import { useAuth } from '../contexts/AuthContext';
import { NativeStackScreenProps } from '@react-navigation/native-stack';
import { AuthStackParamList } from '../types/navigation'; // Import from the new types file

type RegisterScreenProps = NativeStackScreenProps<AuthStackParamList, 'Register'>;

const RegisterScreen: React.FC<RegisterScreenProps> = ({ navigation }) => {
const theme = useTheme();
const { register } = useAuth();
const [username, setUsername] = useState('');
const [password, setPassword] = useState('');
const [confirmPassword, setConfirmPassword] = useState('');
const [name, setName] = useState('');
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState<string | null>(null);

const handleRegister = async () => {
console.log("[RegisterScreen] handleRegister: Button pressed.");
if (!username || !password || !name || !confirmPassword) {
setError('Please fill in all fields.');
return;
}
if (password !== confirmPassword) {
setError('Passwords do not match.');
return;
}
setError(null);
setIsLoading(true);
try {
console.log("[RegisterScreen] handleRegister: Calling context register function...");
await register(username, password, name);
console.log("[RegisterScreen] handleRegister: Registration successful (from context perspective).");
// Show success message and navigate back to Login
navigation.navigate('Login');
} catch (err: any) {
console.log("[RegisterScreen] handleRegister: Caught error from context register.");
const errorMessage = err.response?.data?.detail ||
err.response?.data?.message ||
err.message ||
'Registration failed. Please try again.';
setError(errorMessage);
} finally {
setIsLoading(false);
console.log("[RegisterScreen] handleRegister: Set loading to false.");
}
};

const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: 'center',
padding: 20,
backgroundColor: theme.colors.background,
},
logoContainer: {
alignItems: 'center',
marginBottom: 30, // Slightly less margin than login
},
title: {
fontSize: 24,
fontWeight: 'bold',
textAlign: 'center',
marginBottom: 20,
color: theme.colors.primary,
},
input: {
marginBottom: 12, // Slightly less margin
},
button: {
marginTop: 10,
paddingVertical: 8,
},
loginButton: {
marginTop: 15,
},
errorText: {
textAlign: 'center',
marginBottom: 10,
},
loadingContainer: {
marginTop: 20,
}
});

return (
<KeyboardAvoidingView
behavior={Platform.OS === "ios" ? "padding" : "height"}
style={styles.container}
>
<View style={styles.logoContainer}>
<Avatar.Image
size={90} // Slightly smaller logo
source={require('../assets/MAIA_ICON.png')}
/>
</View>
<Text style={styles.title}>Create Account</Text>
<TextInput
label="Name"
value={name}
onChangeText={setName}
mode="outlined"
style={styles.input}
autoCapitalize="words"
disabled={isLoading}
/>
<TextInput
label="Username"
value={username}
onChangeText={setUsername}
mode="outlined"
style={styles.input}
autoCapitalize="none"
disabled={isLoading}
/>
<TextInput
label="Password"
value={password}
onChangeText={setPassword}
mode="outlined"
style={styles.input}
secureTextEntry
disabled={isLoading}
/>
<TextInput
label="Confirm Password"
value={confirmPassword}
onChangeText={setConfirmPassword}
mode="outlined"
style={styles.input}
secureTextEntry
disabled={isLoading}
/>

<HelperText type="error" visible={!!error} style={styles.errorText}>
{error}
</HelperText>

{isLoading ? (
<ActivityIndicator animating={true} color={theme.colors.primary} style={styles.loadingContainer}/>
) : (
<Button
mode="contained"
onPress={handleRegister}
style={styles.button}
disabled={isLoading}
icon="account-plus"
>
Register
</Button>
)}

{/* Button to go back to Login */}
<Button
mode="text" // Use text button for secondary action
onPress={() => navigation.navigate('Login')}
style={styles.loginButton}
disabled={isLoading}
icon="arrow-left"
>
Back to Login
</Button>

</KeyboardAvoidingView>
);
};

export default RegisterScreen;
interfaces/nativeapp/src/services/notificationService.ts (new file, 149 lines)
@@ -0,0 +1,149 @@
import * as Device from 'expo-device';
import * as Notifications from 'expo-notifications';
import { Platform } from 'react-native';
import apiClient from '../api/client';
import Constants from 'expo-constants';

// Define the structure of the push token data expected by the backend
interface PushTokenData {
token: string;
device_name?: string;
token_type: 'expo'; // Indicate the type of token
}

// --- Android Notification Channel Setup ---
async function setupNotificationChannelsAndroid() {
if (Platform.OS === 'android') {
await Notifications.setNotificationChannelAsync('default', {
name: 'Default',
importance: Notifications.AndroidImportance.MAX,
vibrationPattern: [0, 250, 250, 250],
lightColor: '#FF231F7C',
});
console.log('[Notifications] Default Android channel set up.');
}
}

// --- Request Permissions and Get Token ---
export async function registerForPushNotificationsAsync(): Promise<string | null> {
if (Platform.OS !== 'android' && Platform.OS !== 'ios') {
console.warn('[Notifications] Push notifications are only supported on Android and iOS.');
return null;
}
let token: string | null = null;

if (!Device.isDevice) {
console.warn('[Notifications] Push notifications require a physical device.');
alert('Must use physical device for Push Notifications');
return null;
}

// 1. Setup Android Channels
await setupNotificationChannelsAndroid();

// 2. Request Permissions
const { status: existingStatus } = await Notifications.getPermissionsAsync();
let finalStatus = existingStatus;
if (existingStatus !== 'granted') {
console.log('[Notifications] Requesting notification permissions...');
const { status } = await Notifications.requestPermissionsAsync();
finalStatus = status;
}

if (finalStatus !== 'granted') {
console.warn('[Notifications] Failed to get push token: Permission not granted.');
alert('Failed to get push token for push notification!');
return null;
}

// 3. Get Expo Push Token
try {
// Use the default experience ID
const projectId = process.env.EXPO_PROJECT_ID || Constants.expoConfig?.extra?.eas?.projectId;
if (!projectId) {
console.error('[Notifications] EAS project ID not found in app config. Cannot get push token.');
alert('Configuration error: Project ID missing. Cannot get push token.');
return null;
}
console.log(`[Notifications] Getting Expo push token with projectId: ${projectId}`);
const expoPushToken = await Notifications.getExpoPushTokenAsync({ projectId });
token = expoPushToken.data;
console.log('[Notifications] Received Expo Push Token:', token);
} catch (error) {
console.error('[Notifications] Error getting Expo push token:', error);
alert(`Error getting push token: ${error instanceof Error ? error.message : String(error)}`);
return null;
}

return token;
}

// --- Send Token to Backend ---
export async function sendPushTokenToBackend(expoPushToken: string): Promise<boolean> {
if (!expoPushToken) {
console.warn('[Notifications] No push token provided to send to backend.');
return false;
}

const tokenData: PushTokenData = {
token: expoPushToken,
device_name: Device.deviceName ?? undefined,
token_type: 'expo',
};

try {
console.log('[Notifications] Sending push token to backend:', tokenData);
const response = await apiClient.post('/user/push-token', tokenData);

if (response.status === 200 || response.status === 201) {
console.log('[Notifications] Push token successfully sent to backend.');
return true;
} else {
console.warn(`[Notifications] Backend returned status ${response.status} when sending push token.`);
return false;
}
} catch (error: any) {
console.error('[Notifications] Error sending push token to backend:', error.response?.data || error.message);
return false;
}
}

// --- Notification Handling Setup ---
export function setupNotificationHandlers() {
// Handle notifications that arrive while the app is foregrounded
Notifications.setNotificationHandler({
handleNotification: async () => ({
shouldShowAlert: true,
shouldPlaySound: true,
shouldSetBadge: false,
}),
});

// Handle user interaction with notifications (tapping) when app is foregrounded/backgrounded
const foregroundInteractionSubscription = Notifications.addNotificationResponseReceivedListener(response => {
console.log('[Notifications] User interacted with notification (foreground/background):', response.notification.request.content);

// const data = response.notification.request.content.data;
// if (data?.screen) {
//   navigation.navigate(data.screen);
// }
});

// Handle user interaction with notifications (tapping) when app was killed/not running
// This requires careful setup, potentially using Linking or initial URL handling
// Notifications.getLastNotificationResponseAsync().then(response => {
//   if (response) {
//     console.log('[Notifications] User opened app via notification (killed state):', response.notification.request.content);
//     // Handle navigation or action based on response.notification.request.content.data
//   }
// });

console.log('[Notifications] Notification handlers set up.');

// Return cleanup function for useEffect
return () => {
console.log('[Notifications] Removing notification listeners.');
Notifications.removeNotificationSubscription(foregroundInteractionSubscription);
};
}
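A minimal sketch of how the three exports above could be wired together once a user is authenticated; the hook name and call site are assumptions, not part of this diff:

// Hypothetical wiring; hook name and placement are assumptions.
import { useEffect } from 'react';
import {
  registerForPushNotificationsAsync,
  sendPushTokenToBackend,
  setupNotificationHandlers,
} from '../services/notificationService';

export function usePushNotifications(isAuthenticated: boolean) {
  useEffect(() => {
    if (!isAuthenticated) return;
    // Install foreground/interaction handlers first; keep the cleanup for unmount.
    const cleanup = setupNotificationHandlers();
    // Then request permission, fetch the Expo token, and report it to the backend.
    registerForPushNotificationsAsync()
      .then((token) => (token ? sendPushTokenToBackend(token) : false))
      .catch((err) => console.warn('[Notifications] Setup failed:', err));
    return cleanup;
  }, [isAuthenticated]);
}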
@@ -21,19 +21,20 @@ export type WebContentStackParamList = {
};

// Screens managed by the Root Navigator (Auth vs App)
export type RootStackParamList = {
AuthFlow: undefined; // Represents the stack for unauthenticated users
AppFlow: undefined; // Represents the stack/layout for authenticated users
};
export type RootStackParamList = AuthStackParamList & AppStackParamList;

// Screens within the Authentication Flow
export type AuthStackParamList = {
Login: undefined;
// Example: SignUp: undefined; ForgotPassword: undefined;
Register: undefined;
};

// Screens within the main App stack (Mobile)
export type AppStackParamList = {
Home: undefined;
Settings: undefined;
Calendar: undefined;
Todo: undefined;
MainTabs: undefined; // Represents the MobileTabNavigator
EventForm: { eventId?: number; selectedDate?: string };
};
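Since RootStackParamList is now the intersection of the auth and app param lists, screens can type navigation against the list they live in. A small usage sketch (screen names come from the types above; the hook wrapper is an assumption about how callers are written):

// Sketch only; assumes a component rendered inside the app stack.
import { useNavigation } from '@react-navigation/native';
import type { NativeStackNavigationProp } from '@react-navigation/native-stack';
import { AppStackParamList } from '../types/navigation';

type AppNavigation = NativeStackNavigationProp<AppStackParamList>;

export function useOpenEventForm() {
  const navigation = useNavigation<AppNavigation>();
  // Params are type-checked against the EventForm entry in AppStackParamList.
  return (eventId?: number) => navigation.navigate('EventForm', { eventId });
}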