[REFORMAT] Ran black reformat
@@ -7,13 +7,27 @@ from core.database import get_db

from modules.auth.dependencies import get_current_user
from modules.auth.models import User

# Import the new service functions and Enum
-from modules.nlp.service import process_request, ask_ai, save_chat_message, get_chat_history, MessageSender
+from modules.nlp.service import (
+    process_request,
+    ask_ai,
+    save_chat_message,
+    get_chat_history,
+    MessageSender,
+)

# Import the response schema and the new ChatMessage model for response type hinting
from modules.nlp.schemas import ProcessCommandRequest, ProcessCommandResponse
-from modules.calendar.service import create_calendar_event, get_calendar_events, update_calendar_event, delete_calendar_event
+from modules.calendar.service import (
+    create_calendar_event,
+    get_calendar_events,
+    update_calendar_event,
+    delete_calendar_event,
+)
from modules.calendar.models import CalendarEvent
from modules.calendar.schemas import CalendarEventCreate, CalendarEventUpdate

# Import TODO services, schemas, and model
from modules.todo import service as todo_service
from modules.todo.models import Todo
@@ -21,17 +35,20 @@ from modules.todo.schemas import TodoCreate, TodoUpdate
from pydantic import BaseModel
from datetime import datetime


class ChatMessageResponse(BaseModel):
    id: int
-    sender: MessageSender # Use the enum directly
+    sender: MessageSender  # Use the enum directly
    text: str
    timestamp: datetime

    class Config:
-        from_attributes = True # Allow Pydantic to work with ORM models
+        from_attributes = True  # Allow Pydantic to work with ORM models


router = APIRouter(prefix="/nlp", tags=["nlp"])


# Helper to format calendar events (expects list of CalendarEvent models)
def format_calendar_events(events: List[CalendarEvent]) -> List[str]:
    if not events:
@@ -39,12 +56,15 @@ def format_calendar_events(events: List[CalendarEvent]) -> List[str]:
    formatted = ["Here are the events:"]
    for event in events:
        # Access attributes directly from the model instance
-        start_str = event.start.strftime("%Y-%m-%d %H:%M") if event.start else "No start time"
+        start_str = (
+            event.start.strftime("%Y-%m-%d %H:%M") if event.start else "No start time"
+        )
        end_str = event.end.strftime("%H:%M") if event.end else ""
        title = event.title or "Untitled Event"
        formatted.append(f"- {title} ({start_str}{' - ' + end_str if end_str else ''})")
    return formatted


# Helper to format TODO items (expects list of Todo models)
def format_todos(todos: List[Todo]) -> List[str]:
    if not todos:
@@ -54,19 +74,28 @@ def format_todos(todos: List[Todo]) -> List[str]:
        status = "[X]" if todo.complete else "[ ]"
        date_str = f" (Due: {todo.date.strftime('%Y-%m-%d')})" if todo.date else ""
        remind_str = " (Reminder)" if todo.remind else ""
-        formatted.append(f"- {status} {todo.task}{date_str}{remind_str} (ID: {todo.id})")
+        formatted.append(
+            f"- {status} {todo.task}{date_str}{remind_str} (ID: {todo.id})"
+        )
    return formatted


# Update the response model for the endpoint
@router.post("/process-command", response_model=ProcessCommandResponse)
-def process_command(request_data: ProcessCommandRequest, current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
+def process_command(
+    request_data: ProcessCommandRequest,
+    current_user: User = Depends(get_current_user),
+    db: Session = Depends(get_db),
+):
    """
    Process the user command, save messages, execute action, save response, and return user-friendly responses.
    """
    user_input = request_data.user_input

    # --- Save User Message ---
-    save_chat_message(db, user_id=current_user.id, sender=MessageSender.USER, text=user_input)
+    save_chat_message(
+        db, user_id=current_user.id, sender=MessageSender.USER, text=user_input
+    )
    # ------------------------

    command_data = process_request(user_input)
@@ -74,11 +103,13 @@ def process_command(request_data: ProcessCommandRequest, current_user: User = De
    params = command_data["params"]
    response_text = command_data["response_text"]

-    responses = [response_text] # Start with the initial response
+    responses = [response_text]  # Start with the initial response

    # --- Save Initial AI Response ---
    # Save the first response generated by process_request
-    save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=response_text)
+    save_chat_message(
+        db, user_id=current_user.id, sender=MessageSender.AI, text=response_text
+    )
    # -----------------------------

    if intent == "error":
@@ -97,139 +128,233 @@ def process_command(request_data: ProcessCommandRequest, current_user: User = De
                ai_answer = ask_ai(**params)
                responses.append(ai_answer)
                # --- Save Additional AI Response ---
-                save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=ai_answer)
+                save_chat_message(
+                    db, user_id=current_user.id, sender=MessageSender.AI, text=ai_answer
+                )
                # ---------------------------------
                return ProcessCommandResponse(responses=responses)

            case "get_calendar_events":
-                events: List[CalendarEvent] = get_calendar_events(db, current_user.id, **params)
+                events: List[CalendarEvent] = get_calendar_events(
+                    db, current_user.id, **params
+                )
                formatted_responses = format_calendar_events(events)
                responses.extend(formatted_responses)
                # --- Save Additional AI Responses ---
                for resp in formatted_responses:
-                    save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=resp)
+                    save_chat_message(
+                        db, user_id=current_user.id, sender=MessageSender.AI, text=resp
+                    )
                # ----------------------------------
                return ProcessCommandResponse(responses=responses)

case "add_calendar_event":
|
||||
event_data = CalendarEventCreate(**params)
|
||||
created_event = create_calendar_event(db, current_user.id, event_data)
|
||||
start_str = created_event.start.strftime("%Y-%m-%d %H:%M") if created_event.start else "No start time"
|
||||
start_str = (
|
||||
created_event.start.strftime("%Y-%m-%d %H:%M")
|
||||
if created_event.start
|
||||
else "No start time"
|
||||
)
|
||||
title = created_event.title or "Untitled Event"
|
||||
add_response = f"Added: {title} starting at {start_str}."
|
||||
responses.append(add_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=add_response)
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=add_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "update_calendar_event":
|
||||
event_id = params.pop('event_id', None)
|
||||
event_id = params.pop("event_id", None)
|
||||
if event_id is None:
|
||||
# Save the error message before raising
|
||||
error_msg = "Event ID is required for update."
|
||||
save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=error_msg)
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=error_msg,
|
||||
)
|
||||
raise HTTPException(status_code=400, detail=error_msg)
|
||||
event_data = CalendarEventUpdate(**params)
|
||||
updated_event = update_calendar_event(db, current_user.id, event_id, event_data=event_data)
|
||||
updated_event = update_calendar_event(
|
||||
db, current_user.id, event_id, event_data=event_data
|
||||
)
|
||||
title = updated_event.title or "Untitled Event"
|
||||
update_response = f"Updated event ID {updated_event.id}: {title}."
|
||||
responses.append(update_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=update_response)
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=update_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "delete_calendar_event":
|
||||
event_id = params.get('event_id')
|
||||
event_id = params.get("event_id")
|
||||
if event_id is None:
|
||||
# Save the error message before raising
|
||||
error_msg = "Event ID is required for delete."
|
||||
save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=error_msg)
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=error_msg,
|
||||
)
|
||||
raise HTTPException(status_code=400, detail=error_msg)
|
||||
delete_calendar_event(db, current_user.id, event_id)
|
||||
delete_response = f"Deleted event ID {event_id}."
|
||||
responses.append(delete_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=delete_response)
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=delete_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
-            # --- Add TODO Cases ---
+            # --- Add TODO Cases ---
            case "get_todos":
-                todos: List[Todo] = todo_service.get_todos(db, user=current_user, **params)
+                todos: List[Todo] = todo_service.get_todos(
+                    db, user=current_user, **params
+                )
                formatted_responses = format_todos(todos)
                responses.extend(formatted_responses)
                # --- Save Additional AI Responses ---
                for resp in formatted_responses:
-                    save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=resp)
+                    save_chat_message(
+                        db, user_id=current_user.id, sender=MessageSender.AI, text=resp
+                    )
                # ----------------------------------
                return ProcessCommandResponse(responses=responses)

case "add_todo":
|
||||
todo_data = TodoCreate(**params)
|
||||
created_todo = todo_service.create_todo(db, todo=todo_data, user=current_user)
|
||||
add_response = f"Added TODO: '{created_todo.task}' (ID: {created_todo.id})."
|
||||
created_todo = todo_service.create_todo(
|
||||
db, todo=todo_data, user=current_user
|
||||
)
|
||||
add_response = (
|
||||
f"Added TODO: '{created_todo.task}' (ID: {created_todo.id})."
|
||||
)
|
||||
responses.append(add_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=add_response)
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=add_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "update_todo":
|
||||
todo_id = params.pop('todo_id', None)
|
||||
todo_id = params.pop("todo_id", None)
|
||||
if todo_id is None:
|
||||
error_msg = "TODO ID is required for update."
|
||||
save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=error_msg)
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=error_msg,
|
||||
)
|
||||
raise HTTPException(status_code=400, detail=error_msg)
|
||||
todo_data = TodoUpdate(**params)
|
||||
updated_todo = todo_service.update_todo(db, todo_id=todo_id, todo_update=todo_data, user=current_user)
|
||||
update_response = f"Updated TODO ID {updated_todo.id}: '{updated_todo.task}'."
|
||||
if 'complete' in params:
|
||||
status = "complete" if params['complete'] else "incomplete"
|
||||
updated_todo = todo_service.update_todo(
|
||||
db, todo_id=todo_id, todo_update=todo_data, user=current_user
|
||||
)
|
||||
update_response = (
|
||||
f"Updated TODO ID {updated_todo.id}: '{updated_todo.task}'."
|
||||
)
|
||||
if "complete" in params:
|
||||
status = "complete" if params["complete"] else "incomplete"
|
||||
update_response += f" Marked as {status}."
|
||||
responses.append(update_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=update_response)
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=update_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
|
||||
case "delete_todo":
|
||||
todo_id = params.get('todo_id')
|
||||
todo_id = params.get("todo_id")
|
||||
if todo_id is None:
|
||||
error_msg = "TODO ID is required for delete."
|
||||
save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=error_msg)
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=error_msg,
|
||||
)
|
||||
raise HTTPException(status_code=400, detail=error_msg)
|
||||
deleted_todo = todo_service.delete_todo(db, todo_id=todo_id, user=current_user)
|
||||
delete_response = f"Deleted TODO ID {deleted_todo.id}: '{deleted_todo.task}'."
|
||||
deleted_todo = todo_service.delete_todo(
|
||||
db, todo_id=todo_id, user=current_user
|
||||
)
|
||||
delete_response = (
|
||||
f"Deleted TODO ID {deleted_todo.id}: '{deleted_todo.task}'."
|
||||
)
|
||||
responses.append(delete_response)
|
||||
# --- Save Additional AI Response ---
|
||||
save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=delete_response)
|
||||
save_chat_message(
|
||||
db,
|
||||
user_id=current_user.id,
|
||||
sender=MessageSender.AI,
|
||||
text=delete_response,
|
||||
)
|
||||
# ---------------------------------
|
||||
return ProcessCommandResponse(responses=responses)
|
||||
# --- End TODO Cases ---
|
||||
|
||||
-            case _:
-                print(f"Warning: Unhandled intent '{intent}' reached api.py match statement.")
+            case _:
+                print(
+                    f"Warning: Unhandled intent '{intent}' reached api.py match statement."
+                )
                # The initial response_text was already saved
                return ProcessCommandResponse(responses=responses)

    except HTTPException as http_exc:
        # Don't save again if already saved before raising
-        if http_exc.status_code != 400 or ('event_id' not in http_exc.detail.lower()):
-            save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=http_exc.detail)
+        if http_exc.status_code != 400 or ("event_id" not in http_exc.detail.lower()):
+            save_chat_message(
+                db,
+                user_id=current_user.id,
+                sender=MessageSender.AI,
+                text=http_exc.detail,
+            )
        raise http_exc
    except Exception as e:
        print(f"Error executing intent '{intent}': {e}")
-        error_response = "Sorry, I encountered an error while trying to perform that action."
+        error_response = (
+            "Sorry, I encountered an error while trying to perform that action."
+        )
        # --- Save Final Error AI Response ---
-        save_chat_message(db, user_id=current_user.id, sender=MessageSender.AI, text=error_response)
+        save_chat_message(
+            db, user_id=current_user.id, sender=MessageSender.AI, text=error_response
+        )
        # ----------------------------------
        return ProcessCommandResponse(responses=[error_response])


@router.get("/history", response_model=List[ChatMessageResponse])
|
||||
def read_chat_history(current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
|
||||
def read_chat_history(
|
||||
current_user: User = Depends(get_current_user), db: Session = Depends(get_db)
|
||||
):
|
||||
"""Retrieves the last 50 chat messages for the current user."""
|
||||
history = get_chat_history(db, user_id=current_user.id, limit=50)
|
||||
return history
|
||||
# -------------------------------------
|
||||
|
||||
|
||||
# -------------------------------------
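
The history endpoint above returns ORM ChatMessage rows and relies on the from_attributes = True config of ChatMessageResponse to serialize them. A minimal sketch of that conversion, assuming Pydantic v2 and the module layout implied by this diff (the stand-in row object and import paths are illustrative, not part of the commit):

# Illustrative sketch: ChatMessageResponse reading attributes off an ORM-style object.
from datetime import datetime
from types import SimpleNamespace

from modules.nlp.api import ChatMessageResponse  # assumed module path
from modules.nlp.models import MessageSender

# Stand-in for a ChatMessage ORM row; field names mirror the response schema above.
row = SimpleNamespace(
    id=1,
    sender=MessageSender.AI,
    text="Here are the events:",
    timestamp=datetime(2025, 1, 1, 12, 0),
)

msg = ChatMessageResponse.model_validate(row)  # from_attributes reads attributes, not dict keys
print(msg.sender, msg.text)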
@@ -1,4 +1,3 @@
-# /home/cdp/code/MAIA/backend/modules/nlp/models.py
from sqlalchemy import Column, Integer, Text, DateTime, ForeignKey, Enum as SQLEnum
from sqlalchemy.orm import relationship
@@ -7,10 +6,12 @@ import enum

from core.database import Base


class MessageSender(enum.Enum):
    USER = "user"
    AI = "ai"


class ChatMessage(Base):
    __tablename__ = "chat_messages"

@@ -20,4 +21,4 @@ class ChatMessage(Base):
    text = Column(Text, nullable=False)
    timestamp = Column(DateTime(timezone=True), server_default=func.now())

-    owner = relationship("User") # Relationship to the User model
+    owner = relationship("User")  # Relationship to the User model
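
A small usage sketch for these models, mirroring the ChatMessage constructor call in save_chat_message further down; the import path is an assumption about the MAIA backend layout:

# Illustrative only: constructing a ChatMessage with the MessageSender enum.
from modules.nlp.models import ChatMessage, MessageSender  # assumed module path

msg = ChatMessage(user_id=1, sender=MessageSender.USER, text="What's on my calendar today?")
assert msg.sender is MessageSender.USER
assert MessageSender.USER.value == "user"  # enum values as defined above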
@@ -2,9 +2,11 @@
from pydantic import BaseModel
from typing import List


class ProcessCommandRequest(BaseModel):
    user_input: str


class ProcessCommandResponse(BaseModel):
    responses: List[str]
    # Optional: Keep details if needed for specific frontend logic beyond display
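
For reference, these schemas correspond to JSON payloads like the following on POST /nlp/process-command (the concrete strings and ID are illustrative; the response format strings come from api.py above):

# Illustrative request/response bodies matching ProcessCommandRequest / ProcessCommandResponse.
request_body = {"user_input": "add buy milk to my todo list"}
response_body = {
    "responses": [
        "Okay, adding that to your list.",  # initial response_text from process_request
        "Added TODO: 'buy milk' (ID: 7).",  # follow-up confirmation built in api.py
    ]
}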
@@ -1,11 +1,11 @@
# modules/nlp/service.py

from sqlalchemy.orm import Session
-from sqlalchemy import desc # Import desc for ordering
+from sqlalchemy import desc  # Import desc for ordering
from google import genai
import json
from datetime import datetime, timezone
-from typing import List # Import List
+from typing import List  # Import List

# Import the new model and Enum
from .models import ChatMessage, MessageSender
@@ -14,7 +14,8 @@ from core.config import settings
client = genai.Client(api_key=settings.GOOGLE_API_KEY)

### Base prompt for MAIA, used for inital user requests
-SYSTEM_PROMPT = """
+SYSTEM_PROMPT = (
+    """
You are MAIA - My AI Assistant. Your job is to parse user requests into structured JSON commands and generate a user-facing response text.

Available functions/intents:
@@ -109,8 +110,11 @@ MAIA:
    "response_text": "Okay, I've deleted task 2 from your list."
}

-The datetime right now is """+str(datetime.now(timezone.utc))+""".
+The datetime right now is """
+    + str(datetime.now(timezone.utc))
+    + """.
"""
+)

### Prompt for MAIA to forward user request to AI
SYSTEM_FORWARD_PROMPT = f"""
@@ -123,6 +127,7 @@ Here is the user request:

# --- Chat History Service Functions ---


def save_chat_message(db: Session, user_id: int, sender: MessageSender, text: str):
    """Saves a chat message to the database."""
    db_message = ChatMessage(user_id=user_id, sender=sender, text=text)
@@ -131,16 +136,21 @@ def save_chat_message(db: Session, user_id: int, sender: MessageSender, text: st
    db.refresh(db_message)
    return db_message


def get_chat_history(db: Session, user_id: int, limit: int = 50) -> List[ChatMessage]:
    """Retrieves the last 'limit' chat messages for a user."""
-    return db.query(ChatMessage)\
-        .filter(ChatMessage.user_id == user_id)\
-        .order_by(desc(ChatMessage.timestamp))\
-        .limit(limit)\
-        .all()[::-1] # Reverse to get oldest first for display order
+    return (
+        db.query(ChatMessage)
+        .filter(ChatMessage.user_id == user_id)
+        .order_by(desc(ChatMessage.timestamp))
+        .limit(limit)
+        .all()[::-1]
+    )  # Reverse to get oldest first for display order


# --- Existing NLP Service Functions ---


def process_request(request: str):
    """
    Process the user request using the Google GenAI API.
@@ -152,7 +162,7 @@ def process_request(request: str):
        config={
            "temperature": 0.3,  # Less creativity, more factual
            "response_mime_type": "application/json",
-        }
+        },
    )

    # Parse the JSON response
@@ -160,7 +170,9 @@ def process_request(request: str):
        parsed_response = json.loads(response.text)
        # Validate required fields
        if not all(k in parsed_response for k in ("intent", "params", "response_text")):
-            raise ValueError("AI response missing required fields (intent, params, response_text)")
+            raise ValueError(
+                "AI response missing required fields (intent, params, response_text)"
+            )
        return parsed_response
    except (json.JSONDecodeError, ValueError) as e:
        print(f"Error parsing AI response: {e}")
@@ -169,9 +181,10 @@ def process_request(request: str):
        return {
            "intent": "error",
            "params": {},
-            "response_text": "Sorry, I had trouble understanding that request or formulating a response. Could you please try rephrasing?"
+            "response_text": "Sorry, I had trouble understanding that request or formulating a response. Could you please try rephrasing?",
        }


def ask_ai(request: str):
    """
    Ask the AI a question.
@@ -179,6 +192,6 @@ def ask_ai(request: str):
    """
    response = client.models.generate_content(
        model="gemini-2.0-flash",
-        contents=SYSTEM_FORWARD_PROMPT+request,
+        contents=SYSTEM_FORWARD_PROMPT + request,
    )
-    return response.text
+    return response.text
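
A minimal sketch of exercising the chat-history helpers above outside a request, assuming a SessionLocal session factory in core.database (not shown in this diff); only the service calls themselves come from the code above:

# Illustrative usage of save_chat_message / get_chat_history.
from core.database import SessionLocal  # assumed session factory
from modules.nlp.service import MessageSender, get_chat_history, save_chat_message

db = SessionLocal()
try:
    save_chat_message(db, user_id=1, sender=MessageSender.USER, text="hello")
    save_chat_message(db, user_id=1, sender=MessageSender.AI, text="Hi! How can I help?")
    history = get_chat_history(db, user_id=1, limit=50)  # oldest first, per the comment above
    for msg in history:
        print(msg.sender, msg.text)
finally:
    db.close()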