Automated Action 97c002ac88 Add AI-powered chat-to-tasks feature
Implement a new endpoint that converts natural language input into structured tasks using an LLM. Features include:
- LLM service abstraction with support for OpenAI and Google Gemini
- Dependency injection pattern for easy provider switching
- Robust error handling and response formatting
- Integration with existing user authentication and task creation
- Fallback to mock LLM service for testing or development
2025-05-17 07:44:19 +00:00

126 lines
4.7 KiB
Python

"""
Router for the chat-to-tasks functionality.
"""
import logging
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from app.api import deps
from app.crud import task as task_crud
from app.models.user import User
from app.schemas.chat import ChatInput, ChatResponse, ChatProcessingError
from app.schemas.task import TaskCreate, TaskRead
from app.services.llm_service import LLMService, get_llm_service
from app.db.session import get_db
# Set up logger
logger = logging.getLogger(__name__)
router = APIRouter()
def _normalize_llm_task(task_data: dict) -> dict:
    """Map one raw LLM task dict onto TaskCreate field names/values.

    Defensive against common LLM output quirks: missing keys, keys present
    with an explicit None, the literal string "null" for due_date, and a
    "pending" status that the task model calls "todo".
    """
    # `or` (rather than a .get() default) also covers keys that are present
    # but None, which would otherwise raise AttributeError on .lower().
    normalized = {
        "title": task_data.get("title") or "Untitled Task",
        "description": task_data.get("description") or "",
        "priority": (task_data.get("priority") or "medium").lower(),
    }

    # Only forward a due_date that is truthy and not the string "null"
    # (LLMs sometimes emit JSON null as text).
    due_date = task_data.get("due_date")
    if due_date and due_date != "null":
        normalized["due_date"] = due_date

    # Map status if present (convert "pending" to "todo" if needed).
    if status := task_data.get("status"):
        status = status.lower()
        normalized["status"] = "todo" if status == "pending" else status

    return normalized


@router.post("/chat-to-tasks", response_model=ChatResponse)
async def create_tasks_from_chat(
    chat_input: ChatInput,
    db: Session = Depends(get_db),
    current_user: User = Depends(deps.get_current_active_user),
    llm_service: LLMService = Depends(get_llm_service),
):
    """
    Convert natural language chat input into one or more task objects.

    This endpoint:
    1. Takes the user's natural language input
    2. Sends it to an LLM for processing
    3. Parses the LLM's response into TaskCreate objects
    4. Creates the tasks in the database
    5. Returns the created tasks

    All tasks are associated with the authenticated user. Processing
    failures are reported inside the ChatResponse body (``error`` field)
    rather than as HTTP errors; only input validation raises HTTP 400.
    """
    if not chat_input.message or len(chat_input.message.strip()) < 3:
        raise HTTPException(
            status_code=400,
            detail="Message must be at least 3 characters long",
        )

    # Initialize response; error/status fields are filled in below.
    response = ChatResponse(original_message=chat_input.message)

    try:
        # Lazy %-args avoid formatting the message when INFO is disabled;
        # %.50s truncates the preview to 50 characters.
        logger.info("Processing chat input: %.50s...", chat_input.message)
        llm_tasks_data = await llm_service.chat_to_tasks(chat_input.message)

        if not llm_tasks_data:
            logger.warning("LLM returned no tasks")
            response.processing_successful = False
            response.error = ChatProcessingError(
                error_type="parsing_error",
                error_detail="No tasks could be extracted from your message",
            )
            return response

        # Convert each LLM task dict to a TaskCreate and persist it,
        # owned by the authenticated user.
        created_tasks = []
        for task_data in llm_tasks_data:
            try:
                task_in = TaskCreate(**_normalize_llm_task(task_data))
                db_task = task_crud.task.create_with_owner(
                    db=db, obj_in=task_in, user_id=current_user.id
                )
                created_tasks.append(TaskRead.model_validate(db_task))
            except Exception as e:
                # One malformed task must not abort the rest of the batch.
                logger.error("Error creating task: %s", e)
                continue

        if not created_tasks:
            # Every task failed validation/creation.
            response.processing_successful = False
            response.error = ChatProcessingError(
                error_type="creation_error",
                error_detail="Could not create any tasks from your message",
            )
        else:
            response.tasks = created_tasks

        return response

    except Exception as e:
        # Boundary handler: surface the failure in the response body so the
        # client always receives a well-formed ChatResponse.
        logger.exception(f"Error in chat-to-tasks endpoint: {e}")
        response.processing_successful = False
        response.error = ChatProcessingError(
            error_type="processing_error",
            error_detail=f"An error occurred while processing your request: {str(e)}",
        )
        return response