"""
|
|
Router for the chat-to-tasks functionality.
|
|
"""
|
|
|
|
import logging
|
|
from fastapi import APIRouter, Depends, HTTPException
|
|
from sqlalchemy.orm import Session
|
|
|
|
from app.api import deps
|
|
from app.crud import task as task_crud
|
|
from app.models.user import User
|
|
from app.schemas.chat import ChatInput, ChatResponse, ChatProcessingError
|
|
from app.schemas.task import TaskCreate, TaskRead
|
|
from app.services.llm_service import LLMService, get_llm_service
|
|
from app.db.session import get_db
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
router = APIRouter()
|
|
|
|
|
|
@router.post("/chat-to-tasks", response_model=ChatResponse)
async def create_tasks_from_chat(
    chat_input: ChatInput,
    db: Session = Depends(get_db),
    current_user: User = Depends(deps.get_current_active_user),
    llm_service: LLMService = Depends(get_llm_service),
):
    """
    Convert natural language chat input into one or more structured task objects.

    This endpoint also returns its internal debug logs in the response body to
    make troubleshooting easier.
    """
    message = chat_input.message.strip()
    if len(message) < 3:
        raise HTTPException(
            status_code=400,
            detail="Message must be at least 3 characters long",
        )

    response = ChatResponse(original_message=message)
    debug_logs = []  # Collect debug logs here

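    # Small helper: records each diagnostic message both in the server log and
    # in the debug_logs list returned to the client.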
    def log_debug(msg):
        debug_logs.append(msg)
        logger.debug(msg)

    try:
        log_debug(f"Received chat input for task extraction: {message[:50]}...")

        # Extract tasks from the LLM service
        llm_tasks = await llm_service.chat_to_tasks(message)
        log_debug(f"LLM service raw output: {llm_tasks}")

        if not llm_tasks:
            log_debug("LLM service returned no tasks.")
            response.processing_successful = False
            response.error = ChatProcessingError(
                error_type="parsing_error",
                error_detail="No tasks could be extracted from your message.",
            )
            response.debug_logs = debug_logs  # Attach logs to response
            return response

        created_tasks = []

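        # Each entry in llm_tasks is expected to be dict-like; the lookups
        # below assume keys such as "title", "description", "priority",
        # "due_date", and "status", all optional with sensible fallbacks.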
        for idx, task_data in enumerate(llm_tasks, 1):
            try:
                task_create_data = {
                    "title": task_data.get("title", "Untitled Task"),
                    "description": task_data.get("description", ""),
                    "priority": task_data.get("priority", "medium").lower(),
                }

                due_date = task_data.get("due_date")
                if due_date and due_date != "null":
                    task_create_data["due_date"] = due_date

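                # Normalize the status vocabulary: the LLM may answer with
                # "pending", which maps to the internal "todo" status; any
                # other non-empty value is passed through as-is.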
status = task_data.get("status", "").lower()
|
|
if status == "pending":
|
|
task_create_data["status"] = "todo"
|
|
elif status:
|
|
task_create_data["status"] = status
|
|
|
|
log_debug(f"Task #{idx} data prepared for creation: {task_create_data}")
|
|
|
|
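                # Schema validation happens here; malformed LLM output raises
                # and is caught by the per-task handler below, so one bad task
                # does not fail the whole request.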
                task_in = TaskCreate(**task_create_data)

                db_task = task_crud.create_with_owner(
                    db=db,
                    obj_in=task_in,
                    user_id=current_user.id,
                )

                created_tasks.append(TaskRead.model_validate(db_task))
                log_debug(f"Task #{idx} created successfully with ID {db_task.id}")

            except Exception as task_exc:
                err_msg = f"Failed to create task #{idx} from LLM data: {task_exc}. Data: {task_data}"
                log_debug(err_msg)
                # Continue processing remaining tasks even if one fails

        if not created_tasks:
            response.processing_successful = False
            response.error = ChatProcessingError(
                error_type="creation_error",
                error_detail="Failed to create any tasks from the provided message.",
            )
        else:
            response.tasks = created_tasks
            response.processing_successful = True

        response.debug_logs = debug_logs
        return response

    except Exception as exc:
        err_msg = f"Unexpected error in chat-to-tasks endpoint: {exc}"
        log_debug(err_msg)
        response.processing_successful = False
        response.error = ChatProcessingError(
            error_type="processing_error",
            error_detail=f"An error occurred while processing your request: {exc}",
        )
        response.debug_logs = debug_logs
        return response
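# Example call (illustrative; assumes the app mounts this router under an
# /api/v1 prefix and authenticates with a bearer token, both of which depend
# on the project's FastAPI setup):
#
#   curl -X POST http://localhost:8000/api/v1/chat-to-tasks \
#     -H "Authorization: Bearer <token>" \
#     -H "Content-Type: application/json" \
#     -d '{"message": "Plan the sprint review and book a room for Thursday"}'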