initial commit

Agent@BackendIM 2025-05-17 09:29:49 +00:00
parent 4f9c7b9c71
commit dfbfa75251


@@ -41,7 +41,7 @@ class OpenAIService(LLMService):
         except (ImportError, AttributeError) as e:
             logger.error(f"Failed to initialize OpenAI service: {e}")
             raise RuntimeError(f"OpenAI service initialization failed: {e}")
 
     async def chat_to_tasks(self, prompt: str) -> List[Dict]:
         """
         Convert natural language to tasks using OpenAI.
@@ -63,7 +63,7 @@ class OpenAIService(LLMService):
 
         Return ONLY a JSON array of task objects without any additional text or explanation.
         """
         try:
             response = await self.client.chat.completions.create(
                 model=self.model,
@@ -74,16 +74,14 @@ class OpenAIService(LLMService):
                 response_format={"type": "json_object"},
                 temperature=0.2,
             )
-            # Extract the JSON content from the response
             content = response.choices[0].message.content
             result = json.loads(content)
-            # Expect a "tasks" key in the response JSON
             if "tasks" in result:
                 return result["tasks"]
-            return [result]  # Return as a single-item list if no "tasks" key
+            return [result]
         except Exception as e:
             logger.error(f"OpenAI API error: {e}")
             raise RuntimeError(f"Failed to process request with OpenAI: {e}")
@@ -91,7 +89,7 @@ class GeminiService(LLMService):
 class GeminiService(LLMService):
     """Google Gemini implementation of LLM service."""
 
     def __init__(self):
         """Initialize the Gemini service."""
         try:
@@ -101,56 +99,62 @@ class GeminiService(LLMService):
         except (ImportError, AttributeError) as e:
             logger.error(f"Failed to initialize Gemini service: {e}")
             raise RuntimeError(f"Gemini service initialization failed: {e}")
 
     async def chat_to_tasks(self, prompt: str) -> List[Dict]:
         """
         Convert natural language to tasks using Google Gemini.
 
         Args:
             prompt: User's natural language input
 
         Returns:
             List of task dictionaries
         """
-        system_prompt = """
-        You are a task extraction assistant. Your job is to convert the user's natural language
-        input into one or more structured task objects. Each task should have these properties:
-        - title: A short, clear title for the task
-        - description: A more detailed description of what needs to be done
-        - due_date: When the task is due (ISO format date string, or null if not specified)
-        - priority: The priority level (high, medium, low)
-        - status: The status (defaults to "pending")
-
-        Return ONLY a JSON object with a "tasks" key that contains an array of task objects.
-        Do not include any text or explanation outside the JSON.
-        """
+        system_prompt = (
+            "You are a task extraction assistant. Your job is to convert the user's natural language "
+            "input into one or more structured task objects. Each task should have these properties:\n"
+            "- title: A short, clear title for the task\n"
+            "- description: A more detailed description of what needs to be done\n"
+            "- due_date: When the task is due (ISO format date string, or null if not specified)\n"
+            "- priority: The priority level (high, medium, low)\n"
+            "- status: The status (defaults to \"pending\")\n\n"
+            "Return ONLY a JSON object with a \"tasks\" key that contains an array of task objects. "
+            "Do not include any text or explanation outside the JSON."
+        )
         try:
-            # Start the chat session with the system prompt
             chat = self.model.start_chat(history=[
-                {"role": "system", "content": system_prompt}
+                {
+                    "content": {
+                        "parts": [system_prompt]
+                    }
+                }
             ])
-            # Send the user prompt asynchronously
-            response = await chat.send_message_async(prompt)
+            response = await chat.send_message_async(
+                {
+                    "content": {
+                        "parts": [prompt]
+                    }
+                }
+            )
             content = response.text
-            # Remove possible markdown code blocks wrapping the JSON response
+            # Remove possible markdown code blocks
             if "```json" in content:
                 json_str = content.split("```json")[1].split("```")[0].strip()
             elif "```" in content:
                 json_str = content.split("```")[1].strip()
             else:
                 json_str = content.strip()
-            # Parse JSON response
             result = json.loads(json_str)
-            # Return the list under "tasks" or the whole as single-item list
             if "tasks" in result:
                 return result["tasks"]
             return [result]
         except Exception as e:
             logger.error(f"Gemini API error: {e}")
             raise RuntimeError(f"Failed to process request with Gemini: {e}")
@@ -158,25 +162,25 @@ class GeminiService(LLMService):
 class MockLLMService(LLMService):
     """Mock LLM service for testing."""
 
     async def chat_to_tasks(self, prompt: str) -> List[Dict]:
         """
         Return mock tasks based on the prompt.
 
         Args:
             prompt: User's natural language input
 
         Returns:
             List of task dictionaries
         """
         words = prompt.lower().split()
         priority = "medium"
         if "urgent" in words or "important" in words:
             priority = "high"
         elif "low" in words or "minor" in words:
             priority = "low"
         return [{
             "title": prompt[:50] + ("..." if len(prompt) > 50 else ""),
             "description": prompt,
@@ -189,7 +193,7 @@ class MockLLMService(LLMService):
 def get_llm_service() -> LLMService:
     """
     Factory function for LLM service dependency injection.
 
     Returns:
         An instance of a concrete LLMService implementation
     """