initial commit

Agent@BackendIM 2025-05-17 09:29:49 +00:00
parent 4f9c7b9c71
commit dfbfa75251


@@ -75,14 +75,12 @@ class OpenAIService(LLMService):
temperature=0.2,
)
# Extract the JSON content from the response
content = response.choices[0].message.content
result = json.loads(content)
# Expect a "tasks" key in the response JSON
if "tasks" in result:
return result["tasks"]
return [result] # Return as a single-item list if no "tasks" key
return [result]
except Exception as e:
logger.error(f"OpenAI API error: {e}")
@@ -112,30 +110,38 @@ class GeminiService(LLMService):
Returns:
List of task dictionaries
"""
system_prompt = """
You are a task extraction assistant. Your job is to convert the user's natural language
input into one or more structured task objects. Each task should have these properties:
- title: A short, clear title for the task
- description: A more detailed description of what needs to be done
- due_date: When the task is due (ISO format date string, or null if not specified)
- priority: The priority level (high, medium, low)
- status: The status (defaults to "pending")
Return ONLY a JSON object with a "tasks" key that contains an array of task objects.
Do not include any text or explanation outside the JSON.
"""
system_prompt = (
"You are a task extraction assistant. Your job is to convert the user's natural language "
"input into one or more structured task objects. Each task should have these properties:\n"
"- title: A short, clear title for the task\n"
"- description: A more detailed description of what needs to be done\n"
"- due_date: When the task is due (ISO format date string, or null if not specified)\n"
"- priority: The priority level (high, medium, low)\n"
"- status: The status (defaults to \"pending\")\n\n"
"Return ONLY a JSON object with a \"tasks\" key that contains an array of task objects. "
"Do not include any text or explanation outside the JSON."
)
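For illustration only, a model response that satisfies the prompt above would look like the following; the variable name example_result and all field values are hypothetical:

# Hypothetical model output matching the schema the system prompt requests
example_result = {
    "tasks": [
        {
            "title": "Book dentist appointment",
            "description": "Call the clinic and schedule a checkup",
            "due_date": "2025-05-23",
            "priority": "medium",
            "status": "pending",
        }
    ]
}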
try:
# Start the chat session with the system prompt
chat = self.model.start_chat(history=[
{"role": "system", "content": system_prompt}
{
"content": {
"parts": [system_prompt]
}
}
])
# Send the user prompt asynchronously
response = await chat.send_message_async(prompt)
response = await chat.send_message_async(
{
"content": {
"parts": [prompt]
}
}
)
content = response.text
# Remove possible markdown code blocks wrapping the JSON response
# Remove possible markdown code blocks
if "```json" in content:
json_str = content.split("```json")[1].split("```")[0].strip()
elif "```" in content:
@@ -143,10 +149,8 @@ class GeminiService(LLMService):
else:
json_str = content.strip()
# Parse JSON response
result = json.loads(json_str)
# Return the list under "tasks" or the whole as single-item list
if "tasks" in result:
return result["tasks"]
return [result]
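A standalone sketch of the fence-stripping and parsing steps shown in this hunk, assuming the elided elif branch splits on a bare ``` fence the same way the ```json branch does; the function name extract_tasks_from_text is hypothetical, not part of the commit:

import json

def extract_tasks_from_text(content: str) -> list[dict]:
    # Strip a possible markdown code fence wrapping the JSON payload
    if "```json" in content:
        json_str = content.split("```json")[1].split("```")[0].strip()
    elif "```" in content:
        # Assumed behaviour of the branch not shown in the diff
        json_str = content.split("```")[1].split("```")[0].strip()
    else:
        json_str = content.strip()

    result = json.loads(json_str)
    # Return the list under "tasks", or wrap a single object in a list
    if "tasks" in result:
        return result["tasks"]
    return [result]

# Example: a fenced payload with an empty task list parses to []
print(extract_tasks_from_text('```json\n{"tasks": []}\n```'))  # []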