Implement Task Manager API with FastAPI and SQLite

parent 6c5ef75962
commit 1f83e6545c

README.md (111 lines)
@@ -1,3 +1,110 @@
-# FastAPI Application
-
-This is a FastAPI application bootstrapped by BackendIM, the AI-powered backend generation platform.
# Task Manager API

A RESTful API for managing tasks and to-do items, built with FastAPI and SQLite.

## Features

- Create, read, update, and delete tasks
- Filter tasks by status, priority, and completion status
- Pagination support for listing tasks
- Data validation with Pydantic models
- Database migrations with Alembic
- Comprehensive error handling
- API documentation with Swagger UI and ReDoc

## Project Structure

```
taskmanagerapi/
├── alembic.ini                      # Alembic configuration
├── app/                             # Main application package
│   ├── api/                         # API endpoints
│   │   ├── v1/                      # API version 1
│   │   │   ├── endpoints/           # API endpoint modules
│   │   │   │   ├── health.py        # Health check endpoint
│   │   │   │   └── tasks.py         # Task CRUD endpoints
│   │   │   └── router.py            # API router
│   ├── core/                        # Core application modules
│   │   ├── config.py                # Application configuration
│   │   ├── error_handlers.py        # Error handling middleware
│   │   └── exceptions.py            # Custom exceptions
│   ├── db/                          # Database modules
│   │   └── session.py               # Database session
│   ├── models/                      # SQLAlchemy models
│   │   └── task.py                  # Task model
│   ├── schemas/                     # Pydantic schemas
│   │   ├── responses.py             # Response schemas
│   │   └── task.py                  # Task schemas
│   ├── services/                    # Business logic
│   │   └── task.py                  # Task service
│   └── storage/                     # Storage directory
│       └── db/                      # Database directory
├── migrations/                      # Alembic migrations
│   ├── versions/                    # Migration versions
│   │   └── 0001_create_tasks_table.py
│   ├── env.py                       # Alembic environment
│   └── script.py.mako               # Alembic script template
├── main.py                          # Application entry point
├── pyproject.toml                   # Project configuration
└── requirements.txt                 # Project dependencies
```

## API Endpoints

- **GET /api/v1/health** - Health check endpoint
- **GET /api/v1/tasks** - List tasks with optional filtering and pagination
- **POST /api/v1/tasks** - Create a new task
- **GET /api/v1/tasks/{task_id}** - Get a specific task
- **PUT /api/v1/tasks/{task_id}** - Update a specific task
- **DELETE /api/v1/tasks/{task_id}** - Delete a specific task
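
The sketch below is one minimal way to exercise these endpoints from Python using only the standard library; it assumes the server is already running locally on the default port 8000.

```python
import json
import urllib.request

BASE = "http://localhost:8000/api/v1"

# Create a task (POST /api/v1/tasks/)
payload = json.dumps({"title": "Write the docs", "priority": "high"}).encode()
request = urllib.request.Request(
    f"{BASE}/tasks/",
    data=payload,
    headers={"Content-Type": "application/json"},
    method="POST",
)
with urllib.request.urlopen(request) as response:
    created = json.load(response)
    print("created task", created["id"], "with status", created["status"])

# List high-priority tasks with pagination (GET /api/v1/tasks/?priority=high&limit=10)
with urllib.request.urlopen(f"{BASE}/tasks/?priority=high&limit=10") as response:
    page = json.load(response)
    print("total:", page["total"], "on this page:", len(page["items"]))
```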

## Task Model

- **id**: Integer - Task ID
- **title**: String - Task title
- **description**: String (optional) - Task description
- **status**: Enum - Task status (todo, in_progress, done)
- **priority**: Enum - Task priority (low, medium, high)
- **due_date**: DateTime (optional) - Task due date
- **completed**: Boolean - Whether the task is completed
- **created_at**: DateTime - Task creation timestamp
- **updated_at**: DateTime - Task update timestamp
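
For illustration, a single task serialized by the API maps onto these fields roughly as follows (shown as a Python literal; the values are invented):

```python
example_task = {
    "id": 1,
    "title": "Prepare the release notes",
    "description": "Summarise the changes for v0.1.0",
    "status": "todo",
    "priority": "medium",
    "due_date": None,
    "completed": False,
    "created_at": "2023-08-01T12:00:00",
    "updated_at": "2023-08-01T12:00:00",
}
```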

## Requirements

- Python 3.8+
- FastAPI
- SQLAlchemy
- Alembic
- Pydantic
- Uvicorn

## Getting Started

1. Clone the repository
2. Install dependencies:

   ```bash
   pip install -r requirements.txt
   ```

3. Run database migrations:

   ```bash
   alembic upgrade head
   ```

4. Start the application:

   ```bash
   uvicorn main:app --reload
   ```

5. Access the API documentation:
   - Swagger UI: http://localhost:8000/docs
   - ReDoc: http://localhost:8000/redoc

## Development

- Linting:

  ```bash
  ruff check .
  ```

- Auto-fix linting issues:

  ```bash
  ruff check --fix .
  ```

alembic.ini (new file, 106 lines)
@@ -0,0 +1,106 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# SQLite URL
sqlalchemy.url = sqlite:////app/storage/db/db.sqlite


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

app/__init__.py (new file, empty)
app/api/__init__.py (new file, empty)
app/api/v1/__init__.py (new file, empty)
app/api/v1/endpoints/__init__.py (new file, empty)

app/api/v1/endpoints/health.py (new file, 15 lines)
@@ -0,0 +1,15 @@
from fastapi import APIRouter

from app.schemas.responses import HealthCheck

router = APIRouter()


@router.get("/", response_model=HealthCheck)
def health_check():
    """Health check endpoint.

    Returns:
        Health check response
    """
    return HealthCheck()

app/api/v1/endpoints/tasks.py (new file, 73 lines)
@@ -0,0 +1,73 @@
from typing import Optional

from fastapi import APIRouter, Depends, Path, Query, status
from sqlalchemy.orm import Session

from app.db.session import get_db
from app.models.task import TaskPriority, TaskStatus
from app.schemas.responses import PaginatedResponse
from app.schemas.task import Task, TaskCreate, TaskUpdate
from app.services.task import TaskService

router = APIRouter()


@router.get("/", response_model=PaginatedResponse[Task])
def get_tasks(
    skip: int = Query(0, ge=0, description="Number of tasks to skip"),
    limit: int = Query(100, gt=0, le=1000, description="Maximum number of tasks to return"),
    status: Optional[TaskStatus] = Query(None, description="Filter by task status"),
    priority: Optional[TaskPriority] = Query(None, description="Filter by task priority"),
    completed: Optional[bool] = Query(None, description="Filter by completion status"),
    db: Session = Depends(get_db),
):
    """Get a list of tasks with optional filtering and pagination."""
    tasks = TaskService.get_tasks(
        db, skip=skip, limit=limit, status=status, priority=priority, completed=completed
    )

    total = TaskService.count_tasks(
        db, status=status, priority=priority, completed=completed
    )

    return PaginatedResponse.create(tasks, total, skip, limit)


@router.post("/", response_model=Task, status_code=status.HTTP_201_CREATED)
def create_task(task_data: TaskCreate, db: Session = Depends(get_db)):
    """Create a new task."""
    return TaskService.create_task(db, task_data)


@router.get("/{task_id}", response_model=Task)
def get_task(
    task_id: int = Path(..., gt=0, description="The ID of the task"),
    db: Session = Depends(get_db),
):
    """Get a specific task by ID."""
    return TaskService.get_task(db, task_id)


@router.put("/{task_id}", response_model=Task)
def update_task(
    task_data: TaskUpdate,
    task_id: int = Path(..., gt=0, description="The ID of the task"),
    db: Session = Depends(get_db),
):
    """Update a specific task by ID."""
    return TaskService.update_task(db, task_id, task_data)


@router.delete("/{task_id}", status_code=status.HTTP_204_NO_CONTENT, response_model=None)
def delete_task(
    task_id: int = Path(..., gt=0, description="The ID of the task"),
    db: Session = Depends(get_db),
):
    """Delete a specific task by ID."""
    TaskService.delete_task(db, task_id)
    return None

app/api/v1/router.py (new file, 9 lines)
@@ -0,0 +1,9 @@
from fastapi import APIRouter

from app.api.v1.endpoints import health, tasks

api_router = APIRouter()

# Include routers for endpoints
api_router.include_router(health.router, prefix="/health", tags=["health"])
api_router.include_router(tasks.router, prefix="/tasks", tags=["tasks"])

app/core/__init__.py (new file, empty)

app/core/config.py (new file, 40 lines)
@@ -0,0 +1,40 @@
from pathlib import Path
from typing import List, Optional

from pydantic import field_validator
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    """Application settings."""

    # API
    API_V1_STR: str = "/api/v1"

    # Application
    PROJECT_NAME: str = "Task Manager API"
    PROJECT_DESCRIPTION: str = "API for managing tasks and to-do items"
    VERSION: str = "0.1.0"

    # CORS
    CORS_ORIGINS: List[str] = ["*"]

    # SQLite Database
    DB_DIR: Path = Path("/app/storage/db")
    DB_NAME: str = "db.sqlite"
    SQLALCHEMY_DATABASE_URL: Optional[str] = None

    @field_validator("SQLALCHEMY_DATABASE_URL", mode="before")
    def assemble_db_url(cls, v: Optional[str], info) -> str:
        if v is not None:
            return v

        db_dir = info.data.get("DB_DIR")
        db_name = info.data.get("DB_NAME")

        db_dir.mkdir(parents=True, exist_ok=True)

        return f"sqlite:///{db_dir}/{db_name}"


settings = Settings()
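
Because `Settings` subclasses pydantic-settings' `BaseSettings`, each field above can be overridden through an environment variable of the same name before the module is imported. A minimal sketch; the staging name and the /tmp path are illustrative, not part of the project:

```python
# Hypothetical override: point the database at a writable temp directory
# and rename the project for a staging deployment.
import os

os.environ["DB_DIR"] = "/tmp/taskmanager-db"
os.environ["PROJECT_NAME"] = "Task Manager API (staging)"

from app.core.config import Settings

settings = Settings()
print(settings.PROJECT_NAME)             # Task Manager API (staging)
print(settings.SQLALCHEMY_DATABASE_URL)  # sqlite:////tmp/taskmanager-db/db.sqlite
```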

app/core/error_handlers.py (new file, 110 lines)
@@ -0,0 +1,110 @@
from fastapi import FastAPI, Request
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from pydantic import ValidationError
from sqlalchemy.exc import SQLAlchemyError

from app.core.exceptions import TaskManagerException
from app.schemas.responses import ErrorResponse


def add_error_handlers(app: FastAPI) -> None:
    """Add error handlers to the FastAPI application.

    Args:
        app: FastAPI application
    """

    @app.exception_handler(TaskManagerException)
    async def task_manager_exception_handler(
        request: Request, exc: TaskManagerException
    ) -> JSONResponse:
        """Handle TaskManagerException.

        Args:
            request: FastAPI request
            exc: TaskManagerException

        Returns:
            JSONResponse with error details
        """
        return JSONResponse(
            status_code=exc.status_code,
            content=ErrorResponse(detail=exc.detail).model_dump(),
        )

    @app.exception_handler(RequestValidationError)
    async def validation_exception_handler(
        request: Request, exc: RequestValidationError
    ) -> JSONResponse:
        """Handle RequestValidationError.

        Args:
            request: FastAPI request
            exc: RequestValidationError

        Returns:
            JSONResponse with error details
        """
        return JSONResponse(
            status_code=422,
            content=ErrorResponse(detail=str(exc)).model_dump(),
        )

    @app.exception_handler(ValidationError)
    async def pydantic_validation_exception_handler(
        request: Request, exc: ValidationError
    ) -> JSONResponse:
        """Handle Pydantic ValidationError.

        Args:
            request: FastAPI request
            exc: ValidationError

        Returns:
            JSONResponse with error details
        """
        return JSONResponse(
            status_code=422,
            content=ErrorResponse(detail=str(exc)).model_dump(),
        )

    @app.exception_handler(SQLAlchemyError)
    async def sqlalchemy_exception_handler(
        request: Request, exc: SQLAlchemyError
    ) -> JSONResponse:
        """Handle SQLAlchemy errors.

        Args:
            request: FastAPI request
            exc: SQLAlchemyError

        Returns:
            JSONResponse with error details
        """
        return JSONResponse(
            status_code=500,
            content=ErrorResponse(
                detail="Database error occurred. Please try again later."
            ).model_dump(),
        )

    @app.exception_handler(Exception)
    async def general_exception_handler(
        request: Request, exc: Exception
    ) -> JSONResponse:
        """Handle general exceptions.

        Args:
            request: FastAPI request
            exc: Exception

        Returns:
            JSONResponse with error details
        """
        return JSONResponse(
            status_code=500,
            content=ErrorResponse(
                detail="An unexpected error occurred. Please try again later."
            ).model_dump(),
        )

app/core/exceptions.py (new file, 54 lines)
@@ -0,0 +1,54 @@
from typing import Any, Dict, Optional

from fastapi import HTTPException, status


class TaskManagerException(HTTPException):
    """Base exception for the Task Manager API."""

    def __init__(
        self,
        status_code: int,
        detail: Any = None,
        headers: Optional[Dict[str, Any]] = None,
    ) -> None:
        super().__init__(status_code=status_code, detail=detail, headers=headers)


class TaskNotFoundException(TaskManagerException):
    """Exception raised when a task is not found."""

    def __init__(
        self,
        detail: Any = "Task not found",
        headers: Optional[Dict[str, Any]] = None,
    ) -> None:
        super().__init__(
            status_code=status.HTTP_404_NOT_FOUND, detail=detail, headers=headers
        )


class ValidationException(TaskManagerException):
    """Exception raised when validation fails."""

    def __init__(
        self,
        detail: Any = "Validation error",
        headers: Optional[Dict[str, Any]] = None,
    ) -> None:
        super().__init__(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=detail, headers=headers
        )


class DatabaseException(TaskManagerException):
    """Exception raised when a database operation fails."""

    def __init__(
        self,
        detail: Any = "Database operation failed",
        headers: Optional[Dict[str, Any]] = None,
    ) -> None:
        super().__init__(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=detail, headers=headers
        )

app/db/__init__.py (new file, empty)

app/db/session.py (new file, 30 lines)
@@ -0,0 +1,30 @@
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

from app.core.config import settings

# Create SQLAlchemy engine
engine = create_engine(
    settings.SQLALCHEMY_DATABASE_URL,
    connect_args={"check_same_thread": False}
)

# Create SessionLocal class
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# Create Base class for models
Base = declarative_base()


# Dependency for getting DB session
def get_db():
    """Dependency function that yields DB sessions.

    Yields:
        db: SQLAlchemy database session
    """
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()

app/models/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from app.models.task import Task, TaskPriority, TaskStatus

__all__ = ["Task", "TaskStatus", "TaskPriority"]

app/models/task.py (new file, 56 lines)
@@ -0,0 +1,56 @@
from enum import Enum as PyEnum

from sqlalchemy import Boolean, Column, DateTime, Enum, Integer, String, Text
from sqlalchemy.sql import func

from app.db.session import Base


class TaskStatus(str, PyEnum):
    """Enum for task status."""

    TODO = "todo"
    IN_PROGRESS = "in_progress"
    DONE = "done"


class TaskPriority(str, PyEnum):
    """Enum for task priority."""

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"


class Task(Base):
    """SQLAlchemy model for tasks."""

    __tablename__ = "tasks"

    id = Column(Integer, primary_key=True, index=True)
    title = Column(String(255), nullable=False, index=True)
    description = Column(Text, nullable=True)
    status = Column(
        Enum(TaskStatus),
        default=TaskStatus.TODO,
        nullable=False,
        index=True
    )
    priority = Column(
        Enum(TaskPriority),
        default=TaskPriority.MEDIUM,
        nullable=False,
        index=True
    )
    due_date = Column(DateTime, nullable=True)
    completed = Column(Boolean, default=False, nullable=False, index=True)
    created_at = Column(DateTime, default=func.now(), nullable=False)
    updated_at = Column(
        DateTime,
        default=func.now(),
        onupdate=func.now(),
        nullable=False
    )

    def __repr__(self) -> str:
        return f"Task(id={self.id}, title={self.title}, status={self.status})"

app/schemas/__init__.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from app.schemas.responses import (
    ErrorResponse,
    HealthCheck,
    PaginatedResponse,
    PaginationParams,
)
from app.schemas.task import Task, TaskBase, TaskCreate, TaskInDB, TaskUpdate

__all__ = [
    "TaskBase",
    "TaskCreate",
    "TaskUpdate",
    "TaskInDB",
    "Task",
    "HealthCheck",
    "ErrorResponse",
    "PaginationParams",
    "PaginatedResponse",
]

app/schemas/responses.py (new file, 49 lines)
@@ -0,0 +1,49 @@
from typing import Generic, List, TypeVar

from pydantic import BaseModel, Field

T = TypeVar("T")


class HealthCheck(BaseModel):
    """Health check response schema."""

    status: str = "ok"
    message: str = "Application is healthy"


class ErrorResponse(BaseModel):
    """Error response schema."""

    detail: str


class PaginationParams(BaseModel):
    """Pagination parameters schema."""

    skip: int = Field(0, ge=0, description="Number of items to skip")
    limit: int = Field(100, gt=0, le=1000, description="Maximum number of items to return")


class PaginatedResponse(BaseModel, Generic[T]):
    """Paginated response schema."""

    items: List[T]
    total: int
    page: int
    pages: int

    @classmethod
    def create(
        cls, items: List[T], total: int, skip: int = 0, limit: int = 100
    ) -> "PaginatedResponse[T]":
        """Create a paginated response."""
        page = skip // limit + 1 if limit > 0 else 1
        pages = (total + limit - 1) // limit if limit > 0 else 1

        return cls(
            items=items,
            total=total,
            page=page,
            pages=pages,
        )
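
A quick sketch of the pagination arithmetic in `PaginatedResponse.create`: skipping 20 items with a limit of 10 lands on page 3, and 42 matching items span 5 pages (the values here are arbitrary, chosen only to illustrate the integer math):

```python
# Minimal illustration of PaginatedResponse.create with made-up numbers.
from app.schemas.responses import PaginatedResponse

page = PaginatedResponse[int].create(items=list(range(10)), total=42, skip=20, limit=10)
print(page.page, page.pages)  # 3 5
```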

app/schemas/task.py (new file, 52 lines)
@@ -0,0 +1,52 @@
from datetime import datetime
from typing import Optional

from pydantic import BaseModel, Field

from app.models.task import TaskPriority, TaskStatus


class TaskBase(BaseModel):
    """Base model for task schema."""

    title: str = Field(..., min_length=1, max_length=255, description="Task title")
    description: Optional[str] = Field(None, description="Task description")
    status: TaskStatus = Field(default=TaskStatus.TODO, description="Task status")
    priority: TaskPriority = Field(default=TaskPriority.MEDIUM, description="Task priority")
    due_date: Optional[datetime] = Field(None, description="Due date")
    completed: bool = Field(default=False, description="Whether the task is completed")


class TaskCreate(TaskBase):
    """Schema for creating a new task."""
    pass


class TaskUpdate(BaseModel):
    """Schema for updating an existing task."""

    title: Optional[str] = Field(None, min_length=1, max_length=255, description="Task title")
    description: Optional[str] = Field(None, description="Task description")
    status: Optional[TaskStatus] = Field(None, description="Task status")
    priority: Optional[TaskPriority] = Field(None, description="Task priority")
    due_date: Optional[datetime] = Field(None, description="Due date")
    completed: Optional[bool] = Field(None, description="Whether the task is completed")


class TaskInDB(TaskBase):
    """Schema for task in database."""

    id: int
    created_at: datetime
    updated_at: datetime

    class Config:
        """Configuration for the model."""

        orm_mode = True
        from_attributes = True


class Task(TaskInDB):
    """Schema for task response."""
    pass
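
One property worth noting: because every field on `TaskUpdate` is optional, `model_dump(exclude_unset=True)` yields only the fields a client actually sent, which is what `TaskService.update_task` relies on for partial updates. A small sketch (importing this module also pulls in `app.core.config`, which creates the configured database directory as a side effect):

```python
from app.schemas.task import TaskUpdate

patch = TaskUpdate(completed=True)
print(patch.model_dump(exclude_unset=True))  # {'completed': True}
```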

app/services/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from app.services.task import TaskService

__all__ = ["TaskService"]

app/services/task.py (new file, 173 lines)
@@ -0,0 +1,173 @@
from datetime import datetime
from typing import List, Optional

from sqlalchemy import select
from sqlalchemy.orm import Session
from sqlalchemy.sql import func

from app.core.exceptions import TaskNotFoundException
from app.models.task import Task, TaskPriority, TaskStatus
from app.schemas.task import TaskCreate, TaskUpdate


class TaskService:
    """Service for task operations."""

    @staticmethod
    def get_tasks(
        db: Session,
        skip: int = 0,
        limit: int = 100,
        status: Optional[TaskStatus] = None,
        priority: Optional[TaskPriority] = None,
        completed: Optional[bool] = None,
    ) -> List[Task]:
        """Get a list of tasks with optional filtering.

        Args:
            db: Database session
            skip: Number of items to skip
            limit: Maximum number of items to return
            status: Filter by task status
            priority: Filter by task priority
            completed: Filter by completion status

        Returns:
            List of tasks
        """
        query = select(Task)

        if status is not None:
            query = query.where(Task.status == status)

        if priority is not None:
            query = query.where(Task.priority == priority)

        if completed is not None:
            query = query.where(Task.completed == completed)

        query = query.offset(skip).limit(limit)

        return db.execute(query).scalars().all()

    @staticmethod
    def count_tasks(
        db: Session,
        status: Optional[TaskStatus] = None,
        priority: Optional[TaskPriority] = None,
        completed: Optional[bool] = None,
    ) -> int:
        """Count tasks with optional filtering.

        Args:
            db: Database session
            status: Filter by task status
            priority: Filter by task priority
            completed: Filter by completion status

        Returns:
            Number of tasks
        """
        query = select(func.count(Task.id))

        if status is not None:
            query = query.where(Task.status == status)

        if priority is not None:
            query = query.where(Task.priority == priority)

        if completed is not None:
            query = query.where(Task.completed == completed)

        return db.execute(query).scalar_one()

    @staticmethod
    def get_task(db: Session, task_id: int) -> Task:
        """Get a task by ID.

        Args:
            db: Database session
            task_id: ID of the task

        Returns:
            Task

        Raises:
            HTTPException: If task is not found
        """
        task = db.get(Task, task_id)

        if task is None:
            raise TaskNotFoundException(detail=f"Task with id {task_id} not found")

        return task

    @staticmethod
    def create_task(db: Session, task_data: TaskCreate) -> Task:
        """Create a new task.

        Args:
            db: Database session
            task_data: Task data

        Returns:
            Created task
        """
        task = Task(**task_data.model_dump())

        db.add(task)
        db.commit()
        db.refresh(task)

        return task

    @staticmethod
    def update_task(db: Session, task_id: int, task_data: TaskUpdate) -> Task:
        """Update an existing task.

        Args:
            db: Database session
            task_id: ID of the task
            task_data: Updated task data

        Returns:
            Updated task

        Raises:
            HTTPException: If task is not found
        """
        task = TaskService.get_task(db, task_id)

        data = task_data.model_dump(exclude_unset=True)

        for key, value in data.items():
            setattr(task, key, value)

        # Update the updated_at field
        task.updated_at = datetime.now()

        # If status is changed to DONE, mark as completed
        if task_data.status == TaskStatus.DONE:
            task.completed = True

        db.add(task)
        db.commit()
        db.refresh(task)

        return task

    @staticmethod
    def delete_task(db: Session, task_id: int) -> None:
        """Delete a task.

        Args:
            db: Database session
            task_id: ID of the task

        Raises:
            HTTPException: If task is not found
        """
        task = TaskService.get_task(db, task_id)

        db.delete(task)
        db.commit()
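
The service layer does not depend on FastAPI, so it can be exercised directly against a throwaway database. A rough sketch, assuming an in-memory SQLite engine stands in for the configured one (the DB_DIR override only avoids touching /app/storage when the app modules are imported):

```python
import os

os.environ.setdefault("DB_DIR", "/tmp/taskmanager-db")  # illustrative override

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from app.db.session import Base
from app.schemas.task import TaskCreate, TaskUpdate
from app.services.task import TaskService
import app.models.task  # ensure the tasks table is registered on Base

# A private in-memory database just for this demonstration.
engine = create_engine("sqlite:///:memory:", connect_args={"check_same_thread": False})
Base.metadata.create_all(bind=engine)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

db = SessionLocal()
task = TaskService.create_task(db, TaskCreate(title="Try the service layer"))
print(task.id, task.status, task.completed)          # 1 TaskStatus.TODO False

TaskService.update_task(db, task.id, TaskUpdate(status="done"))
print(TaskService.get_task(db, task.id).completed)   # True
db.close()
```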

main.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from app.api.v1.router import api_router
from app.core.config import settings
from app.core.error_handlers import add_error_handlers

app = FastAPI(
    title=settings.PROJECT_NAME,
    description=settings.PROJECT_DESCRIPTION,
    version=settings.VERSION,
    openapi_url=f"{settings.API_V1_STR}/openapi.json",
    docs_url="/docs",
    redoc_url="/redoc",
)

# Set up CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Add error handlers
add_error_handlers(app)

# Include API router
app.include_router(api_router, prefix=settings.API_V1_STR)

# Root path redirect to docs
@app.get("/", include_in_schema=False)
async def root_redirect():
    """Redirect root path to API documentation."""
    from fastapi.responses import RedirectResponse
    return RedirectResponse(url="/docs")

if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)

migrations/env.py (new file, 83 lines)
@@ -0,0 +1,83 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# Import database models to be included in migrations
from app.db.session import Base
from app.models import Task

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        is_sqlite = connection.dialect.name == 'sqlite'
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=is_sqlite,  # Enable batch mode for SQLite
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

migrations/script.py.mako (new file, 24 lines)
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}

migrations/versions/0001_create_tasks_table.py (new file, 41 lines)
@@ -0,0 +1,41 @@
"""create tasks table

Revision ID: 0001
Revises:
Create Date: 2023-08-01

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import func


# revision identifiers, used by Alembic.
revision = '0001'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.create_table(
        'tasks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(255), nullable=False, index=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('status', sa.Enum('todo', 'in_progress', 'done', name='taskstatus'),
                  nullable=False, default='todo', index=True),
        sa.Column('priority', sa.Enum('low', 'medium', 'high', name='taskpriority'),
                  nullable=False, default='medium', index=True),
        sa.Column('due_date', sa.DateTime(), nullable=True),
        sa.Column('completed', sa.Boolean(), nullable=False, default=False, index=True),
        sa.Column('created_at', sa.DateTime(), nullable=False, default=func.now()),
        sa.Column('updated_at', sa.DateTime(), nullable=False, default=func.now(), onupdate=func.now()),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index('ix_tasks_id', 'tasks', ['id'])


def downgrade() -> None:
    op.drop_index('ix_tasks_id', 'tasks')
    op.drop_table('tasks')

pyproject.toml (new file, 41 lines)
@@ -0,0 +1,41 @@
[tool.ruff]
line-length = 88
target-version = "py38"

[tool.ruff.lint]
select = [
    "E",   # pycodestyle errors
    "F",   # pyflakes
    "I",   # isort
    "B",   # flake8-bugbear
    "C4",  # flake8-comprehensions
    "UP",  # pyupgrade
    "D",   # pydocstyle
    "N",   # pep8-naming
]

ignore = [
    "D100",  # Missing docstring in public module
    "D104",  # Missing docstring in public package
    "D106",  # Missing docstring in public nested class
    "D203",  # 1 blank line required before class docstring
    "D213",  # Multi-line docstring summary should start at the second line
    "E501",  # Line too long (covered by line-length)
    "D200",  # One-line docstring should fit on one line
    "D212",  # Multi-line docstring summary should start at the first line
    "D107",  # Missing docstring in __init__
    "D105",  # Missing docstring in magic method
    "D102",  # Missing docstring in public method
    "N805",  # First argument of a method should be named `self`
    "B008",  # Do not perform function call in argument defaults
]

[tool.ruff.lint.per-file-ignores]
"migrations/*" = ["D", "E", "F", "I", "N"]
"__init__.py" = ["D", "F401"]

[tool.ruff.lint.isort]
known-first-party = ["app"]

[tool.ruff.lint.pydocstyle]
convention = "google"

requirements.txt (new file, 9 lines)
@@ -0,0 +1,9 @@
fastapi>=0.103.1,<0.104.0
uvicorn>=0.23.2,<0.24.0
sqlalchemy>=2.0.20,<2.1.0
alembic>=1.12.0,<1.13.0
pydantic>=2.3.0,<2.4.0
pydantic-settings>=2.0.3,<2.1.0
python-multipart>=0.0.6,<0.1.0
email-validator>=2.0.0,<2.1.0
ruff>=0.0.292,<0.1.0