Implement Task Management API with FastAPI and SQLite

This commit is contained in:
Automated Action 2025-05-18 19:36:06 +00:00
parent 6a20605bae
commit 47e91dc074
16 changed files with 664 additions and 2 deletions

View File

@ -1,3 +1,91 @@
# FastAPI Application
# Task Management API
This is a FastAPI application bootstrapped by BackendIM, the AI-powered backend generation platform.
A simple task management API built with FastAPI and SQLite.
## Features
- Create, read, update, and delete tasks
- Mark tasks as completed
- Filter tasks by completion status and priority
- Set due dates for tasks
- Health check endpoint
## Tech Stack
- Python 3.9+
- FastAPI for API development
- SQLAlchemy for ORM
- Alembic for database migrations
- SQLite for database storage
- Pydantic for data validation
- Uvicorn for ASGI server
## Project Structure
```
.
├── app/ # Application package
│ ├── api/ # API endpoints
│ │ └── routes/ # API route definitions
│ ├── core/ # Core configuration
│ ├── db/ # Database configurations
│ ├── models/ # SQLAlchemy models
│ └── schemas/ # Pydantic schemas
├── migrations/ # Alembic migration scripts
├── alembic.ini # Alembic configuration
├── main.py # Entry point for the application
└── requirements.txt # Dependencies
```
## Getting Started
### Prerequisites
- Python 3.9+
- SQLite
### Installation
1. Clone the repository
2. Install dependencies:
```
pip install -r requirements.txt
```
3. Run the application:
```
uvicorn main:app --reload
```
## API Documentation
Once the application is running, you can access the API documentation at:
- Swagger UI: http://localhost:8000/docs
- ReDoc: http://localhost:8000/redoc
## API Endpoints
### Health Check
- `GET /health` - Check if the API is running
### Task Management
- `GET /api/tasks` - List all tasks
- `POST /api/tasks` - Create a new task
- `GET /api/tasks/{task_id}` - Get a specific task
- `PUT /api/tasks/{task_id}` - Update a task
- `DELETE /api/tasks/{task_id}` - Delete a task
- `POST /api/tasks/{task_id}/complete` - Mark a task as completed
## Task Model
- `id`: Unique identifier
- `title`: Title of the task
- `description`: Optional description
- `is_completed`: Task completion status
- `priority`: Task priority (1=Low, 2=Medium, 3=High)
- `created_at`: Creation timestamp
- `updated_at`: Last update timestamp
- `completed_at`: Completion timestamp (if completed)
- `due_date`: Optional due date

86
alembic.ini Normal file
View File

@ -0,0 +1,86 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = migrations
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat migrations/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = sqlite:////app/storage/db/db.sqlite
# Use absolute path as instructed
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@ -0,0 +1,7 @@
from fastapi import APIRouter

from app.api.routes import tasks

# Aggregate API router: every feature router is mounted here under /api.
router = APIRouter(prefix="/api")
# Task CRUD endpoints live at /api/tasks and are grouped under "Tasks" in the docs.
router.include_router(tasks.router, prefix="/tasks", tags=["Tasks"])

136
app/api/routes/tasks.py Normal file
View File

@ -0,0 +1,136 @@
from datetime import datetime
from typing import List, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from app.db.session import get_db
from app.models.task import Task
from app.schemas.task import TaskCreate, TaskUpdate, TaskResponse, PriorityEnum
router = APIRouter()
@router.post("", response_model=TaskResponse, status_code=status.HTTP_201_CREATED)
def create_task(*, db: Session = Depends(get_db), task_in: TaskCreate):
    """Persist a new task from the validated payload and return the stored row."""
    new_task = Task(
        title=task_in.title,
        description=task_in.description,
        priority=task_in.priority,
        due_date=task_in.due_date,
    )
    db.add(new_task)
    db.commit()
    # Reload so server-generated fields (id, created_at, ...) are populated.
    db.refresh(new_task)
    return new_task
@router.get("", response_model=List[TaskResponse])
def read_tasks(
    *,
    db: Session = Depends(get_db),
    skip: int = 0,
    limit: int = 100,
    is_completed: Optional[bool] = None,
    priority: Optional[PriorityEnum] = None
):
    """
    Retrieve tasks with optional filtering.

    Filters by completion status and/or priority when provided, orders by
    creation date (newest first), then applies skip/limit pagination.
    """
    query = db.query(Task)

    # Apply filters if provided
    if is_completed is not None:
        query = query.filter(Task.is_completed == is_completed)
    if priority is not None:
        query = query.filter(Task.priority == priority)

    # Order BEFORE paginating: SQLAlchemy 1.4+/2.0 raises if order_by() is
    # called on a query that already has LIMIT/OFFSET applied, and pagination
    # of an unordered result would be non-deterministic anyway.
    query = query.order_by(Task.created_at.desc())
    query = query.offset(skip).limit(limit)

    return query.all()
@router.get("/{task_id}", response_model=TaskResponse)
def read_task(*, db: Session = Depends(get_db), task_id: int):
    """Return the task with the given ID; respond 404 when it does not exist."""
    task = db.query(Task).filter(Task.id == task_id).first()
    if task is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )
    return task
@router.put("/{task_id}", response_model=TaskResponse)
def update_task(
    *,
    db: Session = Depends(get_db),
    task_id: int,
    task_in: TaskUpdate
):
    """Apply a partial update to a task; respond 404 when the ID is unknown.

    Only fields explicitly sent by the client (exclude_unset) are applied.
    completed_at is kept in sync when the completion flag changes state.
    """
    task = db.query(Task).filter(Task.id == task_id).first()
    if task is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )

    changes = task_in.model_dump(exclude_unset=True)

    if "is_completed" in changes:
        flag = changes["is_completed"]
        if flag and not task.is_completed:
            # Transition to completed: stamp the completion time.
            changes["completed_at"] = datetime.utcnow()
        elif not flag and task.is_completed:
            # Transition back to open: clear the stale completion time.
            changes["completed_at"] = None

    for attr, new_value in changes.items():
        setattr(task, attr, new_value)

    db.add(task)
    db.commit()
    db.refresh(task)
    return task
@router.delete("/{task_id}", status_code=status.HTTP_204_NO_CONTENT, response_model=None)
def delete_task(*, db: Session = Depends(get_db), task_id: int):
    """Permanently remove a task; respond 404 when the ID is unknown."""
    task = db.query(Task).filter(Task.id == task_id).first()
    if task is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )
    db.delete(task)
    db.commit()
    # 204 No Content: nothing to serialize.
    return None
@router.post("/{task_id}/complete", response_model=TaskResponse)
def mark_task_as_completed(*, db: Session = Depends(get_db), task_id: int):
    """Flag a task as done, stamping completed_at; already-done tasks are a no-op."""
    task = db.query(Task).filter(Task.id == task_id).first()
    if task is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )

    if task.is_completed:
        # Idempotent: nothing to persist, return the task unchanged.
        return task

    task.mark_as_completed()
    db.add(task)
    db.commit()
    db.refresh(task)
    return task

33
app/core/config.py Normal file
View File

@ -0,0 +1,33 @@
from pathlib import Path
from pydantic_settings import BaseSettings
from typing import Optional
class Settings(BaseSettings):
    """Application configuration, overridable via environment variables / .env."""

    # Application settings
    PROJECT_NAME: str = "Task Management API"
    PROJECT_DESCRIPTION: str = "A simple task management API using FastAPI and SQLite"
    VERSION: str = "0.1.0"
    API_PREFIX: str = "/api"

    # Database settings
    # Absolute path — matches the sqlalchemy.url hard-coded in alembic.ini.
    DB_DIR: Path = Path("/app") / "storage" / "db"
    SQLALCHEMY_DATABASE_URL: Optional[str] = None

    class Config:
        # pydantic-settings: read overrides from a local .env file.
        env_file = ".env"
        env_file_encoding = "utf-8"
        case_sensitive = True

    def __init__(self, **kwargs):
        """Load settings, then derive the DB URL and create its directory."""
        super().__init__(**kwargs)
        # Ensure database directory exists
        self.DB_DIR.mkdir(parents=True, exist_ok=True)
        # Set database URL if not provided
        if not self.SQLALCHEMY_DATABASE_URL:
            self.SQLALCHEMY_DATABASE_URL = f"sqlite:///{self.DB_DIR}/db.sqlite"


# Singleton imported throughout the app; instantiating it creates DB_DIR.
settings = Settings()

2
app/db/base.py Normal file
View File

@ -0,0 +1,2 @@
# Import all models here for Alembic migrations
from app.models.task import Task # noqa

14
app/db/base_class.py Normal file
View File

@ -0,0 +1,14 @@
from typing import Any
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import DeclarativeBase
class Base(DeclarativeBase):
    """Declarative base shared by all ORM models."""

    # Filled in by SQLAlchemy on concrete subclasses; declared for type checkers.
    id: Any
    __name__: str

    # Generate __tablename__ automatically
    @declared_attr
    def __tablename__(cls) -> str:
        # e.g. class Task -> table "task"
        return cls.__name__.lower()

22
app/db/session.py Normal file
View File

@ -0,0 +1,22 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from app.core.config import settings
# Create SQLAlchemy engine
engine = create_engine(
    settings.SQLALCHEMY_DATABASE_URL,
    # SQLite connections are thread-bound by default; FastAPI may hand the
    # session to a different thread, so the check must be disabled.
    connect_args={"check_same_thread": False}  # Only needed for SQLite
)

# Create session factory (one Session per request via get_db below)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
def get_db():
    """FastAPI dependency: yield a database session, guaranteeing it is closed.

    Used as `Depends(get_db)`; the finally-block runs after the response,
    even when the endpoint raised.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()

30
app/models/task.py Normal file
View File

@ -0,0 +1,30 @@
from datetime import datetime
from sqlalchemy import Boolean, Column, DateTime, Integer, String, Text
from sqlalchemy.sql import func
from app.db.base_class import Base
class Task(Base):
    """Task model representing a to-do item"""

    id = Column(Integer, primary_key=True, index=True)
    title = Column(String(255), nullable=False, index=True)
    description = Column(Text, nullable=True)
    # Python-side default: applies on ORM inserts only.
    is_completed = Column(Boolean, default=False)

    # Priority: 1 (Low), 2 (Medium), 3 (High)
    priority = Column(Integer, default=2)

    # Audit timestamp fields
    created_at = Column(DateTime, default=func.now(), nullable=False)
    updated_at = Column(DateTime, default=func.now(), onupdate=func.now(), nullable=False)
    completed_at = Column(DateTime, nullable=True)

    # Optional due date
    due_date = Column(DateTime, nullable=True)

    def mark_as_completed(self) -> None:
        """Mark the task as completed and set the completion timestamp"""
        self.is_completed = True
        # NOTE(review): naive UTC datetime — consistent with update_task,
        # which also stores naive timestamps.
        self.completed_at = datetime.utcnow()

3
app/schemas/__init__.py Normal file
View File

@ -0,0 +1,3 @@
from app.schemas.task import TaskCreate, TaskUpdate, TaskInDB, TaskResponse, PriorityEnum
__all__ = ["TaskCreate", "TaskUpdate", "TaskInDB", "TaskResponse", "PriorityEnum"]

45
app/schemas/task.py Normal file
View File

@ -0,0 +1,45 @@
from datetime import datetime
from enum import Enum
from typing import Optional
from pydantic import BaseModel, Field
class PriorityEnum(int, Enum):
    """Task priority levels, stored as integers (1=Low, 2=Medium, 3=High)."""

    LOW = 1
    MEDIUM = 2
    HIGH = 3
class TaskBase(BaseModel):
    """Fields common to task creation and read schemas."""

    title: str = Field(..., min_length=1, max_length=255, description="Title of the task")
    description: Optional[str] = Field(None, description="Detailed description of the task")
    priority: PriorityEnum = Field(PriorityEnum.MEDIUM, description="Priority level of the task")
    due_date: Optional[datetime] = Field(None, description="Due date for the task")
class TaskCreate(TaskBase):
    """Request payload for POST /api/tasks; identical to TaskBase today."""
    pass
class TaskUpdate(BaseModel):
    """Partial-update payload for PUT /api/tasks/{id}; unset fields are left untouched."""

    title: Optional[str] = Field(None, min_length=1, max_length=255)
    description: Optional[str] = None
    is_completed: Optional[bool] = None
    priority: Optional[PriorityEnum] = None
    due_date: Optional[datetime] = None
class TaskInDB(TaskBase):
    """Task as stored in the database, including server-generated fields."""

    id: int
    is_completed: bool
    created_at: datetime
    updated_at: datetime
    completed_at: Optional[datetime] = None

    class Config:
        # Allow construction directly from SQLAlchemy ORM instances.
        from_attributes = True
class TaskResponse(TaskInDB):
    """Task schema for response"""
    # Alias of TaskInDB, kept distinct so the API response shape can diverge later.
    pass

42
main.py Normal file
View File

@ -0,0 +1,42 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.api.routes import router as api_router
from app.core.config import settings
def create_app() -> FastAPI:
    """Build and configure the FastAPI application instance."""
    application = FastAPI(
        title=settings.PROJECT_NAME,
        description=settings.PROJECT_DESCRIPTION,
        version=settings.VERSION,
        docs_url="/docs",
        redoc_url="/redoc",
    )

    # Wide-open CORS: any origin, method, and header is accepted.
    application.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # Mount all /api routes.
    application.include_router(api_router)

    return application
# Module-level application object targeted by `uvicorn main:app`.
app = create_app()


@app.get("/health", tags=["Health"])
def health_check():
    """Liveness probe: confirms the process is up and serving requests."""
    return {"status": "healthy"}


# Allow running directly with `python main.py` (dev convenience).
if __name__ == "__main__":
    import uvicorn
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)

82
migrations/env.py Normal file
View File

@ -0,0 +1,82 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# Imported after logging setup so every model is registered on Base.metadata
# before autogenerate inspects it.
from app.db.base import Base  # noqa

target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        # Render bound parameters inline, since the SQL is only emitted as text.
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        # Migrations are one-shot: no need to keep pooled connections around.
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        # Configure for SQLite to support column and table alterations
        is_sqlite = connection.dialect.name == "sqlite"
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            # Enable SQLite 'batch mode' for alterations
            render_as_batch=is_sqlite,
        )

        with context.begin_transaction():
            context.run_migrations()
# Alembic executes this module directly; dispatch on the requested mode
# (offline emits SQL text, online runs against a live connection).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

24
migrations/script.py.mako Normal file
View File

@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,40 @@
"""create task table
Revision ID: 202308201234
Revises:
Create Date: 2023-08-20 12:34:56.789012
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '202308201234'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the task table and its secondary indexes."""
    op.create_table(
        'task',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        # Use server_default, not 'default': 'default' (and 'onupdate') are
        # ORM-side hooks that are no-ops inside a migration's DDL, so plain
        # SQL inserts against a NOT NULL column would otherwise fail.
        sa.Column('is_completed', sa.Boolean(), nullable=False,
                  server_default=sa.false()),
        sa.Column('priority', sa.Integer(), nullable=False,
                  server_default=sa.text('2')),
        sa.Column('created_at', sa.DateTime(), nullable=False,
                  server_default=sa.func.now()),
        # updated_at refresh-on-update is handled by the ORM model's onupdate.
        sa.Column('updated_at', sa.DateTime(), nullable=False,
                  server_default=sa.func.now()),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        sa.Column('due_date', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_task_id'), 'task', ['id'], unique=False)
    op.create_index(op.f('ix_task_title'), 'task', ['title'], unique=False)
def downgrade():
    """Drop the task table and its indexes, reversing creation order."""
    for index_name in ('ix_task_title', 'ix_task_id'):
        op.drop_index(op.f(index_name), table_name='task')
    op.drop_table('task')

8
requirements.txt Normal file
View File

@ -0,0 +1,8 @@
fastapi>=0.103.1,<0.104.0
uvicorn>=0.23.2,<0.24.0
sqlalchemy>=2.0.20,<2.1.0
alembic>=1.12.0,<1.13.0
pydantic>=2.3.0,<2.4.0
python-multipart>=0.0.6,<0.1.0
python-dotenv>=1.0.0,<1.1.0
ruff>=0.0.287,<0.1.0