From 47e91dc07459b63673013b9b2fe4f47fe2184821 Mon Sep 17 00:00:00 2001
From: Automated Action
Date: Sun, 18 May 2025 19:36:06 +0000
Subject: [PATCH] Implement Task Management API with FastAPI and SQLite

---
 README.md                                  |  92 +++++++++++-
 alembic.ini                                |  86 +++++++++++
 app/api/routes/__init__.py                 |   7 +
 app/api/routes/tasks.py                    | 136 ++++++++++++++++++
 app/core/config.py                         |  33 +++++
 app/db/base.py                             |   2 +
 app/db/base_class.py                       |  14 ++
 app/db/session.py                          |  22 +++
 app/models/task.py                         |  30 ++++
 app/schemas/__init__.py                    |   3 +
 app/schemas/task.py                        |  45 ++++++
 main.py                                    |  42 ++++++
 migrations/env.py                          |  82 +++++++++++
 migrations/script.py.mako                  |  24 ++++
 .../202308201234_create_task_table.py      |  40 ++++++
 requirements.txt                           |   9 ++
 16 files changed, 665 insertions(+), 2 deletions(-)
 create mode 100644 alembic.ini
 create mode 100644 app/api/routes/__init__.py
 create mode 100644 app/api/routes/tasks.py
 create mode 100644 app/core/config.py
 create mode 100644 app/db/base.py
 create mode 100644 app/db/base_class.py
 create mode 100644 app/db/session.py
 create mode 100644 app/models/task.py
 create mode 100644 app/schemas/__init__.py
 create mode 100644 app/schemas/task.py
 create mode 100644 main.py
 create mode 100644 migrations/env.py
 create mode 100644 migrations/script.py.mako
 create mode 100644 migrations/versions/202308201234_create_task_table.py
 create mode 100644 requirements.txt

diff --git a/README.md b/README.md
index e8acfba..3767d65 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,91 @@
-# FastAPI Application
+# Task Management API
 
-This is a FastAPI application bootstrapped by BackendIM, the AI-powered backend generation platform.
+A simple task management API built with FastAPI and SQLite.
+
+## Features
+
+- Create, read, update, and delete tasks
+- Mark tasks as completed
+- Filter tasks by completion status and priority
+- Set due dates for tasks
+- Health check endpoint
+
+## Tech Stack
+
+- Python 3.9+
+- FastAPI for API development
+- SQLAlchemy for ORM
+- Alembic for database migrations
+- SQLite for database storage
+- Pydantic for data validation
+- Uvicorn for ASGI server
+
+## Project Structure
+
+```
+.
+├── app/                  # Application package
+│   ├── api/              # API endpoints
+│   │   └── routes/       # API route definitions
+│   ├── core/             # Core configuration
+│   ├── db/               # Database configurations
+│   ├── models/           # SQLAlchemy models
+│   └── schemas/          # Pydantic schemas
+├── migrations/           # Alembic migration scripts
+├── alembic.ini           # Alembic configuration
+├── main.py               # Entry point for the application
+└── requirements.txt      # Dependencies
+```
+
+## Getting Started
+
+### Prerequisites
+
+- Python 3.9+
+- SQLite
+
+### Installation
+
+1. Clone the repository
+2. Install dependencies:
+   ```
+   pip install -r requirements.txt
+   ```
+3. Run the application:
+   ```
+   uvicorn main:app --reload
+   ```
+
+## API Documentation
+
+Once the application is running, you can access the API documentation at:
+
+- Swagger UI: http://localhost:8000/docs
+- ReDoc: http://localhost:8000/redoc
+
+## API Endpoints
+
+### Health Check
+
+- `GET /health` - Check if the API is running
+
+### Task Management
+
+- `GET /api/tasks` - List all tasks
+- `POST /api/tasks` - Create a new task
+- `GET /api/tasks/{task_id}` - Get a specific task
+- `PUT /api/tasks/{task_id}` - Update a task
+- `DELETE /api/tasks/{task_id}` - Delete a task
+- `POST /api/tasks/{task_id}/complete` - Mark a task as completed
+
+## Task Model
+
+- `id`: Unique identifier
+- `title`: Title of the task
+- `description`: Optional description
+- `is_completed`: Task completion status
+- `priority`: Task priority (1=Low, 2=Medium, 3=High)
+- `created_at`: Creation timestamp
+- `updated_at`: Last update timestamp
+- `completed_at`: Completion timestamp (if completed)
+- `due_date`: Optional due date
\ No newline at end of file
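One gap in the installation steps above is worth flagging: nothing in this patch calls `Base.metadata.create_all()`, so the `task` table only exists after the Alembic migration included below has been applied (`alembic upgrade head`) — the `uvicorn` step assumes that has already been done. With the server running, the endpoints and fields described above can be exercised with a short script along these lines (an illustrative sketch only; `httpx` is not listed in requirements.txt):

```
# Illustrative client calls against a local instance started with `uvicorn main:app`.
# Assumes the httpx package is installed; it is not part of requirements.txt.
import httpx

BASE = "http://localhost:8000"

# Create a task (priority 3 = High)
created = httpx.post(
    f"{BASE}/api/tasks",
    json={"title": "Write docs", "description": "Draft the README", "priority": 3},
).json()
print(created["id"], created["is_completed"])  # e.g. 1 False

# List only tasks that are not yet completed
open_tasks = httpx.get(f"{BASE}/api/tasks", params={"is_completed": False}).json()
print(len(open_tasks))

# Mark the new task as completed
done = httpx.post(f"{BASE}/api/tasks/{created['id']}/complete").json()
print(done["completed_at"])
```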
diff --git a/alembic.ini b/alembic.ini
new file mode 100644
index 0000000..442de13
--- /dev/null
+++ b/alembic.ini
@@ -0,0 +1,86 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = migrations
+
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# timezone to use when rendering the date
+# within the migration file as well as the filename.
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; this defaults
+# to migrations/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path
+# version_locations = %(here)s/bar %(here)s/bat migrations/versions
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = sqlite:////app/storage/db/db.sqlite
+# Use absolute path as instructed
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks=black
+# black.type=console_scripts
+# black.entrypoint=black
+# black.options=-l 79
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
\ No newline at end of file
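If migrations need to be applied from code (for example in a test fixture or at service start-up) rather than via the `alembic` CLI, Alembic's Python API can drive the same configuration. A minimal sketch, assuming it runs from the repository root so the `alembic.ini` above is found:

```
# Programmatic equivalent of `alembic upgrade head`, using the alembic.ini above.
from alembic import command
from alembic.config import Config

alembic_cfg = Config("alembic.ini")   # picks up script_location and sqlalchemy.url
command.upgrade(alembic_cfg, "head")  # apply all pending migrations

# Generating a new revision after a model change would look like:
# command.revision(alembic_cfg, message="describe the change", autogenerate=True)
```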
diff --git a/app/api/routes/__init__.py b/app/api/routes/__init__.py
new file mode 100644
index 0000000..cf59a14
--- /dev/null
+++ b/app/api/routes/__init__.py
@@ -0,0 +1,7 @@
+from fastapi import APIRouter
+
+from app.api.routes import tasks
+
+router = APIRouter(prefix="/api")
+
+router.include_router(tasks.router, prefix="/tasks", tags=["Tasks"])
\ No newline at end of file
diff --git a/app/api/routes/tasks.py b/app/api/routes/tasks.py
new file mode 100644
index 0000000..492b415
--- /dev/null
+++ b/app/api/routes/tasks.py
@@ -0,0 +1,136 @@
+from datetime import datetime
+from typing import List, Optional
+from fastapi import APIRouter, Depends, HTTPException, status
+from sqlalchemy.orm import Session
+
+from app.db.session import get_db
+from app.models.task import Task
+from app.schemas.task import TaskCreate, TaskUpdate, TaskResponse, PriorityEnum
+
+router = APIRouter()
+
+
+@router.post("", response_model=TaskResponse, status_code=status.HTTP_201_CREATED)
+def create_task(*, db: Session = Depends(get_db), task_in: TaskCreate):
+    """Create a new task"""
+    db_task = Task(
+        title=task_in.title,
+        description=task_in.description,
+        priority=task_in.priority,
+        due_date=task_in.due_date
+    )
+    db.add(db_task)
+    db.commit()
+    db.refresh(db_task)
+    return db_task
+
+
+@router.get("", response_model=List[TaskResponse])
+def read_tasks(
+    *,
+    db: Session = Depends(get_db),
+    skip: int = 0,
+    limit: int = 100,
+    is_completed: Optional[bool] = None,
+    priority: Optional[PriorityEnum] = None
+):
+    """
+    Retrieve tasks with optional filtering
+    """
+    query = db.query(Task)
+
+    # Apply filters if provided
+    if is_completed is not None:
+        query = query.filter(Task.is_completed == is_completed)
+
+    if priority is not None:
+        query = query.filter(Task.priority == priority)
+
+    # Order by creation date (newest first) before applying pagination
+    query = query.order_by(Task.created_at.desc())
+
+    # Apply pagination
+    query = query.offset(skip).limit(limit)
+
+    return query.all()
+
+
+@router.get("/{task_id}", response_model=TaskResponse)
+def read_task(*, db: Session = Depends(get_db), task_id: int):
+    """Get a specific task by ID"""
+    task = db.query(Task).filter(Task.id == task_id).first()
+    if not task:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="Task not found"
+        )
+    return task
+
+
+@router.put("/{task_id}", response_model=TaskResponse)
+def update_task(
+    *,
+    db: Session = Depends(get_db),
+    task_id: int,
+    task_in: TaskUpdate
+):
+    """Update a task"""
+    task = db.query(Task).filter(Task.id == task_id).first()
+    if not task:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="Task not found"
+        )
+
+    update_data = task_in.model_dump(exclude_unset=True)
+
+    # If task is being marked as completed, set the completed_at timestamp
+    if "is_completed" in update_data and update_data["is_completed"] and not task.is_completed:
+        update_data["completed_at"] = datetime.utcnow()
+
+    # Reset completed_at if task is being marked as not completed
+    if "is_completed" in update_data and not update_data["is_completed"] and task.is_completed:
+        update_data["completed_at"] = None
+
+    for field, value in update_data.items():
+        setattr(task, field, value)
+
+    db.add(task)
+    db.commit()
+    db.refresh(task)
+    return task
+
+
+@router.delete("/{task_id}", status_code=status.HTTP_204_NO_CONTENT, response_model=None)
+def delete_task(*, db: Session = Depends(get_db), task_id: int):
+    """Delete a task"""
+    task = db.query(Task).filter(Task.id == task_id).first()
+    if not task:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="Task not found"
+        )
+
+    db.delete(task)
+    db.commit()
+    return None
+
+
+@router.post("/{task_id}/complete", response_model=TaskResponse)
+def mark_task_as_completed(*, db: Session = Depends(get_db), task_id: int):
+    """Mark a task as completed"""
+    task = db.query(Task).filter(Task.id == task_id).first()
+    if not task:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="Task not found"
+        )
+
+    # Only update if the task is not already completed
+    if not task.is_completed:
+        task.mark_as_completed()
+        db.add(task)
+        db.commit()
+        db.refresh(task)
+
+    return task
\ No newline at end of file
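The routes above lend themselves to testing with FastAPI's `TestClient` and an in-memory SQLite database swapped in through the `get_db` dependency. A sketch of such a test module (not part of this patch; it assumes `pytest` and `httpx` are installed, and overrides `DB_DIR` so importing the app does not try to create `/app/storage/db` on a development machine):

```
# Hypothetical test module, e.g. tests/test_tasks.py (not included in this patch).
import os

os.environ.setdefault("DB_DIR", "./storage/db")  # keep Settings() away from /app locally

from fastapi.testclient import TestClient
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import StaticPool

from app.db.base_class import Base
from app.db.session import get_db
from app.models.task import Task  # noqa: F401  registers the task table on Base.metadata
from main import app

# In-memory SQLite shared across sessions via StaticPool
engine = create_engine(
    "sqlite://",
    connect_args={"check_same_thread": False},
    poolclass=StaticPool,
)
TestingSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base.metadata.create_all(bind=engine)


def override_get_db():
    db = TestingSessionLocal()
    try:
        yield db
    finally:
        db.close()


app.dependency_overrides[get_db] = override_get_db
client = TestClient(app)


def test_create_and_complete_task():
    created = client.post("/api/tasks", json={"title": "demo"}).json()
    assert created["is_completed"] is False

    completed = client.post(f"/api/tasks/{created['id']}/complete").json()
    assert completed["is_completed"] is True
    assert completed["completed_at"] is not None
```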
diff --git a/app/core/config.py b/app/core/config.py
new file mode 100644
index 0000000..85d0667
--- /dev/null
+++ b/app/core/config.py
@@ -0,0 +1,33 @@
+from pathlib import Path
+from pydantic_settings import BaseSettings
+from typing import Optional
+
+
+class Settings(BaseSettings):
+    # Application settings
+    PROJECT_NAME: str = "Task Management API"
+    PROJECT_DESCRIPTION: str = "A simple task management API using FastAPI and SQLite"
+    VERSION: str = "0.1.0"
+    API_PREFIX: str = "/api"
+
+    # Database settings
+    DB_DIR: Path = Path("/app") / "storage" / "db"
+    SQLALCHEMY_DATABASE_URL: Optional[str] = None
+
+    class Config:
+        env_file = ".env"
+        env_file_encoding = "utf-8"
+        case_sensitive = True
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+
+        # Ensure database directory exists
+        self.DB_DIR.mkdir(parents=True, exist_ok=True)
+
+        # Set database URL if not provided
+        if not self.SQLALCHEMY_DATABASE_URL:
+            self.SQLALCHEMY_DATABASE_URL = f"sqlite:///{self.DB_DIR}/db.sqlite"
+
+
+settings = Settings()
diff --git a/app/db/base.py b/app/db/base.py
new file mode 100644
index 0000000..31d961b
--- /dev/null
+++ b/app/db/base.py
@@ -0,0 +1,2 @@
+# Import all models here for Alembic migrations
+from app.models.task import Task  # noqa
diff --git a/app/db/base_class.py b/app/db/base_class.py
new file mode 100644
index 0000000..0a42d13
--- /dev/null
+++ b/app/db/base_class.py
@@ -0,0 +1,14 @@
+from typing import Any
+
+from sqlalchemy.ext.declarative import declared_attr
+from sqlalchemy.orm import DeclarativeBase
+
+
+class Base(DeclarativeBase):
+    id: Any
+    __name__: str
+
+    # Generate __tablename__ automatically
+    @declared_attr
+    def __tablename__(cls) -> str:
+        return cls.__name__.lower()
diff --git a/app/db/session.py b/app/db/session.py
new file mode 100644
index 0000000..2817406
--- /dev/null
+++ b/app/db/session.py
@@ -0,0 +1,22 @@
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+
+from app.core.config import settings
+
+# Create SQLAlchemy engine
+engine = create_engine(
+    settings.SQLALCHEMY_DATABASE_URL,
+    connect_args={"check_same_thread": False}  # Only needed for SQLite
+)
+
+# Create session factory
+SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
+
+
+def get_db():
+    """Dependency for getting the database session"""
+    db = SessionLocal()
+    try:
+        yield db
+    finally:
+        db.close()
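Because `Settings` reads from the environment (and from a `.env` file), the database location can be redirected without code changes — useful when the container path `/app/storage/db` is not available. A small sketch; the variable names are the real field names above, the values are only examples, and they must be set before the app modules are imported because `app/db/session.py` builds its engine at import time:

```
# Point the app at a local SQLite file instead of /app/storage/db (values are examples).
import os

os.environ["DB_DIR"] = "./storage/db"
os.environ["SQLALCHEMY_DATABASE_URL"] = "sqlite:///./storage/db/dev.sqlite"

from app.core.config import Settings

print(Settings().SQLALCHEMY_DATABASE_URL)  # sqlite:///./storage/db/dev.sqlite
```

The same two assignments can instead live in a `.env` file next to `main.py`, which `Settings.Config` already points at.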
diff --git a/app/models/task.py b/app/models/task.py
new file mode 100644
index 0000000..b585f25
--- /dev/null
+++ b/app/models/task.py
@@ -0,0 +1,30 @@
+from datetime import datetime
+from sqlalchemy import Boolean, Column, DateTime, Integer, String, Text
+from sqlalchemy.sql import func
+
+from app.db.base_class import Base
+
+
+class Task(Base):
+    """Task model representing a to-do item"""
+
+    id = Column(Integer, primary_key=True, index=True)
+    title = Column(String(255), nullable=False, index=True)
+    description = Column(Text, nullable=True)
+    is_completed = Column(Boolean, default=False)
+
+    # Priority: 1 (Low), 2 (Medium), 3 (High)
+    priority = Column(Integer, default=2)
+
+    # Audit timestamp fields
+    created_at = Column(DateTime, default=func.now(), nullable=False)
+    updated_at = Column(DateTime, default=func.now(), onupdate=func.now(), nullable=False)
+    completed_at = Column(DateTime, nullable=True)
+
+    # Optional due date
+    due_date = Column(DateTime, nullable=True)
+
+    def mark_as_completed(self) -> None:
+        """Mark the task as completed and set the completion timestamp"""
+        self.is_completed = True
+        self.completed_at = datetime.utcnow()
\ No newline at end of file
diff --git a/app/schemas/__init__.py b/app/schemas/__init__.py
new file mode 100644
index 0000000..9cd6657
--- /dev/null
+++ b/app/schemas/__init__.py
@@ -0,0 +1,3 @@
+from app.schemas.task import TaskCreate, TaskUpdate, TaskInDB, TaskResponse, PriorityEnum
+
+__all__ = ["TaskCreate", "TaskUpdate", "TaskInDB", "TaskResponse", "PriorityEnum"]
\ No newline at end of file
diff --git a/app/schemas/task.py b/app/schemas/task.py
new file mode 100644
index 0000000..78f47d4
--- /dev/null
+++ b/app/schemas/task.py
@@ -0,0 +1,45 @@
+from datetime import datetime
+from enum import Enum
+from typing import Optional
+from pydantic import BaseModel, Field
+
+
+class PriorityEnum(int, Enum):
+    LOW = 1
+    MEDIUM = 2
+    HIGH = 3
+
+
+class TaskBase(BaseModel):
+    title: str = Field(..., min_length=1, max_length=255, description="Title of the task")
+    description: Optional[str] = Field(None, description="Detailed description of the task")
+    priority: PriorityEnum = Field(PriorityEnum.MEDIUM, description="Priority level of the task")
+    due_date: Optional[datetime] = Field(None, description="Due date for the task")
+
+
+class TaskCreate(TaskBase):
+    pass
+
+
+class TaskUpdate(BaseModel):
+    title: Optional[str] = Field(None, min_length=1, max_length=255)
+    description: Optional[str] = None
+    is_completed: Optional[bool] = None
+    priority: Optional[PriorityEnum] = None
+    due_date: Optional[datetime] = None
+
+
+class TaskInDB(TaskBase):
+    id: int
+    is_completed: bool
+    created_at: datetime
+    updated_at: datetime
+    completed_at: Optional[datetime] = None
+
+    class Config:
+        from_attributes = True
+
+
+class TaskResponse(TaskInDB):
+    """Task schema for response"""
+    pass
\ No newline at end of file
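A quick sketch of how these schemas behave in practice (values are illustrative); it shows the enum coercion used for `priority`, the partial-update pattern `update_task` relies on via `exclude_unset`, and the role of `from_attributes`:

```
from app.schemas.task import PriorityEnum, TaskCreate, TaskUpdate

# Incoming payloads are validated and coerced: the integer 3 becomes PriorityEnum.HIGH
task_in = TaskCreate.model_validate({"title": "Ship release", "priority": 3})
assert task_in.priority is PriorityEnum.HIGH
assert task_in.due_date is None

# Partial updates only carry the fields the client actually sent
patch = TaskUpdate(is_completed=True)
assert patch.model_dump(exclude_unset=True) == {"is_completed": True}

# from_attributes=True is what lets FastAPI build a TaskResponse directly
# from a SQLAlchemy Task row when serialising responses.
```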
diff --git a/main.py b/main.py
new file mode 100644
index 0000000..53a4bed
--- /dev/null
+++ b/main.py
@@ -0,0 +1,42 @@
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+
+from app.api.routes import router as api_router
+from app.core.config import settings
+
+def create_app() -> FastAPI:
+    app = FastAPI(
+        title=settings.PROJECT_NAME,
+        description=settings.PROJECT_DESCRIPTION,
+        version=settings.VERSION,
+        docs_url="/docs",
+        redoc_url="/redoc",
+    )
+
+    # Set all CORS enabled origins
+    app.add_middleware(
+        CORSMiddleware,
+        allow_origins=["*"],
+        allow_credentials=True,
+        allow_methods=["*"],
+        allow_headers=["*"],
+    )
+
+    # Include routers
+    app.include_router(api_router)
+
+    return app
+
+
+app = create_app()
+
+
+@app.get("/health", tags=["Health"])
+def health_check():
+    return {"status": "healthy"}
+
+
+if __name__ == "__main__":
+    import uvicorn
+
+    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
diff --git a/migrations/env.py b/migrations/env.py
new file mode 100644
index 0000000..da51639
--- /dev/null
+++ b/migrations/env.py
@@ -0,0 +1,82 @@
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+from app.db.base import Base  # noqa
+target_metadata = Base.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well.  By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section),
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        # Configure for SQLite to support column and table alterations
+        is_sqlite = connection.dialect.name == "sqlite"
+
+        context.configure(
+            connection=connection,
+            target_metadata=target_metadata,
+            # Enable SQLite 'batch mode' for alterations
+            render_as_batch=is_sqlite,
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
\ No newline at end of file
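`render_as_batch` is enabled above because SQLite supports only a narrow subset of `ALTER TABLE`; with batch mode, later revisions rewrite the table through a temporary copy. A hypothetical follow-up migration under that setting might look like this (the `notes` column is invented for illustration, and the revision identifiers are omitted):

```
# Hypothetical later revision using batch mode (works on SQLite and other backends).
import sqlalchemy as sa
from alembic import op


def upgrade():
    with op.batch_alter_table("task") as batch_op:
        batch_op.add_column(sa.Column("notes", sa.Text(), nullable=True))


def downgrade():
    with op.batch_alter_table("task") as batch_op:
        batch_op.drop_column("notes")
```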
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
new file mode 100644
index 0000000..1e4564e
--- /dev/null
+++ b/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+    ${downgrades if downgrades else "pass"}
\ No newline at end of file
diff --git a/migrations/versions/202308201234_create_task_table.py b/migrations/versions/202308201234_create_task_table.py
new file mode 100644
index 0000000..22083af
--- /dev/null
+++ b/migrations/versions/202308201234_create_task_table.py
@@ -0,0 +1,40 @@
+"""create task table
+
+Revision ID: 202308201234
+Revises:
+Create Date: 2023-08-20 12:34:56.789012
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '202308201234'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.create_table(
+        'task',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('title', sa.String(length=255), nullable=False),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.Column('is_completed', sa.Boolean(), nullable=False, default=False),
+        sa.Column('priority', sa.Integer(), nullable=False, default=2),
+        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.func.now()),
+        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.func.now(), onupdate=sa.func.now()),
+        sa.Column('completed_at', sa.DateTime(), nullable=True),
+        sa.Column('due_date', sa.DateTime(), nullable=True),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_task_id'), 'task', ['id'], unique=False)
+    op.create_index(op.f('ix_task_title'), 'task', ['title'], unique=False)
+
+
+def downgrade():
+    op.drop_index(op.f('ix_task_title'), table_name='task')
+    op.drop_index(op.f('ix_task_id'), table_name='task')
+    op.drop_table('task')
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..a619c0a
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,9 @@
+fastapi>=0.103.1,<0.104.0
+uvicorn>=0.23.2,<0.24.0
+sqlalchemy>=2.0.20,<2.1.0
+alembic>=1.12.0,<1.13.0
+pydantic>=2.3.0,<2.4.0
+pydantic-settings>=2.0.0,<2.1.0
+python-multipart>=0.0.6,<0.1.0
+python-dotenv>=1.0.0,<1.1.0
+ruff>=0.0.287,<0.1.0