Implement Task Manager API with FastAPI and SQLite

This commit is contained in:
Automated Action 2025-05-24 15:58:57 +00:00
parent 78daa7aa1b
commit 43fbc5930c
21 changed files with 616 additions and 2 deletions

131
README.md
View File

@ -1,3 +1,130 @@
# FastAPI Application
# Task Manager API
This is a FastAPI application bootstrapped by BackendIM, the AI-powered backend generation platform.
A RESTful API for managing tasks built with FastAPI and SQLite.
## Features
- Create, read, update, and delete tasks
- Filter tasks by completion status and priority
- Health check endpoint
- SQLite database with SQLAlchemy ORM
- Database migrations with Alembic
## Tech Stack
- **Framework**: FastAPI
- **Database**: SQLite
- **ORM**: SQLAlchemy
- **Migrations**: Alembic
## Project Structure
```
.
├── app
│ ├── api
│ │ ├── endpoints
│ │ │ ├── health.py
│ │ │ └── tasks.py
│ │ └── api.py
│ ├── core
│ ├── db
│ │ └── database.py
│ ├── models
│ │ └── task.py
│ ├── schemas
│ │ └── task.py
│ └── main.py
├── migrations
│ ├── versions
│ │ └── 001_create_tasks_table.py
│ ├── env.py
│ └── script.py.mako
├── storage
│ └── db
├── alembic.ini
├── main.py
├── README.md
└── requirements.txt
```
## Installation
1. Clone the repository:
```bash
git clone https://github.com/yourusername/taskmanagerapi.git
cd taskmanagerapi
```
2. Install dependencies:
```bash
pip install -r requirements.txt
```
3. Run database migrations:
```bash
alembic upgrade head
```
## Usage
### Running the API
```bash
# Development with auto-reload
python main.py
# Or using uvicorn directly
uvicorn app.main:app --reload
```
The API will be available at `http://localhost:8000`.
### API Documentation
FastAPI automatically generates interactive API documentation:
- Swagger UI: `http://localhost:8000/docs`
- ReDoc: `http://localhost:8000/redoc`
## API Endpoints
### Health Check
- `GET /health` - Check API and database health
### Tasks
- `GET /tasks` - List all tasks (with optional filtering)
- `POST /tasks` - Create a new task
- `GET /tasks/{task_id}` - Get a specific task
- `PUT /tasks/{task_id}` - Update a task
- `DELETE /tasks/{task_id}` - Delete a task
## Task Model
```json
{
"id": 1,
"title": "Example Task",
"description": "This is an example task",
"completed": false,
"priority": 1,
"due_date": "2023-09-30T00:00:00Z",
"created_at": "2023-09-25T15:00:00Z",
"updated_at": "2023-09-25T15:00:00Z"
}
```
### Priority Levels
- `0`: Low
- `1`: Medium
- `2`: High
## License
This project is licensed under the MIT License.

105
alembic.ini Normal file
View File

@ -0,0 +1,105 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = migrations
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = sqlite:////app/storage/db/db.sqlite
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

0
app/__init__.py Normal file
View File

0
app/api/__init__.py Normal file
View File

8
app/api/api.py Normal file
View File

@ -0,0 +1,8 @@
from fastapi import APIRouter

from app.api.endpoints import tasks, health

# Aggregate router for the whole API; app.main mounts this onto the
# FastAPI application. Each endpoint module contributes its own router.
api_router = APIRouter()
api_router.include_router(health.router, prefix="/health", tags=["health"])
api_router.include_router(tasks.router, prefix="/tasks", tags=["tasks"])

View File

View File

@ -0,0 +1,24 @@
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.orm import Session

from app.db.database import get_db

router = APIRouter()


@router.get("/", response_model=dict)
def health_check(db: Session = Depends(get_db)):
    """
    Health check endpoint to verify API and database connectivity.

    Returns:
        dict with "status": "ok" and "database": "healthy" when a trivial
        query succeeds, "unhealthy" otherwise.
    """
    try:
        # SQLAlchemy 2.0 (pinned in requirements.txt) rejects plain-string
        # SQL in Session.execute(); without text() this raised and the
        # check always reported "unhealthy".
        db.execute(text("SELECT 1"))
        db_status = "healthy"
    except Exception:
        # Deliberately broad: any failure to reach the DB means unhealthy,
        # but the endpoint itself must still answer.
        db_status = "unhealthy"
    return {
        "status": "ok",
        "database": db_status,
    }

View File

@ -0,0 +1,90 @@
from typing import List, Optional

from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session

from app.db.database import get_db
from app.models.task import Task as TaskModel
from app.schemas.task import TaskCreate, TaskUpdate, Task
router = APIRouter()
@router.post("/", response_model=Task, status_code=status.HTTP_201_CREATED)
def create_task(task: TaskCreate, db: Session = Depends(get_db)):
    """
    Create a new task.

    Persists the task, commits, and returns the refreshed row so the
    response includes DB-generated fields (id, created_at, updated_at).
    """
    # Pydantic v2 (required by requirements.txt) deprecates .dict();
    # model_dump() is the supported replacement.
    db_task = TaskModel(**task.model_dump())
    db.add(db_task)
    db.commit()
    db.refresh(db_task)
    return db_task
@router.get("/", response_model=List[Task])
def read_tasks(
    skip: int = 0,
    limit: int = 100,
    # Optional[...] is the correct annotation for a None default; a bare
    # `bool = None` mistypes the parameter for validation and tooling.
    completed: Optional[bool] = None,
    priority: Optional[int] = None,
    db: Session = Depends(get_db),
):
    """
    Retrieve tasks with optional filtering.

    Query params:
        skip/limit: pagination window.
        completed: keep only tasks with this completion flag, if given.
        priority: keep only tasks with this priority (0-2), if given.
    Results are ordered newest-first by created_at.
    """
    query = db.query(TaskModel)
    # Apply filters only when the client supplied them.
    if completed is not None:
        query = query.filter(TaskModel.completed == completed)
    if priority is not None:
        query = query.filter(TaskModel.priority == priority)
    # Apply ordering and pagination.
    tasks = query.order_by(TaskModel.created_at.desc()).offset(skip).limit(limit).all()
    return tasks
@router.get("/{task_id}", response_model=Task)
def read_task(task_id: int, db: Session = Depends(get_db)):
    """
    Return the task with the given ID.

    Raises:
        HTTPException: 404 when no task with ``task_id`` exists.
    """
    found = db.query(TaskModel).filter(TaskModel.id == task_id).first()
    if found is None:
        raise HTTPException(status_code=404, detail="Task not found")
    return found
@router.put("/{task_id}", response_model=Task)
def update_task(task_id: int, task_update: TaskUpdate, db: Session = Depends(get_db)):
    """
    Partially update a task.

    Only fields the client actually sent are applied; omitted fields are
    left untouched.

    Raises:
        HTTPException: 404 when no task with ``task_id`` exists.
    """
    db_task = db.query(TaskModel).filter(TaskModel.id == task_id).first()
    if db_task is None:
        raise HTTPException(status_code=404, detail="Task not found")
    # Pydantic v2: model_dump() replaces the deprecated .dict().
    # exclude_unset keeps fields the client did not send out of the update.
    update_data = task_update.model_dump(exclude_unset=True)
    for key, value in update_data.items():
        setattr(db_task, key, value)
    db.commit()
    db.refresh(db_task)
    return db_task
@router.delete("/{task_id}", status_code=status.HTTP_204_NO_CONTENT, response_model=None)
def delete_task(task_id: int, db: Session = Depends(get_db)):
    """
    Delete the task with the given ID.

    Returns 204 No Content on success.

    Raises:
        HTTPException: 404 when no task with ``task_id`` exists.
    """
    target = db.query(TaskModel).filter(TaskModel.id == task_id).first()
    if target is None:
        raise HTTPException(status_code=404, detail="Task not found")
    db.delete(target)
    db.commit()
    return None

0
app/core/__init__.py Normal file
View File

0
app/db/__init__.py Normal file
View File

27
app/db/database.py Normal file
View File

@ -0,0 +1,27 @@
from pathlib import Path

from sqlalchemy import create_engine
# declarative_base moved to sqlalchemy.orm in 1.4; importing it from
# sqlalchemy.ext.declarative is deprecated under the pinned SQLAlchemy 2.x.
from sqlalchemy.orm import declarative_base, sessionmaker

# Create database directory if it doesn't exist.
DB_DIR = Path("/app") / "storage" / "db"
DB_DIR.mkdir(parents=True, exist_ok=True)

SQLALCHEMY_DATABASE_URL = f"sqlite:///{DB_DIR}/db.sqlite"

engine = create_engine(
    SQLALCHEMY_DATABASE_URL,
    # SQLite connections refuse cross-thread use by default; FastAPI may
    # run a request and its dependency teardown on different threads.
    connect_args={"check_same_thread": False},
)

SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

Base = declarative_base()


def get_db():
    """FastAPI dependency: yield a session and always close it afterwards."""
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()

26
app/main.py Normal file
View File

@ -0,0 +1,26 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from app.api.api import api_router
from app.db.database import engine, Base

# Create the database tables if they don't exist.
# NOTE(review): runs at import time; Alembic migrations also create this
# schema — create_all is a no-op for tables that already exist, so the two
# coexist, but migrations should remain the canonical schema source.
Base.metadata.create_all(bind=engine)

app = FastAPI(
    title="Task Manager API",
    description="API for managing tasks",
    version="0.1.0",
)

# Configure CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, you should restrict this to specific origins
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include API router
app.include_router(api_router)

1
app/models/__init__.py Normal file
View File

@ -0,0 +1 @@
from app.models.task import Task # noqa

16
app/models/task.py Normal file
View File

@ -0,0 +1,16 @@
from datetime import datetime
from sqlalchemy import Column, Integer, String, Boolean, DateTime, Text
from app.db.database import Base
class Task(Base):
    """SQLAlchemy model for one row of the ``tasks`` table."""

    __tablename__ = "tasks"

    id = Column(Integer, primary_key=True, index=True)
    # Required short title.
    title = Column(String(255), nullable=False)
    # Optional free-form details.
    description = Column(Text, nullable=True)
    completed = Column(Boolean, default=False)
    priority = Column(Integer, default=0)  # 0: Low, 1: Medium, 2: High
    due_date = Column(DateTime, nullable=True)
    # NOTE(review): datetime.utcnow yields naive timestamps and is
    # deprecated in Python 3.12; switching to timezone-aware values would
    # change stored data, so flagging rather than changing it here.
    created_at = Column(DateTime, default=datetime.utcnow)
    # onupdate refreshes the timestamp on every ORM update.
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

1
app/schemas/__init__.py Normal file
View File

@ -0,0 +1 @@
from app.schemas.task import TaskBase, TaskCreate, TaskUpdate, TaskInDB, Task # noqa

36
app/schemas/task.py Normal file
View File

@ -0,0 +1,36 @@
from datetime import datetime
from typing import Optional
from pydantic import BaseModel, Field
class TaskBase(BaseModel):
    """Fields common to task create/read schemas."""

    title: str = Field(..., max_length=255, description="Title of the task")
    description: Optional[str] = Field(None, description="Detailed description of the task")
    completed: bool = Field(False, description="Whether the task is completed")
    priority: int = Field(0, ge=0, le=2, description="Priority level (0: Low, 1: Medium, 2: High)")
    due_date: Optional[datetime] = Field(None, description="Due date of the task")
class TaskCreate(TaskBase):
    """Request body for POST /tasks; identical to TaskBase."""

    pass
class TaskUpdate(BaseModel):
    """Request body for PUT /tasks/{id}.

    Every field is optional so clients can send only what changed;
    the endpoint applies fields with exclude_unset semantics.
    """

    title: Optional[str] = Field(None, max_length=255, description="Title of the task")
    description: Optional[str] = Field(None, description="Detailed description of the task")
    completed: Optional[bool] = Field(None, description="Whether the task is completed")
    priority: Optional[int] = Field(None, ge=0, le=2, description="Priority level (0: Low, 1: Medium, 2: High)")
    due_date: Optional[datetime] = Field(None, description="Due date of the task")
class TaskInDB(TaskBase):
    """Task as stored in the database, including server-generated fields."""

    id: int
    created_at: datetime
    updated_at: datetime

    class Config:
        # Pydantic v2 (pinned in requirements.txt) renamed orm_mode to
        # from_attributes; the old name only works via a deprecation shim
        # and warns at import time.
        from_attributes = True
class Task(TaskInDB):
    """Response schema returned by the API; identical to TaskInDB."""

    pass

4
main.py Normal file
View File

@ -0,0 +1,4 @@
import uvicorn

if __name__ == "__main__":
    # Development entry point; reload=True restarts the server on source
    # changes. 0.0.0.0 exposes it on all interfaces (container-friendly).
    uvicorn.run("app.main:app", host="0.0.0.0", port=8000, reload=True)

81
migrations/env.py Normal file
View File

@ -0,0 +1,81 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

from app.db.database import Base
# Importing the models registers their tables on Base.metadata so that
# `alembic revision --autogenerate` can see them.
from app.models import Task  # noqa

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the Alembic context with only a database URL rather than
    an Engine, so no DBAPI needs to be importable. Calls to
    context.execute() emit the SQL to the script output instead of a
    live connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        is_sqlite = connection.dialect.name == 'sqlite'
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            # SQLite cannot ALTER most column attributes; batch mode makes
            # Alembic recreate the table instead.
            render_as_batch=is_sqlite,
            compare_type=True,
        )
        with context.begin_transaction():
            context.run_migrations()
# Alembic enters offline mode for `alembic upgrade --sql` (SQL script
# generation); otherwise migrations run against a live connection.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

24
migrations/script.py.mako Normal file
View File

@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}

## Mako-only comments (##) below are stripped at render time and never
## appear in generated migration files.
## upgrade/downgrade bodies are filled by autogenerate, or "pass" stubs.
def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,37 @@
"""create tasks table
Revision ID: 001
Revises:
Create Date: 2023-09-25
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``tasks`` table and its primary-key index."""
    op.create_table(
        'tasks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        # `default=` / `onupdate=` on sa.Column are client-side SQLAlchemy
        # settings and emit NO DDL in a migration; server_default bakes the
        # defaults into the schema so non-ORM writers behave the same.
        sa.Column('completed', sa.Boolean(), server_default=sa.false()),
        sa.Column('priority', sa.Integer(), server_default='0'),
        sa.Column('due_date', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        # updated_at refresh-on-update is handled by the ORM model's
        # onupdate hook; the server default covers the initial insert.
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_tasks_id'), 'tasks', ['id'], unique=False)
def downgrade():
    """Reverse of upgrade(): drop the index, then the tasks table."""
    op.drop_index(op.f('ix_tasks_id'), table_name='tasks')
    op.drop_table('tasks')

7
requirements.txt Normal file
View File

@ -0,0 +1,7 @@
fastapi>=0.103.1
uvicorn>=0.23.2
sqlalchemy>=2.0.20
pydantic>=2.3.0
alembic>=1.12.0
python-dotenv>=1.0.0
ruff>=0.0.286