Add FastAPI todo application with SQLite database

- Implement CRUD operations for todos
- Add FastAPI with CORS middleware
- Set up SQLite database with Alembic migrations
- Include health endpoint and API documentation
- Configure Ruff for code linting
- Update README with comprehensive documentation
Automated Action 2025-06-17 07:03:02 +00:00
parent 8327bbf6fc
commit ae91c493c6
16 changed files with 439 additions and 2 deletions

README.md

@@ -1,3 +1,72 @@
-# FastAPI Application
+# Todo API
-This is a FastAPI application bootstrapped by BackendIM, the AI-powered backend generation platform.
+A simple todo application built with FastAPI and SQLite.
## Features
- Create, read, update, and delete todos
- SQLite database with Alembic migrations
- FastAPI with automatic API documentation
- Health check endpoint
- CORS enabled for all origins
## Installation
1. Install dependencies:
```bash
pip install -r requirements.txt
```
2. Run database migrations:
```bash
alembic upgrade head
```
3. Start the application:
```bash
uvicorn main:app --host 0.0.0.0 --port 8000
```
## API Endpoints
- `GET /` - Root endpoint with API information
- `GET /health` - Health check endpoint
- `GET /docs` - Interactive API documentation
- `GET /redoc` - Alternative API documentation
- `GET /openapi.json` - OpenAPI specification
### Todo Endpoints
- `POST /todos/` - Create a new todo
- `GET /todos/` - Get all todos (with pagination)
- `GET /todos/{todo_id}` - Get a specific todo
- `PUT /todos/{todo_id}` - Update a todo
- `DELETE /todos/{todo_id}` - Delete a todo
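As a quick sanity check, the todo endpoints can be exercised from Python once the server is running. The sketch below assumes the API is reachable at `http://localhost:8000` and that the `requests` package is installed (it is not listed in `requirements.txt`):

```python
import requests  # assumed extra dependency; not part of requirements.txt

BASE = "http://localhost:8000"

# Health check
print(requests.get(f"{BASE}/health").json())

# Create a todo, then fetch the first page of todos
created = requests.post(f"{BASE}/todos/", json={"title": "Buy milk"}).json()
todo_id = created["id"]
print(requests.get(f"{BASE}/todos/", params={"skip": 0, "limit": 10}).json())

# Mark it completed, then delete it
requests.put(f"{BASE}/todos/{todo_id}", json={"completed": True})
requests.delete(f"{BASE}/todos/{todo_id}")
```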
## Data Model
Todo items have the following fields:
- `id`: Unique identifier (auto-generated)
- `title`: Todo title (required)
- `description`: Optional description
- `completed`: Boolean completion status (default: false)
- `created_at`: Creation timestamp
- `updated_at`: Last update timestamp
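For illustration, a single todo serialized by the API looks roughly like the dictionary below; the field names follow the model above, while the values are invented:

```python
# Illustrative values only; the field names match the data model above.
example_todo = {
    "id": 1,
    "title": "Buy milk",
    "description": None,                        # optional
    "completed": False,                         # defaults to false
    "created_at": "2024-01-01T12:00:00+00:00",  # set when the todo is created
    "updated_at": None,                         # set when the todo is updated
}
```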
## Environment Variables
No environment variables are required for basic operation. The SQLite database is stored at `/app/storage/db/db.sqlite`.
## Development
To run with auto-reload during development:
```bash
uvicorn main:app --reload --host 0.0.0.0 --port 8000
```
## API Documentation
Once the server is running, you can access:
- Interactive documentation: http://localhost:8000/docs
- Alternative documentation: http://localhost:8000/redoc
- OpenAPI specification: http://localhost:8000/openapi.json

alembic.ini Normal file (+42 lines)

@@ -0,0 +1,42 @@
[alembic]
script_location = alembic
prepend_sys_path = .
version_path_separator = os
sqlalchemy.url = sqlite:////app/storage/db/db.sqlite
[post_write_hooks]
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

alembic/env.py Normal file (+83 lines)

@@ -0,0 +1,83 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
import sys
from pathlib import Path

# Add the project root to the Python path
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))

from app.db.base import Base
from app.models.todo import Todo

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

alembic/script.py.mako Normal file (+24 lines)

@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

alembic/versions/… Normal file (+37 lines)

@@ -0,0 +1,37 @@
"""Initial todo table
Revision ID: 001
Revises:
Create Date: 2024-01-01 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create todos table
op.create_table(
'todos',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('description', sa.String(), nullable=True),
sa.Column('completed', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_todos_id'), 'todos', ['id'], unique=False)
op.create_index(op.f('ix_todos_title'), 'todos', ['title'], unique=False)
def downgrade() -> None:
op.drop_index(op.f('ix_todos_title'), table_name='todos')
op.drop_index(op.f('ix_todos_id'), table_name='todos')
op.drop_table('todos')

app/__init__.py Normal file (empty)

app/db/__init__.py Normal file (empty)

app/db/base.py Normal file (+3 lines)

@@ -0,0 +1,3 @@
from sqlalchemy.orm import declarative_base  # moved out of sqlalchemy.ext.declarative in SQLAlchemy 1.4+

Base = declarative_base()

app/db/session.py Normal file (+19 lines)

@@ -0,0 +1,19 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from pathlib import Path

from app.db.base import Base

DB_DIR = Path("/app/storage/db")
DB_DIR.mkdir(parents=True, exist_ok=True)

SQLALCHEMY_DATABASE_URL = f"sqlite:///{DB_DIR}/db.sqlite"

engine = create_engine(
    SQLALCHEMY_DATABASE_URL,
    connect_args={"check_same_thread": False}
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

Base.metadata.create_all(bind=engine)

app/models/__init__.py Normal file (empty)

app/models/todo.py Normal file (+14 lines)

@@ -0,0 +1,14 @@
from sqlalchemy import Column, Integer, String, Boolean, DateTime
from sqlalchemy.sql import func

from app.db.base import Base


class Todo(Base):
    __tablename__ = "todos"

    id = Column(Integer, primary_key=True, index=True)
    title = Column(String, index=True, nullable=False)
    description = Column(String, nullable=True)
    completed = Column(Boolean, default=False, nullable=False)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())

app/schemas/__init__.py Normal file (empty)

app/schemas/todo.py Normal file (+28 lines)

@@ -0,0 +1,28 @@
from pydantic import BaseModel
from typing import Optional
from datetime import datetime


class TodoBase(BaseModel):
    title: str
    description: Optional[str] = None
    completed: bool = False


class TodoCreate(TodoBase):
    pass


class TodoUpdate(BaseModel):
    title: Optional[str] = None
    description: Optional[str] = None
    completed: Optional[bool] = None


class TodoResponse(TodoBase):
    id: int
    created_at: datetime
    updated_at: Optional[datetime] = None

    class Config:
        from_attributes = True

main.py Normal file (+101 lines)

@@ -0,0 +1,101 @@
from fastapi import FastAPI, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from sqlalchemy.orm import Session
from typing import List

from app.db.session import SessionLocal
from app.models.todo import Todo as TodoModel
from app.schemas.todo import TodoCreate, TodoUpdate, TodoResponse

app = FastAPI(
    title="Todo API",
    description="A simple todo application API",
    version="1.0.0",
    openapi_url="/openapi.json"
)

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()


@app.get("/")
async def root():
    return {
        "title": "Todo API",
        "documentation": "/docs",
        "health": "/health"
    }


@app.get("/health")
async def health_check():
    return {"status": "healthy", "service": "Todo API"}


@app.post("/todos/", response_model=TodoResponse)
async def create_todo(todo: TodoCreate, db: Session = Depends(get_db)):
    db_todo = TodoModel(**todo.model_dump())  # Pydantic v2: model_dump() replaces the deprecated dict()
    db.add(db_todo)
    db.commit()
    db.refresh(db_todo)
    return db_todo


@app.get("/todos/", response_model=List[TodoResponse])
async def read_todos(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    todos = db.query(TodoModel).offset(skip).limit(limit).all()
    return todos


@app.get("/todos/{todo_id}", response_model=TodoResponse)
async def read_todo(todo_id: int, db: Session = Depends(get_db)):
    todo = db.query(TodoModel).filter(TodoModel.id == todo_id).first()
    if todo is None:
        raise HTTPException(status_code=404, detail="Todo not found")
    return todo


@app.put("/todos/{todo_id}", response_model=TodoResponse)
async def update_todo(todo_id: int, todo: TodoUpdate, db: Session = Depends(get_db)):
    db_todo = db.query(TodoModel).filter(TodoModel.id == todo_id).first()
    if db_todo is None:
        raise HTTPException(status_code=404, detail="Todo not found")
    update_data = todo.model_dump(exclude_unset=True)  # only the fields the client actually sent
    for field, value in update_data.items():
        setattr(db_todo, field, value)
    db.commit()
    db.refresh(db_todo)
    return db_todo


@app.delete("/todos/{todo_id}")
async def delete_todo(todo_id: int, db: Session = Depends(get_db)):
    todo = db.query(TodoModel).filter(TodoModel.id == todo_id).first()
    if todo is None:
        raise HTTPException(status_code=404, detail="Todo not found")
    db.delete(todo)
    db.commit()
    return {"message": "Todo deleted successfully"}


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)

requirements.txt Normal file (+6 lines)

@@ -0,0 +1,6 @@
fastapi==0.104.1
uvicorn[standard]==0.24.0
sqlalchemy==2.0.23
alembic==1.12.1
pydantic==2.5.0
ruff==0.1.6

ruff.toml Normal file (+11 lines)

@@ -0,0 +1,11 @@
# Note: in a standalone ruff.toml the settings live at the top level;
# the [tool.ruff] prefix is only used inside pyproject.toml.
line-length = 88
target-version = "py311"

[lint]
select = ["E", "F", "W", "I", "N", "UP", "YTT", "ANN", "S", "BLE", "FBT", "B", "A", "COM", "C4", "DTZ", "T10", "DJ", "EM", "EXE", "FA", "ISC", "ICN", "G", "INP", "PIE", "T20", "PYI", "PT", "Q", "RSE", "RET", "SLF", "SLOT", "SIM", "TID", "TCH", "INT", "ARG", "PTH", "TD", "FIX", "ERA", "PD", "PGH", "PL", "TRY", "FLY", "NPY", "AIR", "PERF", "FURB", "LOG", "RUF"]
ignore = ["ANN101", "ANN102", "ANN401", "S101", "TD002", "TD003", "FIX002"]

[lint.per-file-ignores]
"alembic/versions/*.py" = ["ANN", "INP001"]
"alembic/env.py" = ["ANN", "INP001"]