Implement simple REST API with FastAPI and SQLite

Automated Action 2025-05-17 16:21:55 +00:00
parent ba21305da7
commit 244afc42b4
26 changed files with 804 additions and 2 deletions

143
README.md

@@ -1,3 +1,142 @@
# Simple REST API

A simple REST API built with FastAPI and SQLite.
## Features
- FastAPI with automatic OpenAPI documentation
- SQLite database with SQLAlchemy ORM
- Alembic migrations
- CRUD operations for items
- Health check endpoint
- Input validation and error handling
- Pagination and filtering
## Project Structure
```
.
├── alembic.ini # Alembic configuration
├── app # Application package
│ ├── api # API endpoints
│ │ ├── endpoints # API route handlers
│ │ └── router.py # API router
│ ├── core # Core application code
│ │ └── config.py # Application settings
│ ├── crud # Database CRUD operations
│ ├── database # Database configuration
│ │ ├── crud_base.py # Base CRUD class
│ │ └── session.py # Database session
│ ├── models # SQLAlchemy models
│ │ └── item.py # Item model
│ ├── schemas # Pydantic schemas
│ │ └── item.py # Item schemas
│ └── utils # Utility functions
├── main.py # Application entry point
├── migrations # Alembic migrations
│ ├── env.py # Alembic environment
│ ├── README # Migrations README
│ ├── script.py.mako # Migration script template
│ └── versions # Migration versions
│ └── *.py # Migration scripts
└── requirements.txt # Project dependencies
```
## Getting Started
### Prerequisites
- Python 3.8 or higher
- SQLite (included with Python)
### Installation
1. Clone the repository:
```bash
git clone https://github.com/yourusername/simple-rest-api.git
cd simple-rest-api
```
2. Create a virtual environment:
```bash
python3 -m venv venv
source venv/bin/activate # On Windows: venv\Scripts\activate
```
3. Install dependencies:
```bash
pip install -r requirements.txt
```
4. Apply database migrations:
```bash
alembic upgrade head
```
### Running the API
Start the application with:
```bash
uvicorn main:app --reload
```
The API will be available at http://localhost:8000.
- API Documentation: http://localhost:8000/docs
- Alternative Documentation: http://localhost:8000/redoc
## API Endpoints
### Health Check
- `GET /api/v1/health` - Check if the API is running
### Items
- `GET /api/v1/items` - List all items (with pagination and filtering)
- `GET /api/v1/items/{item_id}` - Get a specific item
- `POST /api/v1/items` - Create a new item
- `PUT /api/v1/items/{item_id}` - Update an item
- `DELETE /api/v1/items/{item_id}` - Delete an item
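The endpoints above can be exercised with any HTTP client. The sketch below uses only the Python standard library and assumes the server is already running locally on port 8000; the item name and price are arbitrary example values.
```python
import json
import urllib.request

BASE = "http://localhost:8000/api/v1"

# Create an item (the trailing slash matches the route definition).
payload = json.dumps({"name": "widget", "price": 1299}).encode()
request = urllib.request.Request(
    f"{BASE}/items/",
    data=payload,
    headers={"Content-Type": "application/json"},
    method="POST",
)
with urllib.request.urlopen(request) as response:
    created = json.load(response)
    print("created item", created["id"])

# List active items with pagination.
with urllib.request.urlopen(f"{BASE}/items/?limit=10&active=true") as response:
    print(json.load(response))
```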
## Environment Variables
You can configure the application using environment variables:
- `PROJECT_NAME` - Application name (default: "SimpleRestAPI")
- `BACKEND_CORS_ORIGINS` - CORS origins, comma-separated (default: empty)
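As an illustrative sketch (the value is arbitrary), a plain string setting such as `PROJECT_NAME` can be overridden through the environment before the settings object is created; `BACKEND_CORS_ORIGINS` uses the same mechanism together with the comma-separated parsing in `app/core/config.py`.
```python
import os

from app.core.config import Settings

# Hypothetical value; pydantic-settings reads the environment when Settings() is built.
os.environ["PROJECT_NAME"] = "MyItemsAPI"

assert Settings().PROJECT_NAME == "MyItemsAPI"
```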
## Development
### Adding a New Migration
```bash
alembic revision --autogenerate -m "description of changes"
```
### Applying Migrations
```bash
alembic upgrade head
```
### Code Linting
```bash
ruff check .
```
### Auto-fixing Lint Issues
```bash
ruff check . --fix
```
## License
This project is licensed under the MIT License.

102
alembic.ini Normal file

@@ -0,0 +1,102 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = migrations
# template used to generate migration files
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d%%(second).2d_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep.
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# SQLite URL example
sqlalchemy.url = sqlite:////app/storage/db/db.sqlite
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

0
app/__init__.py Normal file

0
app/api/__init__.py Normal file

0
app/api/endpoints/__init__.py Normal file

19
app/api/endpoints/health.py Normal file

@@ -0,0 +1,19 @@
from fastapi import APIRouter, status
from pydantic import BaseModel
router = APIRouter()
class HealthResponse(BaseModel):
status: str
@router.get(
"",
response_model=HealthResponse,
status_code=status.HTTP_200_OK,
summary="Health check endpoint",
description="Check if the API is running",
)
async def health_check() -> HealthResponse:
return HealthResponse(status="ok")
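A minimal in-process check of this endpoint could look like the sketch below. It assumes FastAPI's `TestClient` (which needs the `httpx` package) is installed and that the configured database directory under `/app/storage/db` can be created, since importing `main` sets up the SQLite engine.
```python
from fastapi.testclient import TestClient

from main import app

client = TestClient(app)


def test_health_check() -> None:
    response = client.get("/api/v1/health")
    assert response.status_code == 200
    assert response.json() == {"status": "ok"}
```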

132
app/api/endpoints/items.py Normal file

@@ -0,0 +1,132 @@
from typing import Any, List, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from app import crud, schemas
from app.database.session import get_db
router = APIRouter()
@router.get(
"/",
response_model=List[schemas.Item],
status_code=status.HTTP_200_OK,
summary="Get all items",
description="Retrieve all items with pagination and filtering options",
)
def read_items(
db: Session = Depends(get_db),
skip: int = 0,
limit: int = 100,
active: Optional[bool] = None,
) -> Any:
"""
Retrieve all items with pagination and optional active status filtering.
"""
if active is not None:
items = crud.item.get_multi_by_active(db, active=active, skip=skip, limit=limit)
else:
items = crud.item.get_multi(db, skip=skip, limit=limit)
return items
@router.get(
"/{item_id}",
response_model=schemas.Item,
status_code=status.HTTP_200_OK,
summary="Get an item by ID",
description="Get a specific item by its ID",
)
def read_item(
*,
db: Session = Depends(get_db),
item_id: int,
) -> Any:
"""
Get an item by ID.
"""
item = crud.item.get(db=db, id=item_id)
if not item:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Item not found",
)
return item
@router.post(
"/",
response_model=schemas.Item,
status_code=status.HTTP_201_CREATED,
summary="Create a new item",
description="Create a new item with the provided data",
)
def create_item(
*,
db: Session = Depends(get_db),
item_in: schemas.ItemCreate,
) -> Any:
"""
Create a new item.
"""
item = crud.item.get_by_name(db=db, name=item_in.name)
if item:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"An item with the name '{item_in.name}' already exists",
)
item = crud.item.create(db=db, obj_in=item_in)
return item
@router.put(
"/{item_id}",
response_model=schemas.Item,
status_code=status.HTTP_200_OK,
summary="Update an item",
description="Update an item with the provided data",
)
def update_item(
*,
db: Session = Depends(get_db),
item_id: int,
item_in: schemas.ItemUpdate,
) -> Any:
"""
Update an item.
"""
item = crud.item.get(db=db, id=item_id)
if not item:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Item not found",
)
item = crud.item.update(db=db, db_obj=item, obj_in=item_in)
return item
@router.delete(
"/{item_id}",
status_code=status.HTTP_204_NO_CONTENT,
response_model=None,
summary="Delete an item",
description="Delete an item by its ID",
)
def delete_item(
*,
db: Session = Depends(get_db),
item_id: int,
) -> None:
"""
Delete an item.
"""
item = crud.item.get(db=db, id=item_id)
if not item:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Item not found",
)
crud.item.remove(db=db, id=item_id)
return None

7
app/api/router.py Normal file

@@ -0,0 +1,7 @@
from fastapi import APIRouter
from app.api.endpoints import health, items
api_router = APIRouter()
api_router.include_router(health.router, prefix="/health", tags=["health"])
api_router.include_router(items.router, prefix="/items", tags=["items"])

0
app/core/__init__.py Normal file

29
app/core/config.py Normal file

@@ -0,0 +1,29 @@
from typing import List, Union
from pathlib import Path
from pydantic import AnyHttpUrl, validator
from pydantic_settings import BaseSettings
class Settings(BaseSettings):
PROJECT_NAME: str = "SimpleRestAPI"
API_V1_STR: str = "/api/v1"
# CORS
BACKEND_CORS_ORIGINS: List[AnyHttpUrl] = []
@validator("BACKEND_CORS_ORIGINS", pre=True)
def assemble_cors_origins(cls, v: Union[str, List[str]]) -> Union[List[str], str]:
if isinstance(v, str) and not v.startswith("["):
return [i.strip() for i in v.split(",")]
elif isinstance(v, (list, str)):
return v
raise ValueError(v)
# Database
DB_DIR: Path = Path("/app") / "storage" / "db"
SQLALCHEMY_DATABASE_URL: str = f"sqlite:///{DB_DIR}/db.sqlite"
class Config:
case_sensitive = True
env_file = ".env"
settings = Settings()

4
app/crud/__init__.py Normal file

@@ -0,0 +1,4 @@
from app.crud.crud_item import item
# For easy importing
__all__ = ["item"]

26
app/crud/crud_item.py Normal file

@@ -0,0 +1,26 @@
from typing import List, Optional
from sqlalchemy.orm import Session
from app.database.crud_base import CRUDBase
from app.models.item import Item
from app.schemas.item import ItemCreate, ItemUpdate
class CRUDItem(CRUDBase[Item, ItemCreate, ItemUpdate]):
def get_by_name(self, db: Session, *, name: str) -> Optional[Item]:
return db.query(Item).filter(Item.name == name).first()
def get_multi_by_active(
self, db: Session, *, active: bool, skip: int = 0, limit: int = 100
) -> List[Item]:
return (
db.query(Item)
.filter(Item.is_active == active)
.offset(skip)
.limit(limit)
.all()
)
item = CRUDItem(Item)
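Outside a request, the same CRUD object can be driven directly from a session, for example in a hypothetical seed script. The sketch below assumes migrations have been applied and the storage path from `app/core/config.py` is writable; the item name and price are arbitrary.
```python
from app import crud
from app.database.session import SessionLocal
from app.schemas.item import ItemCreate

db = SessionLocal()
try:
    # Create a sample item once, then page through the first few rows.
    if crud.item.get_by_name(db, name="sample") is None:
        crud.item.create(db, obj_in=ItemCreate(name="sample", price=1999))
    for item in crud.item.get_multi(db, skip=0, limit=10):
        print(item.id, item.name, item.price)
finally:
    db.close()
```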

0
app/database/__init__.py Normal file

64
app/database/crud_base.py Normal file

@@ -0,0 +1,64 @@
from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
from sqlalchemy.orm import Session
from app.database.session import Base
ModelType = TypeVar("ModelType", bound=Base)
CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)
class CRUDBase(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
def __init__(self, model: Type[ModelType]):
"""
CRUD object with default methods to Create, Read, Update, Delete (CRUD).
**Parameters**
* `model`: A SQLAlchemy model class
* `schema`: A Pydantic model (schema) class
"""
self.model = model
def get(self, db: Session, id: Any) -> Optional[ModelType]:
return db.query(self.model).filter(self.model.id == id).first()
def get_multi(
self, db: Session, *, skip: int = 0, limit: int = 100
) -> List[ModelType]:
return db.query(self.model).offset(skip).limit(limit).all()
def create(self, db: Session, *, obj_in: CreateSchemaType) -> ModelType:
obj_in_data = jsonable_encoder(obj_in)
db_obj = self.model(**obj_in_data)
db.add(db_obj)
db.commit()
db.refresh(db_obj)
return db_obj
def update(
self,
db: Session,
*,
db_obj: ModelType,
obj_in: Union[UpdateSchemaType, Dict[str, Any]]
) -> ModelType:
obj_data = jsonable_encoder(db_obj)
if isinstance(obj_in, dict):
update_data = obj_in
else:
update_data = obj_in.dict(exclude_unset=True)
for field in obj_data:
if field in update_data:
setattr(db_obj, field, update_data[field])
db.add(db_obj)
db.commit()
db.refresh(db_obj)
return db_obj
def remove(self, db: Session, *, id: int) -> ModelType:
obj = db.query(self.model).get(id)
db.delete(obj)
db.commit()
return obj

29
app/database/session.py Normal file

@@ -0,0 +1,29 @@
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from app.core.config import settings
# Ensure the database directory exists
settings.DB_DIR.mkdir(parents=True, exist_ok=True)
# Create SQLite engine with proper settings
engine = create_engine(
settings.SQLALCHEMY_DATABASE_URL,
connect_args={"check_same_thread": False}
)
# Create sessionmaker for database sessions
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Base class for all models
Base = declarative_base()
# Dependency for getting DB session in endpoints
def get_db():
db = SessionLocal()
try:
yield db
finally:
db.close()
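For quick local experiments the schema can also be built straight from the models instead of running Alembic; this is only a sketch (Alembic remains the intended migration path) and it assumes the configured storage directory can be created on the machine running it.
```python
from app.database.session import Base, engine
from app.models import Item  # noqa: F401  # registers the model on Base.metadata

# Create any tables that do not exist yet, directly from the SQLAlchemy models.
Base.metadata.create_all(bind=engine)
```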

4
app/models/__init__.py Normal file

@@ -0,0 +1,4 @@
from app.models.item import Item
# For easy importing
__all__ = ["Item"]

16
app/models/item.py Normal file

@@ -0,0 +1,16 @@
from sqlalchemy import Column, Integer, String, Boolean, DateTime
from sqlalchemy.sql import func
from app.database.session import Base
class Item(Base):
__tablename__ = "items"
id = Column(Integer, primary_key=True, index=True)
name = Column(String, index=True, nullable=False)
description = Column(String, nullable=True)
price = Column(Integer, nullable=False) # Price in cents
is_active = Column(Boolean, default=True)
created_at = Column(DateTime, server_default=func.now())
updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now())

4
app/schemas/__init__.py Normal file

@@ -0,0 +1,4 @@
from app.schemas.item import Item, ItemCreate, ItemUpdate
# For easy importing
__all__ = ["Item", "ItemCreate", "ItemUpdate"]

39
app/schemas/item.py Normal file

@@ -0,0 +1,39 @@
from datetime import datetime
from typing import Optional
from pydantic import BaseModel, Field
# Shared properties
class ItemBase(BaseModel):
name: str
description: Optional[str] = None
price: int = Field(..., description="Price in cents")
is_active: bool = True
# Properties to receive on item creation
class ItemCreate(ItemBase):
pass
# Properties to receive on item update
class ItemUpdate(ItemBase):
name: Optional[str] = None
price: Optional[int] = None
is_active: Optional[bool] = None
# Properties shared by models stored in DB
class ItemInDBBase(ItemBase):
id: int
created_at: datetime
updated_at: datetime
class Config:
from_attributes = True
# Properties to return to client
class Item(ItemInDBBase):
pass
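The split between the create and update schemas can be seen directly in a few lines; the names and prices below are arbitrary example values.
```python
from app.schemas.item import ItemCreate, ItemUpdate

# Creation requires name and price; description and is_active fall back to defaults.
new_item = ItemCreate(name="widget", price=1299)

# Updates may be partial: every field on ItemUpdate is optional.
price_change = ItemUpdate(price=1499)

print(new_item.model_dump())
print(price_change.model_dump(exclude_unset=True))
```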

0
app/utils/__init__.py Normal file

26
main.py Normal file

@@ -0,0 +1,26 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.api.router import api_router
from app.core.config import settings
app = FastAPI(
title=settings.PROJECT_NAME,
description="Simple REST API with FastAPI and SQLite",
version="0.1.0",
openapi_url=f"{settings.API_V1_STR}/openapi.json",
docs_url="/docs",
redoc_url="/redoc",
)
# Set all CORS enabled origins
if settings.BACKEND_CORS_ORIGINS:
app.add_middleware(
CORSMiddleware,
allow_origins=[str(origin) for origin in settings.BACKEND_CORS_ORIGINS],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(api_router, prefix=settings.API_V1_STR)

1
migrations/README Normal file

@@ -0,0 +1 @@
Generic single-database configuration with SQLite.

83
migrations/env.py Normal file

@@ -0,0 +1,83 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# Import database models and metadata
from app.database.session import Base
from app.models import Item # noqa: F401
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# target metadata
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
is_sqlite = connection.dialect.name == 'sqlite'
context.configure(
connection=connection,
target_metadata=target_metadata,
render_as_batch=is_sqlite, # Key configuration for SQLite
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

24
migrations/script.py.mako Normal file

@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

44
migrations/versions/ (initial "create items table" revision)

@@ -0,0 +1,44 @@
"""create items table
Revision ID: 20230901000000
Revises:
Create Date: 2023-09-01 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '20230901000000'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create items table
op.create_table(
'items',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('description', sa.String(), nullable=True),
sa.Column('price', sa.Integer(), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False, server_default='1'),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
sa.PrimaryKeyConstraint('id')
)
# Create indexes
op.create_index(op.f('ix_items_id'), 'items', ['id'], unique=False)
op.create_index(op.f('ix_items_name'), 'items', ['name'], unique=False)
def downgrade() -> None:
# Drop indexes
op.drop_index(op.f('ix_items_name'), table_name='items')
op.drop_index(op.f('ix_items_id'), table_name='items')
# Drop table
op.drop_table('items')

10
requirements.txt Normal file

@@ -0,0 +1,10 @@
fastapi>=0.100.0
uvicorn[standard]>=0.23.0
sqlalchemy>=2.0.0
alembic>=1.12.0
pydantic>=2.0.0
pydantic-settings>=2.0.0
python-multipart>=0.0.6
email-validator>=2.0.0
python-dotenv>=1.0.0
ruff>=0.1.0