Create REST API with FastAPI and SQLite
- Set up project structure with FastAPI app
- Implement SQLAlchemy models and async database connection
- Create CRUD endpoints for items resource
- Add health endpoint for monitoring
- Configure Alembic for database migrations
- Create comprehensive documentation

Generated with BackendIM... (backend.im)
This commit is contained in:
parent 62844df7b6
commit 629ba0ee1c
README.md
@@ -1,3 +1,63 @@
-# FastAPI Application
+# Generic REST API Service

-This is a FastAPI application bootstrapped by BackendIM, the AI-powered backend generation platform.
+A RESTful API service built with FastAPI and SQLite for database storage.

## Features

- RESTful API endpoints for CRUD operations on items
- Async SQLAlchemy with SQLite database
- Database migrations with Alembic
- Health check endpoint
- Pydantic schemas for validation
- Comprehensive API documentation

## Project Structure

```
├── alembic/              # Database migration files
├── app/                  # Application package
│   ├── api/              # API endpoints and routes
│   ├── db/               # Database connection and utilities
│   ├── models/           # SQLAlchemy ORM models
│   └── schemas/          # Pydantic schemas for validation and serialization
├── main.py               # Application entry point
├── requirements.txt      # Project dependencies
└── README.md             # This file
```

## API Endpoints

- `GET /health`: Check API health status
- `GET /api/items`: List all items
- `GET /api/items/{item_id}`: Get a specific item
- `POST /api/items`: Create a new item
- `PUT /api/items/{item_id}`: Update an existing item
- `DELETE /api/items/{item_id}`: Delete an item
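For illustration only (not part of this commit), the item endpoints can be exercised with `httpx`, which is already listed in `requirements.txt`. The base URL and the example payload are assumptions; they match the default `uvicorn` settings and the item schema shown in the Getting Started section and in `app/schemas/item.py` below:

```python
# hypothetical example script, run against a locally started server
import httpx

BASE_URL = "http://localhost:8000"  # assumes `uvicorn main:app --reload` defaults

with httpx.Client(base_url=BASE_URL) as client:
    # Create an item
    created = client.post("/api/items", json={"name": "Sample", "description": "First item"})
    created.raise_for_status()
    item = created.json()

    # Fetch it back by ID
    fetched = client.get(f"/api/items/{item['id']}")
    print(fetched.json())
```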
## Getting Started

1. Install dependencies:

   ```
   pip install -r requirements.txt
   ```

2. Run the application:

   ```
   uvicorn main:app --reload
   ```

3. Access the API documentation:

   - Swagger UI: http://localhost:8000/docs
   - ReDoc: http://localhost:8000/redoc

## Database Migrations

Database migrations are managed with Alembic:

```
# Apply migrations
alembic upgrade head

# Create a new migration
alembic revision --autogenerate -m "description"
```
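If migrations need to be applied from Python (for example at application startup) rather than via the CLI, Alembic's command API can do the same thing as `alembic upgrade head`. A minimal sketch, not part of this commit, assuming it runs from the project root where `alembic.ini` lives:

```python
# hypothetical helper, e.g. scripts/migrate.py
from alembic import command
from alembic.config import Config


def apply_migrations() -> None:
    # Equivalent to running `alembic upgrade head` with the repo's alembic.ini
    cfg = Config("alembic.ini")
    command.upgrade(cfg, "head")


if __name__ == "__main__":
    apply_migrations()
```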
alembic.ini (new file)
@@ -0,0 +1,89 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# the aiosqlite driver is required here because env.py builds an async engine
# from this URL; this matches the URL used in app/db/database.py
sqlalchemy.url = sqlite+aiosqlite:////app/storage/db/db.sqlite


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
alembic/README (new file)
@@ -0,0 +1 @@
Generic single-database configuration with SQLAlchemy.
alembic/env.py (new file)
@@ -0,0 +1,87 @@
import asyncio
from logging.config import fileConfig

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from app.db.database import Base
from app.models.item import Item  # noqa: F401  # imported so the items table is registered on Base.metadata

target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    context.configure(connection=connection, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


async def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


if context.is_offline_mode():
    run_migrations_offline()
else:
    asyncio.run(run_migrations_online())
alembic/script.py.mako (new file)
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
alembic/versions/initial_migration.py (new file)
@@ -0,0 +1,37 @@
"""initial migration

Revision ID: initial_migration
Revises:
Create Date: 2025-05-15

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'initial_migration'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create items table
    op.create_table(
        'items',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_items_id'), 'items', ['id'], unique=False)
    op.create_index(op.f('ix_items_name'), 'items', ['name'], unique=False)


def downgrade() -> None:
    op.drop_index(op.f('ix_items_name'), table_name='items')
    op.drop_index(op.f('ix_items_id'), table_name='items')
    op.drop_table('items')
app/__init__.py (new file)
@@ -0,0 +1 @@
# app package initialization
app/api/__init__.py (new file)
@@ -0,0 +1 @@
# api package initialization
app/api/health.py (new file)
@@ -0,0 +1,26 @@
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from app.db.database import get_db

router = APIRouter(tags=["health"])


@router.get("/health", summary="Check API health")
async def health_check(db: AsyncSession = Depends(get_db)):
    """
    Check the health of the API.

    Returns:
        dict: Health status information
    """
    try:
        # Check database connection by executing a trivial query
        await db.execute(text("SELECT 1"))
        db_status = True
    except Exception:
        db_status = False

    return {
        "status": "healthy",
        "database": db_status,
        "version": "0.1.0"
    }
app/api/items.py (new file)
@@ -0,0 +1,119 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from typing import List

from app.db.database import get_db
from app.models.item import Item
from app.schemas.item import ItemCreate, ItemResponse, ItemUpdate

router = APIRouter(tags=["items"])


@router.post("/items", response_model=ItemResponse, status_code=status.HTTP_201_CREATED)
async def create_item(item: ItemCreate, db: AsyncSession = Depends(get_db)):
    """
    Create a new item.

    Args:
        item: The item data to create
        db: Database session

    Returns:
        The created item
    """
    db_item = Item(**item.model_dump())
    db.add(db_item)
    await db.commit()
    await db.refresh(db_item)
    return db_item


@router.get("/items", response_model=List[ItemResponse])
async def read_items(skip: int = 0, limit: int = 100, db: AsyncSession = Depends(get_db)):
    """
    Retrieve all items with pagination.

    Args:
        skip: Number of items to skip (for pagination)
        limit: Maximum number of items to return
        db: Database session

    Returns:
        List of items
    """
    result = await db.execute(select(Item).offset(skip).limit(limit))
    items = result.scalars().all()
    return items


@router.get("/items/{item_id}", response_model=ItemResponse)
async def read_item(item_id: int, db: AsyncSession = Depends(get_db)):
    """
    Retrieve a specific item by ID.

    Args:
        item_id: The ID of the item to retrieve
        db: Database session

    Returns:
        The requested item if found

    Raises:
        HTTPException: If the item is not found
    """
    result = await db.execute(select(Item).filter(Item.id == item_id))
    item = result.scalars().first()
    if item is None:
        raise HTTPException(status_code=404, detail="Item not found")
    return item


@router.put("/items/{item_id}", response_model=ItemResponse)
async def update_item(item_id: int, item: ItemUpdate, db: AsyncSession = Depends(get_db)):
    """
    Update an existing item.

    Args:
        item_id: The ID of the item to update
        item: The updated item data
        db: Database session

    Returns:
        The updated item

    Raises:
        HTTPException: If the item is not found
    """
    result = await db.execute(select(Item).filter(Item.id == item_id))
    db_item = result.scalars().first()
    if db_item is None:
        raise HTTPException(status_code=404, detail="Item not found")

    update_data = item.model_dump(exclude_unset=True)
    for key, value in update_data.items():
        setattr(db_item, key, value)

    await db.commit()
    await db.refresh(db_item)
    return db_item


@router.delete("/items/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_item(item_id: int, db: AsyncSession = Depends(get_db)):
    """
    Delete an item.

    Args:
        item_id: The ID of the item to delete
        db: Database session

    Returns:
        No content on successful deletion

    Raises:
        HTTPException: If the item is not found
    """
    result = await db.execute(select(Item).filter(Item.id == item_id))
    db_item = result.scalars().first()
    if db_item is None:
        raise HTTPException(status_code=404, detail="Item not found")

    await db.delete(db_item)
    await db.commit()
    return None
app/db/__init__.py (new file)
@@ -0,0 +1 @@
# database package initialization
app/db/database.py (new file)
@@ -0,0 +1,37 @@
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import declarative_base, sessionmaker
from pathlib import Path

# Create the database directory
DB_DIR = Path("/app") / "storage" / "db"
DB_DIR.mkdir(parents=True, exist_ok=True)

# SQLite connection URL
SQLALCHEMY_DATABASE_URL = f"sqlite+aiosqlite:///{DB_DIR}/db.sqlite"

# Create the engine
engine = create_async_engine(
    SQLALCHEMY_DATABASE_URL,
    connect_args={"check_same_thread": False},
    echo=True,
)

# Session factory
SessionLocal = sessionmaker(
    autocommit=False,
    autoflush=False,
    bind=engine,
    class_=AsyncSession,
    expire_on_commit=False,
)

# Base class for ORM models
Base = declarative_base()


# Dependency for getting the database session
async def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        await db.close()
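Schema creation in this commit is handled by the Alembic migration above. For quick local experiments, though, the same `Base` metadata can be used to create the tables directly with the async engine. A minimal sketch, not part of this commit, assuming it is run from the project root so the `app` package is importable and `/app/storage` is writable:

```python
# hypothetical helper, e.g. scripts/create_schema.py
import asyncio

from app.db.database import Base, engine
import app.models.item  # noqa: F401  # imported so the items table is registered on Base.metadata


async def create_schema() -> None:
    # Create any missing tables straight from the ORM metadata (bypasses Alembic)
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)


if __name__ == "__main__":
    asyncio.run(create_schema())
```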
app/models/__init__.py (new file)
@@ -0,0 +1 @@
# models package initialization
app/models/item.py (new file)
@@ -0,0 +1,12 @@
from sqlalchemy import Column, Integer, String, Text, DateTime
from sqlalchemy.sql import func
from app.db.database import Base


class Item(Base):
    __tablename__ = "items"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String(100), nullable=False, index=True)
    description = Column(Text, nullable=True)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())
app/schemas/__init__.py (new file)
@@ -0,0 +1 @@
# schemas package initialization
app/schemas/item.py (new file)
@@ -0,0 +1,26 @@
from pydantic import BaseModel, Field
from typing import Optional
from datetime import datetime


# Base Item schema with common attributes
class ItemBase(BaseModel):
    name: str = Field(..., min_length=1, max_length=100, examples=["Item name"])
    description: Optional[str] = Field(None, examples=["Item description"])


# Schema for creating a new item
class ItemCreate(ItemBase):
    pass


# Schema for updating an existing item
class ItemUpdate(BaseModel):
    name: Optional[str] = Field(None, min_length=1, max_length=100, examples=["Updated item name"])
    description: Optional[str] = Field(None, examples=["Updated item description"])


# Schema for item responses
class ItemResponse(ItemBase):
    id: int
    created_at: datetime
    updated_at: Optional[datetime] = None

    class Config:
        from_attributes = True
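To make the partial-update behaviour in `update_item` concrete, here is a small illustration (not part of the commit) of how these schemas behave with Pydantic's `model_dump`:

```python
# illustration only: how the update endpoint sees partial payloads
from app.schemas.item import ItemCreate, ItemUpdate

full = ItemCreate(name="Sample", description="A sample item")
print(full.model_dump())
# {'name': 'Sample', 'description': 'A sample item'}

patch = ItemUpdate(description="New description")
print(patch.model_dump(exclude_unset=True))
# {'description': 'New description'}  -- only the fields the client actually sent
```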
main.py (new file)
@@ -0,0 +1,30 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from pathlib import Path

app = FastAPI(
    title="Generic REST API Service",
    description="A generic REST API service built with FastAPI and SQLite",
    version="0.1.0",
)

# Configure CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Import routers
from app.api.health import router as health_router
from app.api.items import router as items_router

# Include routers
app.include_router(health_router)
app.include_router(items_router, prefix="/api")

if __name__ == "__main__":
    import uvicorn
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
requirements.txt (new file)
@@ -0,0 +1,10 @@
fastapi>=0.103.1
uvicorn>=0.23.2
sqlalchemy>=2.0.20
alembic>=1.12.0
pydantic>=2.3.0
aiosqlite>=0.19.0
python-multipart>=0.0.6
ruff>=0.0.290
pytest>=7.4.2
httpx>=0.25.0
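`requirements.txt` pulls in `pytest` and `httpx`, but the commit ships no tests. A minimal smoke-test sketch for the health endpoint (hypothetical file `tests/test_health.py`, assuming the `/app/storage` path used by `app/db/database.py` is writable in the test environment):

```python
# hypothetical tests/test_health.py
from fastapi.testclient import TestClient

from main import app

client = TestClient(app)


def test_health_endpoint():
    response = client.get("/health")
    assert response.status_code == 200
    body = response.json()
    assert body["status"] == "healthy"
    assert body["database"] is True
```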