Implement Task Management API with FastAPI and SQLite
This commit is contained in:
parent e165ff4d5d
commit 0932aa2dda
95  README.md
@@ -1,3 +1,94 @@
-# FastAPI Application
-This is a FastAPI application bootstrapped by BackendIM, the AI-powered backend generation platform.

# Task Management API

This is a simple REST API for managing tasks built with FastAPI and SQLite.

## Features

- Create, read, update, and delete tasks
- List tasks with pagination (`skip`/`limit` query parameters)
- Health check endpoint
- OpenAPI documentation
- CORS enabled

## Tech Stack

- FastAPI: Modern, fast web framework for building APIs
- SQLAlchemy: SQL toolkit and ORM
- Alembic: Database migration tool
- SQLite: Lightweight disk-based database
- Pydantic: Data validation and settings management
- Uvicorn: ASGI server

## API Endpoints

- `GET /`: Root endpoint with API information
- `GET /openapi.json`: OpenAPI schema
- `GET /docs`: Swagger UI documentation
- `GET /redoc`: ReDoc documentation
- `GET /api/v1/health`: Health check endpoint

### Task Endpoints

- `GET /api/v1/tasks`: List all tasks
- `POST /api/v1/tasks`: Create a new task
- `GET /api/v1/tasks/{task_id}`: Get a task by ID
- `PUT /api/v1/tasks/{task_id}`: Update a task
- `DELETE /api/v1/tasks/{task_id}`: Delete a task
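
For a quick smoke test of the task endpoints, a minimal sketch using only the Python standard library (assuming the server is already running locally on port 8000, and using illustrative payload values) could look like this:

```
import json
import urllib.request

BASE = "http://localhost:8000/api/v1"

# Create a task (POST /api/v1/tasks/).
payload = json.dumps({"title": "Write docs", "priority": "high"}).encode()
request = urllib.request.Request(
    f"{BASE}/tasks/",
    data=payload,
    headers={"Content-Type": "application/json"},
    method="POST",
)
with urllib.request.urlopen(request) as response:
    created = json.load(response)
    print("created task id:", created["id"])

# List tasks (GET /api/v1/tasks/).
with urllib.request.urlopen(f"{BASE}/tasks/") as response:
    listing = json.load(response)
    print("task count:", listing["count"])
```

The trailing slash matters here: the routes are registered as `/api/v1/tasks/`, and without it FastAPI answers with a 307 redirect that `urllib` will not follow for a POST.
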
## Setup and Installation

### Prerequisites

- Python 3.8 or higher

### Installation

1. Clone the repository:
```
git clone <repository-url>
cd taskmanagementapi
```

2. Install dependencies:
```
pip install -r requirements.txt
```

3. Run the application:
```
uvicorn main:app --reload
```

4. The API will be available at `http://localhost:8000`

### Database Migrations

The application uses Alembic for database migrations. On startup, `main.py` creates any missing tables directly via SQLAlchemy's `create_all`; Alembic migrations are not applied automatically. To apply migrations manually, run:

```
alembic upgrade head
```
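
If migrations should instead be applied automatically at startup, one possible sketch (a hypothetical `run_migrations.py` helper, assuming `alembic.ini` and the `migrations/` directory sit in the working directory the app is launched from) is:

```
# run_migrations.py -- apply Alembic migrations programmatically (sketch).
from alembic import command
from alembic.config import Config


def run_migrations() -> None:
    # Load alembic.ini from the current working directory and upgrade to head.
    alembic_cfg = Config("alembic.ini")
    command.upgrade(alembic_cfg, "head")


if __name__ == "__main__":
    run_migrations()
```

Calling `run_migrations()` before starting Uvicorn would then take the place of the `Base.metadata.create_all` call in `main.py`.
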
## Environment Variables

No environment variables are required: the application uses SQLite at the fixed path `/app/storage/db/db.sqlite`.

## Development

### Running Tests

Currently, no tests are implemented.
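
A starting point could be a sketch like the one below, using `pytest` and FastAPI's `TestClient` (pytest is not listed in `requirements.txt` and the test client needs `requests`/`httpx`, so these would have to be installed first; importing `main` also assumes the fixed SQLite path `/app/storage/db/db.sqlite` is writable):

```
# tests/test_tasks.py -- a minimal, hypothetical test sketch.
from fastapi.testclient import TestClient

from main import app

client = TestClient(app)


def test_create_and_read_task():
    # Create a task through the API.
    created = client.post(
        "/api/v1/tasks/",
        json={"title": "Write tests", "priority": "high"},
    )
    assert created.status_code == 201
    task_id = created.json()["id"]

    # The task should now be retrievable by ID.
    fetched = client.get(f"/api/v1/tasks/{task_id}")
    assert fetched.status_code == 200
    assert fetched.json()["title"] == "Write tests"


def test_health_check():
    response = client.get("/api/v1/health/")
    assert response.status_code == 200
    assert "database" in response.json()
```
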
### Linting

The project uses Ruff for linting:

```
ruff check .
```

## API Documentation

When the application is running, you can access the Swagger UI documentation at `http://localhost:8000/docs` and the ReDoc documentation at `http://localhost:8000/redoc`.
105  alembic.ini  Normal file
@@ -0,0 +1,105 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = sqlite:////app/storage/db/db.sqlite


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
0  app/__init__.py  Normal file
0  app/api/__init__.py  Normal file
17  app/api/errors.py  Normal file
@@ -0,0 +1,17 @@
from fastapi import HTTPException, status


class TaskNotFound(HTTPException):
    def __init__(self):
        super().__init__(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )


class DatabaseError(HTTPException):
    def __init__(self, detail: str = "Database error occurred"):
        super().__init__(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=detail,
        )
0  app/api/v1/__init__.py  Normal file
7  app/api/v1/api.py  Normal file
@@ -0,0 +1,7 @@
from fastapi import APIRouter

from app.api.v1.endpoints import health, tasks

api_router = APIRouter()
api_router.include_router(tasks.router, prefix="/tasks", tags=["tasks"])
api_router.include_router(health.router, prefix="/health", tags=["health"])
0  app/api/v1/endpoints/__init__.py  Normal file
25  app/api/v1/endpoints/health.py  Normal file
@@ -0,0 +1,25 @@
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.orm import Session

from app.db.session import get_db

router = APIRouter()


@router.get("/", response_model=dict)
def health_check(db: Session = Depends(get_db)):
    """
    Health check endpoint.
    Returns the status of the API and its dependencies.
    """
    try:
        # Check database connection
        db.execute(text("SELECT 1"))
        db_status = "healthy"
    except Exception:
        db_status = "unhealthy"

    return {
        "status": "healthy",
        "database": db_status,
    }
90  app/api/v1/endpoints/tasks.py  Normal file
@@ -0,0 +1,90 @@
from typing import Any

from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session

from app import crud, schemas
from app.db.session import get_db

router = APIRouter()


@router.get("/", response_model=schemas.TaskList)
def read_tasks(
    skip: int = 0, limit: int = 100, db: Session = Depends(get_db)
) -> Any:
    """
    Retrieve tasks.
    """
    tasks = crud.get_tasks(db=db, skip=skip, limit=limit)
    return {"tasks": tasks, "count": len(tasks)}


@router.post("/", response_model=schemas.Task, status_code=status.HTTP_201_CREATED)
def create_task(
    *,
    db: Session = Depends(get_db),
    task_in: schemas.TaskCreate,
) -> Any:
    """
    Create new task.
    """
    task = crud.create_task(db=db, task=task_in)
    return task


@router.get("/{task_id}", response_model=schemas.Task)
def read_task(
    *,
    task_id: int,
    db: Session = Depends(get_db),
) -> Any:
    """
    Get task by ID.
    """
    task = crud.get_task(db=db, task_id=task_id)
    if not task:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )
    return task


@router.put("/{task_id}", response_model=schemas.Task)
def update_task(
    *,
    task_id: int,
    task_in: schemas.TaskUpdate,
    db: Session = Depends(get_db),
) -> Any:
    """
    Update a task.
    """
    task = crud.get_task(db=db, task_id=task_id)
    if not task:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )
    task = crud.update_task(db=db, task_id=task_id, task=task_in)
    return task


@router.delete("/{task_id}", status_code=status.HTTP_204_NO_CONTENT, response_model=None)
def delete_task(
    *,
    task_id: int,
    db: Session = Depends(get_db),
) -> None:
    """
    Delete a task.
    """
    task = crud.get_task(db=db, task_id=task_id)
    if not task:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )
    crud.delete_task(db=db, task_id=task_id)
    return None
1  app/crud/__init__.py  Normal file
@@ -0,0 +1 @@
from app.crud.task import create_task, delete_task, get_task, get_tasks, update_task  # noqa: F401
57  app/crud/task.py  Normal file
@@ -0,0 +1,57 @@
from typing import List, Optional

from sqlalchemy.orm import Session

from app.models.task import Task
from app.schemas.task import TaskCreate, TaskUpdate


def get_task(db: Session, task_id: int) -> Optional[Task]:
    return db.query(Task).filter(Task.id == task_id).first()


def get_tasks(
    db: Session, skip: int = 0, limit: int = 100
) -> List[Task]:
    return db.query(Task).offset(skip).limit(limit).all()


def create_task(db: Session, task: TaskCreate) -> Task:
    db_task = Task(
        title=task.title,
        description=task.description,
        status=task.status,
        priority=task.priority,
        is_completed=task.is_completed,
    )
    db.add(db_task)
    db.commit()
    db.refresh(db_task)
    return db_task


def update_task(
    db: Session, task_id: int, task: TaskUpdate
) -> Optional[Task]:
    db_task = get_task(db, task_id)
    if not db_task:
        return None

    update_data = task.dict(exclude_unset=True)
    for field, value in update_data.items():
        setattr(db_task, field, value)

    db.add(db_task)
    db.commit()
    db.refresh(db_task)
    return db_task


def delete_task(db: Session, task_id: int) -> bool:
    db_task = get_task(db, task_id)
    if not db_task:
        return False

    db.delete(db_task)
    db.commit()
    return True
0  app/db/__init__.py  Normal file
3  app/db/base.py  Normal file
@@ -0,0 +1,3 @@
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()
23  app/db/session.py  Normal file
@@ -0,0 +1,23 @@
from pathlib import Path

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

DB_DIR = Path("/app") / "storage" / "db"
DB_DIR.mkdir(parents=True, exist_ok=True)

SQLALCHEMY_DATABASE_URL = f"sqlite:///{DB_DIR}/db.sqlite"

engine = create_engine(
    SQLALCHEMY_DATABASE_URL,
    connect_args={"check_same_thread": False}
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
1  app/models/__init__.py  Normal file
@@ -0,0 +1 @@
from app.models.task import Task  # noqa: F401
16  app/models/task.py  Normal file
@@ -0,0 +1,16 @@
from sqlalchemy import Boolean, Column, DateTime, Integer, String, func

from app.db.base import Base


class Task(Base):
    __tablename__ = "tasks"

    id = Column(Integer, primary_key=True, index=True)
    title = Column(String, index=True)
    description = Column(String, nullable=True)
    status = Column(String, default="pending")
    priority = Column(String, default="medium")
    is_completed = Column(Boolean, default=False)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now(), server_default=func.now())
1  app/schemas/__init__.py  Normal file
@@ -0,0 +1 @@
from app.schemas.task import Task, TaskCreate, TaskList, TaskUpdate  # noqa: F401
42  app/schemas/task.py  Normal file
@@ -0,0 +1,42 @@
from datetime import datetime
from typing import List, Optional

from pydantic import BaseModel


class TaskBase(BaseModel):
    title: str
    description: Optional[str] = None
    status: Optional[str] = "pending"
    priority: Optional[str] = "medium"
    is_completed: Optional[bool] = False


class TaskCreate(TaskBase):
    pass


class TaskUpdate(BaseModel):
    title: Optional[str] = None
    description: Optional[str] = None
    status: Optional[str] = None
    priority: Optional[str] = None
    is_completed: Optional[bool] = None


class TaskInDBBase(TaskBase):
    id: int
    created_at: datetime
    updated_at: datetime

    class Config:
        orm_mode = True


class Task(TaskInDBBase):
    pass


class TaskList(BaseModel):
    tasks: List[Task]
    count: int
59  main.py  Normal file
@@ -0,0 +1,59 @@
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.openapi.utils import get_openapi

from app.api.v1.api import api_router
from app.db.base import Base
from app.db.session import engine

# Create tables if they don't exist
Base.metadata.create_all(bind=engine)

app = FastAPI(
    title="Task Management API",
    description="A simple REST API for managing tasks",
    version="0.1.0",
)

# Configure CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include API router
app.include_router(api_router, prefix="/api/v1")


@app.get("/", tags=["root"])
async def root():
    """
    Root endpoint returning API information.
    """
    return {
        "title": "Task Management API",
        "docs_url": "/docs",
        "openapi_url": "/openapi.json",
        "health_check": "/api/v1/health",
    }


@app.get("/openapi.json", include_in_schema=False)
async def get_open_api_endpoint():
    """
    Expose OpenAPI schema.
    """
    return get_openapi(
        title="Task Management API",
        version="0.1.0",
        description="A simple REST API for managing tasks",
        routes=app.routes,
    )


if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
1  migrations/README  Normal file
@@ -0,0 +1 @@
Generic single-database configuration with SQLAlchemy.
87  migrations/env.py  Normal file
@@ -0,0 +1,87 @@
import os
import sys
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

# Add the parent directory to sys.path
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

# import models to ensure they are registered with Base
from app.db.base import Base  # noqa
from app.models import *  # noqa

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        render_as_batch=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        is_sqlite = connection.dialect.name == 'sqlite'
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=is_sqlite,
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
24  migrations/script.py.mako  Normal file
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
42  migrations/versions/initial_migration.py  Normal file
@@ -0,0 +1,42 @@
"""Initial migration

Revision ID: 9ef21b34c10a
Revises:
Create Date: 2023-08-01 10:00:00.000000

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '9ef21b34c10a'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('tasks',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('title', sa.String(), nullable=True),
    sa.Column('description', sa.String(), nullable=True),
    sa.Column('status', sa.String(), nullable=True),
    sa.Column('priority', sa.String(), nullable=True),
    sa.Column('is_completed', sa.Boolean(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_tasks_id'), 'tasks', ['id'], unique=False)
    op.create_index(op.f('ix_tasks_title'), 'tasks', ['title'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_tasks_title'), table_name='tasks')
    op.drop_index(op.f('ix_tasks_id'), table_name='tasks')
    op.drop_table('tasks')
    # ### end Alembic commands ###
7  requirements.txt  Normal file
@@ -0,0 +1,7 @@
fastapi>=0.95.0
uvicorn>=0.21.1
sqlalchemy>=1.4.46,<2.0
alembic>=1.10.2
pydantic>=1.10.7,<2.0
python-dotenv>=1.0.0
ruff>=0.0.292