Implement Task Manager API
- Set up project structure with FastAPI
- Configure SQLite database with SQLAlchemy
- Create Task model with Alembic migrations
- Implement CRUD API endpoints for tasks
- Add health check and CORS configuration
- Update documentation
parent 52f37e35ef
commit c1765ee96b
README.md (modified, @@ -1,3 +1,79 @@)

The previous boilerplate ("# FastAPI Application" / "This is a FastAPI application bootstrapped by BackendIM, the AI-powered backend generation platform.") is replaced with:

# Task Manager API

This is a FastAPI-based Task Manager API with a SQLite backend. It provides endpoints for managing tasks, with support for priorities, due dates, and filtering.

## Features

- Create, read, update, and delete tasks
- Mark tasks as completed
- Filter tasks by completion status and priority
- SQLite database with SQLAlchemy ORM
- Alembic for database migrations
- Automatic API documentation via FastAPI

## API Endpoints

- `GET /health`: Health check endpoint
- `GET /api/v1/tasks`: List all tasks (with optional filtering)
- `POST /api/v1/tasks`: Create a new task
- `GET /api/v1/tasks/{task_id}`: Get a specific task
- `PUT /api/v1/tasks/{task_id}`: Update a task
- `DELETE /api/v1/tasks/{task_id}`: Delete a task
- `PATCH /api/v1/tasks/{task_id}/complete`: Mark a task as completed
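For illustration, a minimal sketch of calling these endpoints from Python. It assumes the server is running locally on port 8000 and uses `httpx`, which is not listed in `requirements.txt`; any HTTP client works, and the payload fields follow the schemas in `app/schemas/task.py`:

```python
import httpx

BASE = "http://localhost:8000/api/v1"

# Create a task (priority: 1=Low, 2=Medium, 3=High)
created = httpx.post(f"{BASE}/tasks/", json={"title": "Write docs", "priority": 2}).json()
print(created["id"], created["is_completed"])  # e.g. 1 False

# List incomplete, high-priority tasks only
tasks = httpx.get(f"{BASE}/tasks/", params={"completed": False, "priority": 3}).json()
print(len(tasks), "matching tasks")

# Mark the created task as completed
done = httpx.patch(f"{BASE}/tasks/{created['id']}/complete").json()
print(done["is_completed"])  # True
```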
## Project Structure

```
taskmanagerapi/
├── alembic.ini                      # Alembic configuration
├── main.py                          # FastAPI application entry point
├── requirements.txt                 # Python dependencies
├── app/                             # Application code
│   ├── api/                         # API routes
│   │   └── v1/                      # API version 1
│   │       ├── endpoints/           # API endpoint handlers
│   │       └── router.py            # API router
│   ├── core/                        # Core application code
│   │   └── config.py                # Application configuration
│   ├── db/                          # Database related code
│   │   └── session.py               # Database session setup
│   ├── models/                      # SQLAlchemy models
│   │   └── task.py                  # Task model
│   └── schemas/                     # Pydantic schemas
│       └── task.py                  # Task schemas
└── migrations/                      # Database migrations
    ├── env.py                       # Alembic environment
    ├── script.py.mako               # Migration script template
    └── versions/                    # Migration versions
        └── 01_initial_migration.py  # Initial migration
```
## Getting Started

### Prerequisites

- Python 3.8+
- pip (Python package installer)

### Installation

1. Clone the repository
2. Install dependencies:
   ```
   pip install -r requirements.txt
   ```
3. Apply migrations:
   ```
   alembic upgrade head
   ```
4. Start the server:
   ```
   uvicorn main:app --reload
   ```
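To confirm the service is up after step 4, a quick check against the health endpoint (a Python sketch; `httpx` is assumed to be installed separately):

```python
import httpx

resp = httpx.get("http://localhost:8000/health")
print(resp.status_code, resp.json())  # expected: 200 {'status': 'healthy'}
```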
### API Documentation

Once the server is running, you can access:

- Swagger UI documentation: http://localhost:8000/docs
- ReDoc documentation: http://localhost:8000/redoc
- OpenAPI JSON: http://localhost:8000/openapi.json
alembic.ini (new file, 74 lines)

# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat migrations/versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# SQLite URL - using absolute path
sqlalchemy.url = sqlite:////app/storage/db/db.sqlite

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = INFO
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stdout,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
app/__init__.py (new empty file)
app/api/__init__.py (new empty file)
app/api/v1/__init__.py (new empty file)
app/api/v1/endpoints/__init__.py (new empty file)
app/api/v1/endpoints/tasks.py (new file, 105 lines)

from typing import List, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session

from app.db.session import get_db
from app.models.task import Task as TaskModel
from app.schemas.task import Task, TaskCreate, TaskUpdate

router = APIRouter()


@router.get("/", response_model=List[Task])
def read_tasks(
    skip: int = 0,
    limit: int = 100,
    completed: Optional[bool] = None,
    priority: Optional[int] = None,
    db: Session = Depends(get_db),
):
    """
    Retrieve tasks with optional filtering.
    """
    query = db.query(TaskModel)

    # Apply filters if provided
    if completed is not None:
        query = query.filter(TaskModel.is_completed == completed)

    if priority is not None:
        query = query.filter(TaskModel.priority == priority)

    # Apply pagination
    tasks = query.order_by(TaskModel.created_at.desc()).offset(skip).limit(limit).all()
    return tasks


@router.post("/", response_model=Task, status_code=status.HTTP_201_CREATED)
def create_task(task: TaskCreate, db: Session = Depends(get_db)):
    """
    Create a new task.
    """
    db_task = TaskModel(**task.model_dump())
    db.add(db_task)
    db.commit()
    db.refresh(db_task)
    return db_task


@router.get("/{task_id}", response_model=Task)
def read_task(task_id: int, db: Session = Depends(get_db)):
    """
    Get a specific task by ID.
    """
    db_task = db.query(TaskModel).filter(TaskModel.id == task_id).first()
    if db_task is None:
        raise HTTPException(status_code=404, detail="Task not found")
    return db_task


@router.put("/{task_id}", response_model=Task)
def update_task(task_id: int, task: TaskUpdate, db: Session = Depends(get_db)):
    """
    Update a task.
    """
    db_task = db.query(TaskModel).filter(TaskModel.id == task_id).first()
    if db_task is None:
        raise HTTPException(status_code=404, detail="Task not found")

    # Update only the fields that are set
    update_data = task.model_dump(exclude_unset=True)
    for key, value in update_data.items():
        setattr(db_task, key, value)

    db.commit()
    db.refresh(db_task)
    return db_task


@router.delete("/{task_id}", status_code=status.HTTP_204_NO_CONTENT, response_model=None)
def delete_task(task_id: int, db: Session = Depends(get_db)):
    """
    Delete a task.
    """
    db_task = db.query(TaskModel).filter(TaskModel.id == task_id).first()
    if db_task is None:
        raise HTTPException(status_code=404, detail="Task not found")

    db.delete(db_task)
    db.commit()
    return None


@router.patch("/{task_id}/complete", response_model=Task)
def complete_task(task_id: int, db: Session = Depends(get_db)):
    """
    Mark a task as completed.
    """
    db_task = db.query(TaskModel).filter(TaskModel.id == task_id).first()
    if db_task is None:
        raise HTTPException(status_code=404, detail="Task not found")

    db_task.is_completed = True
    db.commit()
    db.refresh(db_task)
    return db_task
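For illustration, a hedged sketch of exercising these endpoints with FastAPI's TestClient, overriding `get_db` with an in-memory SQLite database so the test never touches the real database file. It assumes `pytest` and `httpx` are installed (neither is in `requirements.txt`) and that `DB_DIR` from `app/core/config.py` is creatable, since importing `main` still initializes `app.db.session`:

```python
# test_tasks.py: a sketch, not part of the commit.
from fastapi.testclient import TestClient
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import StaticPool

from app.db.session import Base, get_db
from main import app

# In-memory SQLite shared across connections via StaticPool.
engine = create_engine(
    "sqlite://",
    connect_args={"check_same_thread": False},
    poolclass=StaticPool,
)
TestingSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base.metadata.create_all(bind=engine)  # create the tasks table without Alembic


def override_get_db():
    db = TestingSessionLocal()
    try:
        yield db
    finally:
        db.close()


app.dependency_overrides[get_db] = override_get_db
client = TestClient(app)


def test_create_and_complete_task():
    created = client.post("/api/v1/tasks/", json={"title": "Write docs", "priority": 2})
    assert created.status_code == 201
    task_id = created.json()["id"]

    completed = client.patch(f"/api/v1/tasks/{task_id}/complete")
    assert completed.json()["is_completed"] is True

    listed = client.get("/api/v1/tasks/", params={"completed": True})
    assert any(t["id"] == task_id for t in listed.json())
```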
app/api/v1/router.py (new file, 6 lines)

from fastapi import APIRouter
from app.api.v1.endpoints import tasks

router = APIRouter()

router.include_router(tasks.router, prefix="/tasks", tags=["Tasks"])
app/core/__init__.py (new empty file)
app/core/config.py (new file, 19 lines)

from pydantic_settings import BaseSettings
from pathlib import Path


class Settings(BaseSettings):
    PROJECT_NAME: str = "Task Manager API"
    PROJECT_DESCRIPTION: str = "A FastAPI Task Manager API with SQLite backend"
    VERSION: str = "0.1.0"

    # Database settings
    DB_DIR: Path = Path("/app") / "storage" / "db"
    SQLALCHEMY_DATABASE_URL: str = f"sqlite:///{DB_DIR}/db.sqlite"

    class Config:
        env_file = ".env"
        env_file_encoding = "utf-8"


settings = Settings()
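Because `Settings` extends pydantic's `BaseSettings`, any field can be overridden through environment variables or the `.env` file. A small sketch of that behaviour (the variable must be set before `Settings()` is instantiated):

```python
import os

os.environ["PROJECT_NAME"] = "Task Manager API (staging)"

from app.core.config import Settings  # noqa: E402

print(Settings().PROJECT_NAME)  # -> Task Manager API (staging)
```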
app/db/__init__.py (new empty file)
app/db/session.py (new file, 27 lines)

from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from app.core.config import settings

# Ensure the DB directory exists
settings.DB_DIR.mkdir(parents=True, exist_ok=True)

# Create SQLAlchemy engine
engine = create_engine(
    settings.SQLALCHEMY_DATABASE_URL,
    connect_args={"check_same_thread": False}  # Only needed for SQLite
)

# Create SessionLocal class
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# Create Base class for models
Base = declarative_base()


# Dependency to get DB session
def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
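A minimal sketch of using `SessionLocal` outside a request, for example in a one-off seed script. It assumes migrations have already been applied and that `DB_DIR` is writable:

```python
from app.db.session import SessionLocal
from app.models.task import Task

db = SessionLocal()
try:
    db.add(Task(title="Seed task", priority=2))
    db.commit()
    print(db.query(Task).count(), "tasks in the database")
finally:
    db.close()
```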
app/models/__init__.py (new file, 3 lines)

from app.models.task import Task

__all__ = ["Task"]
app/models/task.py (new file, 15 lines)

from sqlalchemy import Column, Integer, String, Text, Boolean, DateTime, func
from app.db.session import Base


class Task(Base):
    __tablename__ = "tasks"

    id = Column(Integer, primary_key=True, index=True)
    title = Column(String(255), nullable=False, index=True)
    description = Column(Text, nullable=True)
    is_completed = Column(Boolean, default=False)
    priority = Column(Integer, default=1)  # 1=Low, 2=Medium, 3=High
    due_date = Column(DateTime, nullable=True)
    created_at = Column(DateTime, default=func.now(), nullable=False)
    updated_at = Column(DateTime, default=func.now(), onupdate=func.now(), nullable=False)
app/schemas/__init__.py (new file, 3 lines)

from app.schemas.task import Task, TaskCreate, TaskUpdate

__all__ = ["Task", "TaskCreate", "TaskUpdate"]
app/schemas/task.py (new file, 37 lines)

from datetime import datetime
from typing import Optional

from pydantic import BaseModel, Field


class TaskBase(BaseModel):
    title: str
    description: Optional[str] = None
    is_completed: bool = False
    priority: int = Field(1, ge=1, le=3, description="Task priority: 1=Low, 2=Medium, 3=High")
    due_date: Optional[datetime] = None


class TaskCreate(TaskBase):
    pass


class TaskUpdate(BaseModel):
    title: Optional[str] = None
    description: Optional[str] = None
    is_completed: Optional[bool] = None
    priority: Optional[int] = Field(None, ge=1, le=3, description="Task priority: 1=Low, 2=Medium, 3=High")
    due_date: Optional[datetime] = None


class TaskInDBBase(TaskBase):
    id: int
    created_at: datetime
    updated_at: datetime

    class Config:
        from_attributes = True


class Task(TaskInDBBase):
    pass
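The split between `TaskCreate` and `TaskUpdate` is what makes partial updates work in `tasks.py`: `update_task` only applies the fields the client actually sent, via `model_dump(exclude_unset=True)`. A quick illustration:

```python
from app.schemas.task import TaskCreate, TaskUpdate

new_task = TaskCreate(title="Ship v0.1", priority=3)
print(new_task.model_dump())
# {'title': 'Ship v0.1', 'description': None, 'is_completed': False, 'priority': 3, 'due_date': None}

patch = TaskUpdate(priority=2)
print(patch.model_dump(exclude_unset=True))
# {'priority': 2}: only the fields that were explicitly set reach setattr() in update_task
```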
main.py (new file, 34 lines)

import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.api.v1.router import router as api_router
from app.core.config import settings

app = FastAPI(
    title=settings.PROJECT_NAME,
    description=settings.PROJECT_DESCRIPTION,
    version=settings.VERSION,
    openapi_url="/openapi.json",
    docs_url="/docs",
    redoc_url="/redoc",
)

# Set up CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include API router
app.include_router(api_router, prefix="/api/v1")


# Health check endpoint
@app.get("/health", tags=["Health"])
async def health_check():
    return {"status": "healthy"}


if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
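The wildcard CORS configuration is convenient for development. As a purely hypothetical variant, the allowed origins could be driven from configuration instead; `BACKEND_CORS_ORIGINS` below is an assumed extra field, not part of the committed `Settings` class:

```python
from typing import List

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from pydantic_settings import BaseSettings


class CORSSettings(BaseSettings):
    # Assumed field, not in app/core/config.py; set via env as a JSON list,
    # e.g. BACKEND_CORS_ORIGINS='["https://example.com"]'
    BACKEND_CORS_ORIGINS: List[str] = ["http://localhost:3000"]


cors_settings = CORSSettings()
app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=cors_settings.BACKEND_CORS_ORIGINS,  # explicit origins instead of "*"
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
```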
migrations/env.py (new file, 83 lines)

from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# Import Base and all models
from app.db.session import Base
import app.models  # noqa

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        is_sqlite = connection.dialect.name == 'sqlite'
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=is_sqlite,  # Enable batch mode for SQLite
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
migrations/script.py.mako (new file, 24 lines)

"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
migrations/versions/01_initial_migration.py (new file, 43 lines)

"""Initial migration

Revision ID: 01_initial_migration
Revises:
Create Date: 2023-07-09

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '01_initial_migration'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create tasks table
    op.create_table(
        'tasks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('is_completed', sa.Boolean(), nullable=False, default=False),
        sa.Column('priority', sa.Integer(), nullable=False, default=1),
        sa.Column('due_date', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes on id and title
    op.create_index(op.f('ix_tasks_id'), 'tasks', ['id'], unique=False)
    op.create_index(op.f('ix_tasks_title'), 'tasks', ['title'], unique=False)


def downgrade() -> None:
    # Drop indexes and tasks table
    op.drop_index(op.f('ix_tasks_title'), table_name='tasks')
    op.drop_index(op.f('ix_tasks_id'), table_name='tasks')
    op.drop_table('tasks')
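Besides the `alembic upgrade head` command shown in the README, migrations can also be applied programmatically, which can be handy at application start-up. A sketch using Alembic's command API (run from the project root so `alembic.ini` and `migrations/` are found):

```python
from alembic import command
from alembic.config import Config

alembic_cfg = Config("alembic.ini")   # picks up script_location = migrations
command.upgrade(alembic_cfg, "head")  # applies 01_initial_migration and any later revisions
```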
requirements.txt (new file, 8 lines)

fastapi>=0.100.0
uvicorn>=0.22.0
pydantic>=2.0.0
pydantic-settings>=2.0.0
sqlalchemy>=2.0.0
alembic>=1.11.0
python-dotenv>=1.0.0
ruff>=0.0.270