diff --git a/README.md b/README.md index e8acfba..de5505c 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,82 @@ -# FastAPI Application +# Task Manager API -This is a FastAPI application bootstrapped by BackendIM, the AI-powered backend generation platform. +A simple and efficient Task Manager API built with FastAPI and SQLite. + +## Features + +- Create, read, update, and delete tasks +- Filter tasks by completion status +- Set task priorities +- Set due dates for tasks +- Health check endpoint + +## Technology Stack + +- **Python**: 3.8+ +- **Web Framework**: FastAPI +- **ORM**: SQLAlchemy 2.0+ +- **Database**: SQLite +- **Migrations**: Alembic +- **Linting**: Ruff + +## Project Structure + +``` +taskmanagerapi/ +│ +├── alembic/ # Database migration scripts +│ └── versions/ # Migration versions +│ +├── app/ # Application module +│ ├── api/ # API endpoints +│ │ └── routes/ # API route handlers +│ ├── core/ # Core application code +│ ├── crud/ # CRUD operations +│ ├── db/ # Database setup and connections +│ ├── models/ # SQLAlchemy models +│ └── schemas/ # Pydantic models for validation +│ +├── main.py # Application entry point +└── requirements.txt # Project dependencies +``` + +## API Endpoints + +### Tasks + +- `GET /api/v1/tasks`: Get all tasks (with optional filtering) +- `POST /api/v1/tasks`: Create a new task +- `GET /api/v1/tasks/{task_id}`: Get a specific task +- `PUT /api/v1/tasks/{task_id}`: Update a task +- `DELETE /api/v1/tasks/{task_id}`: Delete a task + +### Health Check + +- `GET /health`: Check API health + +## Installation and Setup + +1. Clone the repository +2. Install dependencies: + ``` + pip install -r requirements.txt + ``` +3. Run database migrations: + ``` + alembic upgrade head + ``` +4. 
Start the application: + ``` + uvicorn main:app --reload + ``` + +## API Documentation + +FastAPI automatically generates interactive API documentation at: + +- `/docs`: Swagger UI +- `/redoc`: ReDoc + +## License + +MIT \ No newline at end of file diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..48cc44c --- /dev/null +++ b/alembic.ini @@ -0,0 +1,85 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# timezone to use when rendering the date +# within the migration file as well as the filename. +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +#truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat alembic/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = sqlite:////app/storage/db/db.sqlite + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks=black +# black.type=console_scripts +# black.entrypoint=black +# black.options=-l 79 + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S \ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..d08f1ec --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,78 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +from app.db.base import Base # noqa +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. 
By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() \ No newline at end of file diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..1e4564e --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} \ No newline at end of file diff --git a/alembic/versions/0001_create_tasks_table.py b/alembic/versions/0001_create_tasks_table.py new file mode 100644 index 0000000..a50d8df --- /dev/null +++ b/alembic/versions/0001_create_tasks_table.py @@ -0,0 +1,37 @@ +"""create tasks table + +Revision ID: 0001 +Revises: +Create Date: 2025-05-13 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '0001' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + 'task', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('title', sa.String(255), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('is_completed', sa.Boolean(), default=False), + sa.Column('priority', sa.Integer(), default=1), + sa.Column('due_date', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), onupdate=sa.func.now()), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_task_id'), 'task', ['id'], unique=False) + + +def downgrade(): + op.drop_index(op.f('ix_task_id'), table_name='task') + op.drop_table('task') \ No newline at end of file diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/api/__init__.py b/app/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/api/routes/__init__.py b/app/api/routes/__init__.py new file mode 100644 index 0000000..872881a --- /dev/null +++ b/app/api/routes/__init__.py @@ -0,0 +1,6 @@ +from fastapi import APIRouter + +from app.api.routes import tasks + 
from typing import List, Optional
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session

# BUG FIX: the original `from app import crud` followed by `crud.task.*`
# raises AttributeError at request time, because app/crud/__init__.py is
# empty and never imports the `task` submodule. Import the submodule
# directly so attribute access cannot fail.
from app.crud import task as task_crud
from app.db.session import get_db
from app.schemas.task import Task, TaskCreate, TaskUpdate

router = APIRouter()


@router.get("/", response_model=List[Task])
def read_tasks(
    is_completed: Optional[bool] = None,
    skip: int = 0,
    limit: int = 100,
    db: Session = Depends(get_db),
):
    """Retrieve tasks with optional filtering.

    - is_completed: when provided, only tasks matching this flag are returned.
    - skip / limit: standard offset pagination.
    """
    return task_crud.get_tasks(db, skip=skip, limit=limit, is_completed=is_completed)


@router.post("/", response_model=Task)
def create_task(
    task_in: TaskCreate,
    db: Session = Depends(get_db),
):
    """Create a new task from the validated request body."""
    return task_crud.create_task(db=db, task_in=task_in)


@router.get("/{task_id}", response_model=Task)
def read_task(
    task_id: int,
    db: Session = Depends(get_db),
):
    """Get a single task by ID.

    Raises:
        HTTPException(404): if no task with `task_id` exists.
    """
    task = task_crud.get_task(db=db, task_id=task_id)
    if not task:
        raise HTTPException(status_code=404, detail="Task not found")
    return task


@router.put("/{task_id}", response_model=Task)
def update_task(
    task_id: int,
    task_in: TaskUpdate,
    db: Session = Depends(get_db),
):
    """Update a task (partial update: only fields set on `task_in` change).

    Raises:
        HTTPException(404): if no task with `task_id` exists.
    """
    task = task_crud.get_task(db=db, task_id=task_id)
    if not task:
        raise HTTPException(status_code=404, detail="Task not found")

    return task_crud.update_task(db=db, task=task, task_in=task_in)


@router.delete("/{task_id}", response_model=Task)
def delete_task(
    task_id: int,
    db: Session = Depends(get_db),
):
    """Delete a task and return its last known state.

    Raises:
        HTTPException(404): if no task with `task_id` exists.
    """
    task = task_crud.get_task(db=db, task_id=task_id)
    if not task:
        raise HTTPException(status_code=404, detail="Task not found")

    # Serialize BEFORE deleting so the response still has the row's data.
    # pydantic v2: `from_orm` is deprecated — use `model_validate`
    # (requires `from_attributes = True` on the schema, which Task sets).
    task_data = Task.model_validate(task)
    task_crud.delete_task(db=db, task=task)

    return task_data
+ """ + task = crud.task.get_task(db=db, task_id=task_id) + if not task: + raise HTTPException(status_code=404, detail="Task not found") + + task_data = Task.from_orm(task) + crud.task.delete_task(db=db, task=task) + + return task_data \ No newline at end of file diff --git a/app/core/__init__.py b/app/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/core/config.py b/app/core/config.py new file mode 100644 index 0000000..57db8eb --- /dev/null +++ b/app/core/config.py @@ -0,0 +1,16 @@ +from pydantic_settings import BaseSettings +from pathlib import Path +from typing import Optional + + +class Settings(BaseSettings): + API_V1_STR: str = "/api/v1" + PROJECT_NAME: str = "Task Manager API" + + # Database settings + DB_DIR = Path("/app") / "storage" / "db" + DB_DIR.mkdir(parents=True, exist_ok=True) + DATABASE_URL: str = f"sqlite:///{DB_DIR}/db.sqlite" + + +settings = Settings() \ No newline at end of file diff --git a/app/crud/__init__.py b/app/crud/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/crud/task.py b/app/crud/task.py new file mode 100644 index 0000000..afc6e99 --- /dev/null +++ b/app/crud/task.py @@ -0,0 +1,54 @@ +from typing import List, Optional +from sqlalchemy.orm import Session + +from app.models.task import Task +from app.schemas.task import TaskCreate, TaskUpdate + + +def get_task(db: Session, task_id: int) -> Optional[Task]: + return db.query(Task).filter(Task.id == task_id).first() + + +def get_tasks( + db: Session, + skip: int = 0, + limit: int = 100, + is_completed: Optional[bool] = None +) -> List[Task]: + query = db.query(Task) + + if is_completed is not None: + query = query.filter(Task.is_completed == is_completed) + + return query.order_by(Task.created_at.desc()).offset(skip).limit(limit).all() + + +def create_task(db: Session, task_in: TaskCreate) -> Task: + db_task = Task( + title=task_in.title, + description=task_in.description, + is_completed=task_in.is_completed, + 
def update_task(db: Session, task: Task, task_in: TaskUpdate) -> Task:
    """Apply the fields set on `task_in` to `task` and persist the change.

    `exclude_unset=True` keeps this a partial update: fields the client
    omitted from the request body are left untouched on the row.
    """
    # pydantic v2: `.dict()` is deprecated — `model_dump()` is the
    # supported replacement with identical semantics here.
    update_data = task_in.model_dump(exclude_unset=True)

    for field, value in update_data.items():
        setattr(task, field, value)

    db.add(task)
    db.commit()
    # Refresh so server-maintained columns (updated_at) are current.
    db.refresh(task)
    return task


def delete_task(db: Session, task: Task) -> None:
    """Remove `task` from the database and commit immediately."""
    db.delete(task)
    db.commit()
class Task(Base):
    """SQLAlchemy model for a single to-do item.

    The table name (``task``) is derived automatically by the declarative
    base from the lowercased class name.
    """

    # Surrogate primary key; indexed explicitly to mirror the migration.
    id = Column(Integer, primary_key=True, index=True)

    # User-facing content.
    title = Column(String(255), nullable=False)
    description = Column(Text, nullable=True)

    # Workflow state.
    is_completed = Column(Boolean, default=False)
    priority = Column(Integer, default=1)  # 1 = Low, 2 = Medium, 3 = High
    due_date = Column(DateTime, nullable=True)

    # Bookkeeping timestamps maintained by the database server defaults;
    # updated_at is also bumped by the ORM on UPDATE via `onupdate`.
    created_at = Column(DateTime, server_default=func.now())
    updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now())
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import uvicorn

from app.api.routes import router as api_router
from app.core.config import settings

# (Removed unused `from pathlib import Path` — nothing in this module uses it.)

# Application instance; title pulled from the central settings object.
app = FastAPI(
    title=settings.PROJECT_NAME,
    description="Task Manager API",
    version="0.1.0",
    docs_url="/docs",
    redoc_url="/redoc",
)

# CORS: wide open for development convenience.
# NOTE(review): browsers reject `allow_origins=["*"]` combined with
# `allow_credentials=True` for credentialed requests — tighten the origin
# list (or drop credentials) before production use.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Mount all versioned API routes under /api/v1.
app.include_router(api_router, prefix=settings.API_V1_STR)


@app.get("/health", status_code=200)
def health_check():
    """Liveness probe: reports healthy whenever the process is serving."""
    return {"status": "healthy"}


if __name__ == "__main__":
    # Dev entry point; production deployments should run uvicorn directly.
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)