Fix requirements.txt issue and lint code

- Add requirements.txt file to git
- Add project files to git
- Fix linting issues in task.py and env.py
- Update SQLAlchemy queries to use `.is_(False)` instead of '== False' (E712)
- Fix import ordering in env.py
This commit is contained in:
Automated Action 2025-06-06 10:44:18 +00:00
parent eed1762719
commit 73c662b24c
27 changed files with 676 additions and 0 deletions

85
alembic.ini Normal file
View File

@ -0,0 +1,85 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = migrations
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat migrations/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# SQLite URL - using absolute path
sqlalchemy.url = sqlite:////app/storage/db/db.sqlite
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

0
app/__init__.py Normal file
View File

0
app/api/__init__.py Normal file
View File

0
app/api/v1/__init__.py Normal file
View File

11
app/api/v1/api.py Normal file
View File

@ -0,0 +1,11 @@
from fastapi import APIRouter
from app.core.config import settings
# Top-level v1 router; every endpoint router is mounted under API_V1_STR.
api_router = APIRouter(prefix=settings.API_V1_STR)

# Import and include routers for different resources
# Example: from app.api.v1.endpoints import tasks
# api_router.include_router(tasks.router, prefix="/tasks", tags=["tasks"])
# We'll add the tasks router implementation later

0
app/core/__init__.py Normal file
View File

22
app/core/config.py Normal file
View File

@ -0,0 +1,22 @@
import os
from pathlib import Path
from typing import List, Optional
from pydantic_settings import BaseSettings
class Settings(BaseSettings):
    """Application configuration, loaded from environment variables / .env."""

    # Base settings
    PROJECT_NAME: str = "Task Manager API"
    API_V1_STR: str = "/api/v1"

    # Database settings
    # Directory holding the SQLite database file (consumed by app/db/session.py).
    DB_DIR: Path = Path("/app/storage/db")

    class Config:
        # Environment variable names must match field names exactly.
        case_sensitive = True
        env_file = ".env"


# Single shared settings instance for the whole application.
settings = Settings()

# Ensure DB directory exists
# NOTE(review): this runs at import time — side effect on module import.
settings.DB_DIR.mkdir(parents=True, exist_ok=True)

19
app/crud/__init__.py Normal file
View File

@ -0,0 +1,19 @@
from app.crud.task import (
get_task,
get_tasks,
get_tasks_count,
create_task,
update_task,
delete_task,
permanently_delete_task,
)
__all__ = [
"get_task",
"get_tasks",
"get_tasks_count",
"create_task",
"update_task",
"delete_task",
"permanently_delete_task",
]

141
app/crud/task.py Normal file
View File

@ -0,0 +1,141 @@
from datetime import datetime
from typing import List, Optional, Union, Dict, Any
from sqlalchemy import and_, desc, asc
from sqlalchemy.orm import Session
from app.models.task import Task, TaskStatus
from app.schemas.task import TaskCreate, TaskUpdate
def get_task(db: Session, task_id: int) -> Optional[Task]:
    """Return the task with *task_id*, or None if missing or soft-deleted.

    Args:
        db: Active SQLAlchemy session.
        task_id: Primary key of the task to fetch.
    """
    # .is_(False) instead of "== False": identical SQL, but idiomatic
    # SQLAlchemy and clean under flake8/ruff rule E712.
    return (
        db.query(Task)
        .filter(Task.id == task_id, Task.is_deleted.is_(False))
        .first()
    )
def get_tasks(
    db: Session,
    skip: int = 0,
    limit: int = 100,
    status: Optional[TaskStatus] = None,
    search: Optional[str] = None,
    sort_by: str = "created_at",
    sort_order: str = "desc"
) -> List[Task]:
    """Return a page of non-deleted tasks with filtering, sorting, pagination.

    Args:
        db: Active SQLAlchemy session.
        skip: Pagination offset (number of rows to skip).
        limit: Maximum number of rows to return.
        status: If given, restrict to tasks with this status.
        search: If given, case-insensitive substring match on title or
            description.
        sort_by: Name of the Task column to sort on.
        sort_order: "asc" for ascending; anything else sorts descending.

    Raises:
        AttributeError: If *sort_by* is not a column on Task.
    """
    # .is_(False) instead of "== False" (E712 / SQLAlchemy idiom).
    query = db.query(Task).filter(Task.is_deleted.is_(False))

    # Apply status filter if provided
    if status:
        query = query.filter(Task.status == status)

    # Apply search filter if provided (empty string deliberately skips it)
    if search:
        query = query.filter(
            Task.title.ilike(f"%{search}%") | Task.description.ilike(f"%{search}%")
        )

    # Apply sorting
    direction = asc if sort_order.lower() == "asc" else desc
    query = query.order_by(direction(getattr(Task, sort_by)))

    # Apply pagination
    return query.offset(skip).limit(limit).all()
def get_tasks_count(
    db: Session,
    status: Optional[TaskStatus] = None,
    search: Optional[str] = None,
) -> int:
    """Return the number of non-deleted tasks matching the given filters.

    Uses the same filter semantics as get_tasks so counts line up with
    paginated results.
    """
    # .is_(False) instead of "== False" (E712 / SQLAlchemy idiom).
    query = db.query(Task).filter(Task.is_deleted.is_(False))

    # Apply status filter if provided
    if status:
        query = query.filter(Task.status == status)

    # Apply search filter if provided (empty string deliberately skips it)
    if search:
        query = query.filter(
            Task.title.ilike(f"%{search}%") | Task.description.ilike(f"%{search}%")
        )

    return query.count()
def create_task(db: Session, task: TaskCreate) -> Task:
    """Persist a new task built from the incoming schema and return it."""
    new_task = Task(
        title=task.title,
        description=task.description,
        status=task.status,
        priority=task.priority,
        due_date=task.due_date,
    )
    db.add(new_task)
    db.commit()
    # Reload so server-side defaults (id, timestamps) are populated.
    db.refresh(new_task)
    return new_task
def update_task(db: Session, task_id: int, task_update: TaskUpdate) -> Optional[Task]:
    """Apply a partial update to a task.

    Args:
        db: Active SQLAlchemy session.
        task_id: Primary key of the task to update.
        task_update: Fields to change; unset fields are left untouched.

    Returns:
        The updated task, or None if no non-deleted task has *task_id*.
    """
    db_task = get_task(db, task_id)
    if not db_task:
        return None

    # Only fields the caller explicitly set. model_dump() is the pydantic v2
    # spelling of the deprecated .dict() — the project pins pydantic>=2.
    update_data = task_update.model_dump(exclude_unset=True)

    # Record completion time on the first transition into DONE.
    if (
        update_data.get("status") == TaskStatus.DONE
        and db_task.status != TaskStatus.DONE
    ):
        update_data["completed_at"] = datetime.utcnow()

    # Apply updates
    for key, value in update_data.items():
        setattr(db_task, key, value)

    db.commit()
    db.refresh(db_task)
    return db_task
def delete_task(db: Session, task_id: int) -> Optional[Task]:
    """Soft-delete: flag the task as deleted and return it (None if absent)."""
    task = get_task(db, task_id)
    if task is None:
        return None
    task.is_deleted = True
    db.commit()
    return task
def permanently_delete_task(db: Session, task_id: int) -> bool:
    """Hard-delete a task row from the database.

    Unlike get_task-based operations, this also finds soft-deleted rows.
    Returns True if a row was removed, False if no such task exists.
    """
    target = db.query(Task).filter(Task.id == task_id).first()
    if target is None:
        return False
    db.delete(target)
    db.commit()
    return True

0
app/db/__init__.py Normal file
View File

3
app/db/base.py Normal file
View File

@ -0,0 +1,3 @@
# Import all the models to ensure they are registered with SQLAlchemy
from app.db.base_class import Base # noqa
from app.models.task import Task # noqa

14
app/db/base_class.py Normal file
View File

@ -0,0 +1,14 @@
from typing import Any
from sqlalchemy.ext.declarative import as_declarative, declared_attr
@as_declarative()
class Base:
    """Declarative base for all ORM models.

    Subclasses get their table name derived automatically from the
    lower-cased class name (e.g. Task -> "task").
    """

    # Populated by SQLAlchemy on concrete model subclasses.
    id: Any
    __name__: str

    # Generate __tablename__ automatically based on class name
    @declared_attr
    def __tablename__(cls) -> str:
        return cls.__name__.lower()

27
app/db/session.py Normal file
View File

@ -0,0 +1,27 @@
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from app.core.config import settings
# Ensure DB directory exists
settings.DB_DIR.mkdir(parents=True, exist_ok=True)

# SQLite file lives inside the configured storage directory.
SQLALCHEMY_DATABASE_URL = f"sqlite:///{settings.DB_DIR}/db.sqlite"

engine = create_engine(
    SQLALCHEMY_DATABASE_URL,
    # SQLite forbids cross-thread connection use by default; FastAPI may
    # run dependency teardown on a different thread, so relax the check.
    connect_args={"check_same_thread": False}
)

SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# NOTE(review): a separate declarative Base also exists in
# app/db/base_class.py (the one the models use) — confirm this is needed.
Base = declarative_base()

# Dependency
def get_db():
    """FastAPI dependency: yield a DB session, always closing it afterwards."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()

3
app/models/__init__.py Normal file
View File

@ -0,0 +1,3 @@
from app.models.task import Task, TaskPriority, TaskStatus
__all__ = ["Task", "TaskPriority", "TaskStatus"]

20
app/models/base.py Normal file
View File

@ -0,0 +1,20 @@
from datetime import datetime
from typing import Any
from sqlalchemy import Column, DateTime
from sqlalchemy.ext.declarative import as_declarative, declared_attr
@as_declarative()
class Base:
    """Declarative base variant that adds created_at/updated_at columns.

    NOTE(review): app/models/task.py imports Base from app.db.base_class,
    not from here — confirm whether this module is actually used.
    """

    # Populated by SQLAlchemy on concrete model subclasses.
    id: Any
    __name__: str

    # Generate __tablename__ automatically based on class name
    @declared_attr
    def __tablename__(cls) -> str:
        return cls.__name__.lower()

    # Add created_at and updated_at columns to all models
    # Timestamps are naive UTC (datetime.utcnow).
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

33
app/models/task.py Normal file
View File

@ -0,0 +1,33 @@
from datetime import datetime
from typing import Optional
from enum import Enum as PyEnum
from sqlalchemy import Boolean, Column, DateTime, Enum, Integer, String, Text, ForeignKey
from sqlalchemy.orm import relationship
from app.db.base_class import Base
class TaskPriority(str, PyEnum):
    """Task priority levels; str-valued so they serialize cleanly to JSON."""

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
class TaskStatus(str, PyEnum):
    """Task workflow states; str-valued so they serialize cleanly to JSON."""

    TODO = "todo"
    IN_PROGRESS = "in_progress"
    DONE = "done"
class Task(Base):
    """ORM model for a task; table name "task" is derived from the class name."""

    id = Column(Integer, primary_key=True, index=True)
    title = Column(String(255), nullable=False, index=True)
    description = Column(Text, nullable=True)
    status = Column(Enum(TaskStatus), default=TaskStatus.TODO, nullable=False)
    priority = Column(Enum(TaskPriority), default=TaskPriority.MEDIUM, nullable=False)
    due_date = Column(DateTime, nullable=True)
    # Naive-UTC timestamps maintained automatically on insert/update.
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
    # Set when a task transitions to DONE (see app/crud/task.py).
    completed_at = Column(DateTime, nullable=True)
    # Soft-delete flag; CRUD read queries exclude rows where this is True.
    is_deleted = Column(Boolean, default=False, nullable=False)

16
app/schemas/__init__.py Normal file
View File

@ -0,0 +1,16 @@
from app.schemas.task import Task, TaskBase, TaskCreate, TaskInDB, TaskUpdate
from app.schemas.errors import HTTPError, HTTPValidationError, ValidationError
from app.schemas.pagination import PaginatedResponse, PaginationParams
__all__ = [
"Task",
"TaskBase",
"TaskCreate",
"TaskInDB",
"TaskUpdate",
"HTTPError",
"HTTPValidationError",
"ValidationError",
"PaginatedResponse",
"PaginationParams",
]

17
app/schemas/errors.py Normal file
View File

@ -0,0 +1,17 @@
from typing import Any, Dict, List, Optional
from pydantic import BaseModel, Field
class HTTPError(BaseModel):
    """Generic error response body: a single human-readable message."""

    detail: str = Field(..., description="Error message")


class ValidationError(BaseModel):
    """One field-level validation failure, mirroring FastAPI's 422 format."""

    loc: List[str] = Field(..., description="Location of validation error")
    msg: str = Field(..., description="Validation error message")
    type: str = Field(..., description="Validation error type")


class HTTPValidationError(BaseModel):
    """422 response body: the list of individual validation failures."""

    detail: List[ValidationError] = Field(..., description="Validation errors")

17
app/schemas/pagination.py Normal file
View File

@ -0,0 +1,17 @@
from typing import Generic, List, Optional, TypeVar
from pydantic import BaseModel, Field
# Item type carried by a paginated response.
T = TypeVar('T')


class PaginationParams(BaseModel):
    """Query parameters controlling offset/limit pagination."""

    skip: int = Field(0, ge=0, description="Number of items to skip")
    limit: int = Field(100, gt=0, le=1000, description="Maximum number of items to return")


class PaginatedResponse(BaseModel, Generic[T]):
    """Standard envelope for paginated list endpoints."""

    items: List[T] = Field(..., description="List of items")
    total: int = Field(..., description="Total number of items")
    skip: int = Field(..., description="Number of items skipped")
    limit: int = Field(..., description="Maximum number of items returned")

47
app/schemas/task.py Normal file
View File

@ -0,0 +1,47 @@
from datetime import datetime
from typing import Optional
from pydantic import BaseModel, Field
from app.models.task import TaskPriority, TaskStatus
# Base schema with common attributes
class TaskBase(BaseModel):
    """Fields shared by all task request/response schemas."""

    title: str = Field(..., min_length=1, max_length=255, description="Task title")
    description: Optional[str] = Field(None, description="Task description")
    status: TaskStatus = Field(default=TaskStatus.TODO, description="Task status")
    priority: TaskPriority = Field(default=TaskPriority.MEDIUM, description="Task priority")
    due_date: Optional[datetime] = Field(None, description="Task due date")


# Schema for creating a new task
class TaskCreate(TaskBase):
    """Request body for POST; identical to TaskBase."""

    pass


# Schema for updating an existing task
class TaskUpdate(BaseModel):
    """Partial-update body: every field optional; unset fields are ignored."""

    title: Optional[str] = Field(None, min_length=1, max_length=255, description="Task title")
    description: Optional[str] = Field(None, description="Task description")
    status: Optional[TaskStatus] = Field(None, description="Task status")
    priority: Optional[TaskPriority] = Field(None, description="Task priority")
    due_date: Optional[datetime] = Field(None, description="Task due date")
    is_deleted: Optional[bool] = Field(None, description="Mark task as deleted")


# Schema for task responses
class TaskInDB(TaskBase):
    """Task as stored in the database, including server-managed fields."""

    id: int
    created_at: datetime
    updated_at: datetime
    completed_at: Optional[datetime] = None
    is_deleted: bool = False

    class Config:
        from_attributes = True  # for Pydantic v2 compatibility


# Schema for task list responses
class Task(TaskInDB):
    """Response schema; separate name so the API surface can diverge later."""

    pass

39
main.py Normal file
View File

@ -0,0 +1,39 @@
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.core.config import settings
from app.api.v1.api import api_router
# FastAPI application instance; docs served at /docs and /redoc.
app = FastAPI(
    title=settings.PROJECT_NAME,
    openapi_url="/openapi.json",
    docs_url="/docs",
    redoc_url="/redoc",
)

# Set up CORS middleware
# NOTE(review): wildcard origins together with allow_credentials=True is
# rejected by browsers for credentialed requests — confirm intended policy.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Mount all versioned API routes.
app.include_router(api_router)
@app.get("/health", tags=["Health"])
def health_check():
    """Liveness probe: report that the service is up."""
    return {"status": "ok"}
if __name__ == "__main__":
    # Development entry point: run the ASGI server directly with auto-reload.
    uvicorn.run(
        "main:app",
        host="0.0.0.0",
        port=8000,
        reload=True,
    )

0
migrations/__init__.py Normal file
View File

81
migrations/env.py Normal file
View File

@ -0,0 +1,81 @@
import os
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# NOTE: imported after the Alembic config is initialised on purpose; the
# mid-file import (E402) is the standard pattern in alembic env.py files.
from app.db.base import Base

target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the context with just a URL — no Engine, and therefore no
    DBAPI required. Calls to context.execute() emit the given string to
    the script output instead of hitting a database.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        render_as_batch=True,  # batch mode is required for SQLite ALTERs
    )

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine from the [alembic] config section and associates a
    live connection with the migration context.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            # Batch mode is required for SQLite's limited ALTER TABLE.
            render_as_batch=(connection.dialect.name == 'sqlite'),
        )

        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic decides offline/online mode before loading this file.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

24
migrations/script.py.mako Normal file
View File

@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

View File

@ -0,0 +1,49 @@
"""initial migration
Revision ID: 0001
Revises:
Create Date: 2023-10-24
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0001'
down_revision = None  # first migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Create the task table and its indexes."""
    # Create the task table
    op.create_table(
        'task',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        # Enum value sets must stay in sync with app/models/task.py.
        sa.Column('status', sa.Enum('todo', 'in_progress', 'done', name='taskstatus'), nullable=False),
        sa.Column('priority', sa.Enum('low', 'medium', 'high', name='taskpriority'), nullable=False),
        sa.Column('due_date', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        sa.Column('is_deleted', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_task_id'), 'task', ['id'], unique=False)
    op.create_index(op.f('ix_task_title'), 'task', ['title'], unique=False)
def downgrade():
    """Drop the task table, its indexes, and (on PostgreSQL) its enum types."""
    # Drop indexes
    op.drop_index(op.f('ix_task_title'), table_name='task')
    op.drop_index(op.f('ix_task_id'), table_name='task')
    # Drop task table
    op.drop_table('task')
    # Drop enums — named ENUM types exist as standalone objects only on
    # PostgreSQL. The configured backend is SQLite (see alembic.ini), where
    # DROP TYPE is a syntax error, so guard by dialect.
    if op.get_bind().dialect.name == 'postgresql':
        op.execute('DROP TYPE taskstatus')
        op.execute('DROP TYPE taskpriority')

8
requirements.txt Normal file
View File

@ -0,0 +1,8 @@
fastapi>=0.104.0
uvicorn>=0.24.0
pydantic>=2.4.2
pydantic-settings>=2.0.3
sqlalchemy>=2.0.22
alembic>=1.12.0
python-dotenv>=1.0.0
ruff>=0.1.1