Implement one-time secret sharing service API

parent: c4bb099eee
commit: 3b335fce18
README.md (151 lines changed)
@@ -1,3 +1,150 @@
-# FastAPI Application
-This is a FastAPI application bootstrapped by BackendIM, the AI-powered backend generation platform.

# One-Time Secret Sharing Service

A secure API backend for a one-time secret sharing service built with FastAPI and SQLite.

## Features

- Create and share secrets securely
- Secrets can only be viewed once
- Automatic deletion after viewing or expiration
- Customizable time-to-live (TTL) for secrets
- Background cleanup of expired secrets
- Encrypted storage of secrets

## API Endpoints

### Health Check

```
GET /health
```

Returns the health status of the API and database connection.

### Create a Secret

```
POST /api/v1/secrets
```

Create a new secret and get a token to access it.

**Request Body:**
```json
{
  "content": "Your secret message here",
  "ttl_hours": 24
}
```

`ttl_hours` is optional and defaults to 24 hours.

**Response:**
```json
{
  "token": "unique-token-id",
  "expires_at": "2023-07-28T10:00:00.000Z",
  "message": "Secret stored successfully"
}
```
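For example, a minimal client sketch using only the Python standard library (illustrative, not part of this commit; assumes the service is running locally on port 8000):

```python
# Sketch: create a secret with a 1-hour TTL and print the returned token.
import json
import urllib.request

payload = json.dumps({"content": "Your secret message here", "ttl_hours": 1}).encode()
req = urllib.request.Request(
    "http://localhost:8000/api/v1/secrets",
    data=payload,
    headers={"Content-Type": "application/json"},
    method="POST",
)
with urllib.request.urlopen(req) as resp:
    created = json.load(resp)

print(created["token"], created["expires_at"])
```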
### Retrieve a Secret

```
GET /api/v1/secrets/{token}
```

Retrieve a secret using its token. The secret is deleted immediately after retrieval.

**Response:**
```json
{
  "content": "Your secret message here",
  "message": "Secret retrieved successfully"
}
```

**Error Responses:**

- `404 Not Found` - Secret not found, already viewed, or expired
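A matching retrieval sketch (again standard library only; the token placeholder stands in for whatever the create call returned):

```python
# Sketch: read a secret once; a second request for the same token returns 404.
import json
import urllib.error
import urllib.request

token = "unique-token-id"  # placeholder: use the token returned when the secret was created
url = f"http://localhost:8000/api/v1/secrets/{token}"

try:
    with urllib.request.urlopen(url) as resp:
        print(json.load(resp)["content"])
except urllib.error.HTTPError as err:
    if err.code == 404:
        print("Secret not found, already viewed, or expired")
    else:
        raise
```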
## Setup and Installation

### Prerequisites

- Python 3.8+
- SQLite

### Environment Variables

The application uses the following environment variables:

- `SECRET_KEY`: Secret key for encryption (defaults to "development_secret_key" in development)

### Installation

1. Clone the repository
2. Create a virtual environment and activate it:
   ```bash
   python -m venv venv
   source venv/bin/activate  # On Windows: venv\Scripts\activate
   ```
3. Install dependencies:
   ```bash
   pip install -r requirements.txt
   ```

### Database Setup

Run Alembic migrations to set up the database:

```bash
alembic upgrade head
```

### Running the Application

Start the application with uvicorn:

```bash
uvicorn main:app --host 0.0.0.0 --port 8000 --reload
```

The API will be available at `http://localhost:8000`.

- API Documentation: `http://localhost:8000/docs`
- ReDoc Documentation: `http://localhost:8000/redoc`
- OpenAPI Specification: `http://localhost:8000/openapi.json`

## Security Considerations

- Secrets are encrypted before storage
- Secrets are automatically deleted after viewing
- Expired secrets are cleaned up regularly
- The service uses a TTL (time-to-live) mechanism to ensure secrets don't persist indefinitely

## Development

### Project Structure

```
.
├── alembic.ini               # Alembic configuration
├── app/                      # Application code
│   ├── api/                  # API endpoints
│   │   └── routes/           # Route definitions
│   ├── core/                 # Core application code
│   │   └── config.py         # Application configuration
│   ├── db/                   # Database related code
│   │   └── session.py        # Database session setup
│   ├── models/               # SQLAlchemy models
│   │   ├── base.py           # Base model
│   │   └── secret.py         # Secret model
│   ├── schemas/              # Pydantic schemas
│   │   └── secret.py         # Secret schemas
│   └── services/             # Business logic
│       ├── encryption.py     # Encryption service
│       └── secret_service.py # Secret service
├── main.py                   # Application entry point
├── migrations/               # Alembic migrations
│   └── versions/             # Migration scripts
├── README.md                 # This file
└── requirements.txt          # Python dependencies
```
alembic.ini (new file, 106 lines)

# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# SQLite URL using absolute path
sqlalchemy.url = sqlite:////app/storage/db/db.sqlite


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
app/__init__.py (new file, 1 line)

# App package

app/api/__init__.py (new file, 1 line)

# API package

app/api/routes/__init__.py (new file, 1 line)

# Routes package
app/api/routes/health.py (new file, 28 lines)

from fastapi import APIRouter, Depends, status
from sqlalchemy import text
from sqlalchemy.orm import Session
from app.db.session import get_db

router = APIRouter()


@router.get("/health", status_code=status.HTTP_200_OK)
def health_check(db: Session = Depends(get_db)):
    """
    Health check endpoint.
    Returns status of the API and database connection.
    """
    health_status = {
        "status": "healthy",
        "api": "up",
        "database": "up",
    }

    # Test database connection
    try:
        # Execute a simple query (wrapped in text(), as required by SQLAlchemy 2.0)
        db.execute(text("SELECT 1"))
    except Exception as e:
        health_status["status"] = "unhealthy"
        health_status["database"] = f"down: {str(e)}"

    return health_status
app/api/routes/secrets.py (new file, 75 lines)

from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks, status
from sqlalchemy.orm import Session
from app.db.session import get_db
from app.schemas.secret import SecretCreate, SecretResponse, SecretRetrieved
from app.services.secret_service import create_secret, retrieve_and_delete_secret, cleanup_expired_secrets

router = APIRouter()


@router.post("/secrets", response_model=SecretResponse)
def create_new_secret(
    secret: SecretCreate,
    background_tasks: BackgroundTasks,
    db: Session = Depends(get_db)
):
    """
    Create a new secret.

    Args:
        secret: The secret data to store
        background_tasks: FastAPI background tasks
        db: Database session

    Returns:
        The token to access the secret
    """
    # Add a background task to clean up expired secrets
    background_tasks.add_task(cleanup_expired_secrets, db)

    # Create the secret
    secret_obj = create_secret(
        db=db,
        content=secret.content,
        ttl_hours=secret.ttl_hours
    )

    # Return the token
    return {
        "token": secret_obj.id,
        "expires_at": secret_obj.expires_at,
        "message": "Secret stored successfully"
    }


@router.get("/secrets/{token}", response_model=SecretRetrieved)
def get_secret(token: str, background_tasks: BackgroundTasks, db: Session = Depends(get_db)):
    """
    Retrieve a secret by its token and delete it.

    Args:
        token: The secret token
        background_tasks: FastAPI background tasks
        db: Database session

    Returns:
        The secret content
    """
    # Add a background task to clean up expired secrets
    background_tasks.add_task(cleanup_expired_secrets, db)

    # Retrieve and delete the secret
    content, message = retrieve_and_delete_secret(db, token)

    # Check if retrieval was successful; None means not found, expired, or already
    # viewed (an explicit None check avoids treating an empty-string secret as a failure)
    if content is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=message
        )

    # Return the secret content
    return {
        "content": content,
        "message": message
    }
app/core/__init__.py (new file, 1 line)

# Core package

app/core/config.py (new file, 37 lines)

import os
from pathlib import Path
from pydantic import field_validator
from pydantic_settings import BaseSettings

# Project directories
ROOT_DIR = Path(__file__).parent.parent.parent.resolve()


class Settings(BaseSettings):
    # Project info
    PROJECT_NAME: str = "One-Time Secret Sharing Service"
    API_V1_STR: str = "/api/v1"

    # Database
    DB_DIR: Path = Path("/app") / "storage" / "db"
    SQLALCHEMY_DATABASE_URL: str = f"sqlite:///{DB_DIR}/db.sqlite"

    # Secret settings
    DEFAULT_SECRET_TTL_HOURS: int = 24
    MAX_SECRET_TTL_HOURS: int = 168  # 7 days

    # Security
    SECRET_KEY: str = os.environ.get("SECRET_KEY", "development_secret_key")

    @field_validator("DB_DIR")
    @classmethod
    def create_db_dir(cls, db_dir: Path) -> Path:
        # Ensure the database directory exists before the app touches it
        db_dir.mkdir(parents=True, exist_ok=True)
        return db_dir

    class Config:
        env_file = ".env"
        case_sensitive = True


# Create global settings instance
settings = Settings()
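Usage sketch (illustrative, not part of the commit): the SECRET_KEY default is read when app.core.config is first imported, so the environment variable has to be set before that import happens.

```python
# Illustrative only: override SECRET_KEY before the settings module is imported.
import os

os.environ["SECRET_KEY"] = "a-long-random-production-key"  # hypothetical value

from app.core.config import settings

print(settings.PROJECT_NAME)                                    # "One-Time Secret Sharing Service"
print(settings.SECRET_KEY == "a-long-random-production-key")    # True
```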
app/db/__init__.py (new file, 1 line)

# Database package

app/db/session.py (new file, 26 lines)

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from pathlib import Path


# Ensure DB directory exists
DB_DIR = Path("/app") / "storage" / "db"
DB_DIR.mkdir(parents=True, exist_ok=True)

SQLALCHEMY_DATABASE_URL = f"sqlite:///{DB_DIR}/db.sqlite"

engine = create_engine(
    SQLALCHEMY_DATABASE_URL,
    connect_args={"check_same_thread": False}  # Needed for SQLite
)

SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


# Dependency to get DB session
def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
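For one-off scripts outside a request, a small sketch (mine, not part of the commit) that mirrors what the get_db dependency does per request:

```python
# Illustrative only: open and close a session manually, as get_db() does per request.
from sqlalchemy import text

from app.db.session import SessionLocal

db = SessionLocal()
try:
    db.execute(text("SELECT 1"))  # simple connectivity check
finally:
    db.close()
```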
app/models/__init__.py (new file, 5 lines)

# Import models here for easy access
from app.models.base import Base as Base
from app.models.secret import Secret as Secret

__all__ = ["Base", "Secret"]

app/models/base.py (new file, 3 lines)

from sqlalchemy.orm import declarative_base

Base = declarative_base()
app/models/secret.py (new file, 57 lines)

import uuid
from datetime import datetime, timedelta
from sqlalchemy import Column, String, DateTime, Text, Boolean
from sqlalchemy.sql import func

from app.models.base import Base
from app.core.config import settings


class Secret(Base):
    """SQLAlchemy model for storing secrets."""
    __tablename__ = "secrets"

    # Primary key - unique identifier for the secret
    id = Column(String(36), primary_key=True, index=True, default=lambda: str(uuid.uuid4()))

    # The encrypted secret content
    content = Column(Text, nullable=False)

    # When the secret was created
    created_at = Column(DateTime, default=func.now(), nullable=False)

    # When the secret expires
    expires_at = Column(DateTime, nullable=False)

    # Whether the secret has been viewed
    is_viewed = Column(Boolean, default=False, nullable=False)

    @classmethod
    def create_with_ttl(cls, content: str, ttl_hours: int = None):
        """
        Create a new secret with TTL (time-to-live).

        Args:
            content: The secret content to store
            ttl_hours: Time-to-live in hours (default: settings.DEFAULT_SECRET_TTL_HOURS)

        Returns:
            A new Secret instance
        """
        if ttl_hours is None:
            ttl_hours = settings.DEFAULT_SECRET_TTL_HOURS

        # Ensure TTL doesn't exceed maximum
        ttl_hours = min(ttl_hours, settings.MAX_SECRET_TTL_HOURS)

        # Calculate expiration time
        expires_at = datetime.utcnow() + timedelta(hours=ttl_hours)

        return cls(
            content=content,
            expires_at=expires_at
        )

    def is_expired(self) -> bool:
        """Check if the secret is expired."""
        return datetime.utcnow() > self.expires_at
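A quick sketch of the model in isolation (illustrative; the real flow goes through the service layer, which encrypts the content first):

```python
# Illustrative only: build an unsaved Secret with a 2-hour TTL.
from app.models.secret import Secret

s = Secret.create_with_ttl("already-encrypted-content", ttl_hours=2)
print(s.expires_at)     # roughly two hours from now (UTC)
print(s.is_expired())   # False until expires_at passes
```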
app/schemas/__init__.py (new file, 6 lines)

# Import schemas for easy access
from app.schemas.secret import SecretCreate as SecretCreate
from app.schemas.secret import SecretResponse as SecretResponse
from app.schemas.secret import SecretRetrieved as SecretRetrieved

__all__ = ["SecretCreate", "SecretResponse", "SecretRetrieved"]

app/schemas/secret.py (new file, 38 lines)

from datetime import datetime
from typing import Optional
from pydantic import BaseModel, Field, field_validator

from app.core.config import settings


class SecretCreate(BaseModel):
    """Schema for creating a new secret."""
    content: str = Field(..., description="The secret content to store")
    ttl_hours: Optional[int] = Field(
        default=settings.DEFAULT_SECRET_TTL_HOURS,
        description="Time-to-live in hours"
    )

    @field_validator("ttl_hours")
    @classmethod
    def validate_ttl(cls, ttl_hours: Optional[int]) -> int:
        """Validate TTL: fall back to the default for null, reject non-positive values, clamp to the maximum."""
        if ttl_hours is None:
            return settings.DEFAULT_SECRET_TTL_HOURS

        if ttl_hours <= 0:
            raise ValueError("TTL must be greater than 0")

        if ttl_hours > settings.MAX_SECRET_TTL_HOURS:
            return settings.MAX_SECRET_TTL_HOURS

        return ttl_hours


class SecretResponse(BaseModel):
    """Schema for response after creating a secret."""
    token: str = Field(..., description="The token to access the secret")
    expires_at: datetime = Field(..., description="When the secret expires")
    message: str = Field("Secret stored successfully", description="Status message")


class SecretRetrieved(BaseModel):
    """Schema for retrieved secret."""
    content: str = Field(..., description="The secret content")
    message: str = Field("Secret retrieved successfully", description="Status message")
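A short sketch (mine, illustrative) of how the TTL validator behaves:

```python
# Illustrative only: oversized TTLs are clamped to MAX_SECRET_TTL_HOURS (168).
from app.schemas.secret import SecretCreate

payload = SecretCreate(content="hello", ttl_hours=500)
print(payload.ttl_hours)    # 168 - clamped to the 7-day maximum

default = SecretCreate(content="hello")
print(default.ttl_hours)    # 24 - the configured default
```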
app/services/__init__.py (new file, 1 line)

# Import services for easy access

app/services/encryption.py (new file, 45 lines)

from cryptography.fernet import Fernet
from base64 import urlsafe_b64encode
from app.core.config import settings


# Generate a key from the secret key
def get_fernet_key():
    """Generate a Fernet key from the application secret key."""
    # Convert secret key to bytes and ensure it's 32 bytes
    key_bytes = settings.SECRET_KEY.encode()
    # Pad or truncate to 32 bytes
    key_bytes = key_bytes.ljust(32, b'0')[:32]
    # Convert to URL-safe base64-encoded bytes (Fernet requirement)
    return urlsafe_b64encode(key_bytes)


# Initialize Fernet with the key
_fernet = Fernet(get_fernet_key())


def encrypt_secret(secret: str) -> str:
    """
    Encrypt a secret string.

    Args:
        secret: The secret string to encrypt

    Returns:
        The encrypted secret as a string
    """
    encrypted = _fernet.encrypt(secret.encode())
    return encrypted.decode()


def decrypt_secret(encrypted_secret: str) -> str:
    """
    Decrypt an encrypted secret string.

    Args:
        encrypted_secret: The encrypted secret string

    Returns:
        The decrypted secret as a string
    """
    decrypted = _fernet.decrypt(encrypted_secret.encode())
    return decrypted.decode()
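A round-trip sketch (illustrative, not part of the commit; the ciphertext differs on every call because Fernet includes a random IV and a timestamp):

```python
# Illustrative only: encrypt/decrypt round trip using the derived Fernet key.
from app.services.encryption import encrypt_secret, decrypt_secret

ciphertext = encrypt_secret("launch code: 0000")
print(ciphertext != "launch code: 0000")   # True - the stored form is opaque
print(decrypt_secret(ciphertext))           # "launch code: 0000"
```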
app/services/secret_service.py (new file, 107 lines)

from datetime import datetime
from typing import Optional, Tuple
from sqlalchemy.orm import Session
from app.models.secret import Secret
from app.services.encryption import encrypt_secret, decrypt_secret


def create_secret(db: Session, content: str, ttl_hours: Optional[int] = None) -> Secret:
    """
    Create a new secret in the database.

    Args:
        db: Database session
        content: Secret content to store
        ttl_hours: Time-to-live in hours

    Returns:
        The created Secret instance
    """
    # Encrypt the content
    encrypted_content = encrypt_secret(content)

    # Create the secret
    secret = Secret.create_with_ttl(content=encrypted_content, ttl_hours=ttl_hours)

    # Add to database
    db.add(secret)
    db.commit()
    db.refresh(secret)

    return secret


def get_secret_by_id(db: Session, secret_id: str) -> Optional[Secret]:
    """
    Get a secret by its ID.

    Args:
        db: Database session
        secret_id: Secret ID

    Returns:
        The Secret instance if found, None otherwise
    """
    return db.query(Secret).filter(Secret.id == secret_id).first()


def retrieve_and_delete_secret(db: Session, secret_id: str) -> Tuple[Optional[str], str]:
    """
    Retrieve a secret and then delete it.

    Args:
        db: Database session
        secret_id: Secret ID

    Returns:
        Tuple of (decrypted_content, message)
    """
    # Get the secret
    secret = get_secret_by_id(db, secret_id)

    # Check if it exists
    if not secret:
        return None, "Secret not found"

    # Check if it's expired
    if secret.is_expired():
        # Delete the expired secret
        db.delete(secret)
        db.commit()
        return None, "Secret has expired"

    # Check if it's already been viewed
    if secret.is_viewed:
        return None, "Secret has already been viewed"

    # Decrypt the content
    decrypted_content = decrypt_secret(secret.content)

    # Delete the secret
    db.delete(secret)
    db.commit()

    return decrypted_content, "Secret retrieved successfully"


def cleanup_expired_secrets(db: Session) -> int:
    """
    Delete all expired secrets from the database.

    Args:
        db: Database session

    Returns:
        Number of secrets deleted
    """
    now = datetime.utcnow()
    expired_secrets = db.query(Secret).filter(Secret.expires_at < now).all()

    count = len(expired_secrets)

    for secret in expired_secrets:
        db.delete(secret)

    db.commit()

    return count
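End-to-end sketch at the service layer (illustrative, not part of the commit; uses SessionLocal directly instead of the FastAPI dependency and assumes the tables already exist via `alembic upgrade head` or the create_all call in main.py):

```python
# Illustrative only: create a secret, read it once, then confirm it is gone.
from app.db.session import SessionLocal
from app.services.secret_service import create_secret, retrieve_and_delete_secret

db = SessionLocal()
try:
    stored = create_secret(db, "one-time password: 1234", ttl_hours=1)

    content, message = retrieve_and_delete_secret(db, stored.id)
    print(content, "-", message)   # one-time password: 1234 - Secret retrieved successfully

    content, message = retrieve_and_delete_secret(db, stored.id)
    print(content, "-", message)   # None - Secret not found
finally:
    db.close()
```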
main.py (new file, 35 lines)

import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.api.routes import health, secrets
from app.core.config import settings
from app.db.session import engine
from app.models import base

# Create database tables if they don't exist
base.Base.metadata.create_all(bind=engine)

app = FastAPI(
    title=settings.PROJECT_NAME,
    description="One-time Secret Sharing Service API",
    version="1.0.0",
    openapi_url="/openapi.json",
    docs_url="/docs",
    redoc_url="/redoc",
)

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include routers
app.include_router(health.router, tags=["health"])
app.include_router(secrets.router, prefix="/api/v1", tags=["secrets"])

if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
migrations/README (new file, 1 line)

Generic single-database configuration with SQLite.

migrations/env.py (new file, 86 lines)

from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# Import models to ensure they are registered with SQLAlchemy
from app.models.base import Base
# Import Secret model to ensure it's registered
# noqa: F401 is used to tell linters to ignore the unused import
from app.models.secret import Secret  # noqa: F401

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        is_sqlite = connection.dialect.name == 'sqlite'
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=is_sqlite,  # Key configuration for SQLite
            compare_type=True,
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
migrations/script.py.mako (new file, 24 lines)

"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
migrations/versions/initial_migration.py (new file, 39 lines)

"""Initial migration

Revision ID: 9daa2d43c98a
Revises:
Create Date: 2023-07-27 10:00:00.000000

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '9daa2d43c98a'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create secrets table
    op.create_table(
        'secrets',
        sa.Column('id', sa.String(36), primary_key=True, index=True),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.Column('expires_at', sa.DateTime(), nullable=False),
        sa.Column('is_viewed', sa.Boolean(), nullable=False, default=False),
    )

    # Create an index on expires_at for faster cleanup of expired secrets
    op.create_index('ix_secrets_expires_at', 'secrets', ['expires_at'])


def downgrade() -> None:
    # Drop the index
    op.drop_index('ix_secrets_expires_at', 'secrets')

    # Drop the table
    op.drop_table('secrets')
requirements.txt (new file, 10 lines)

fastapi>=0.96.0
uvicorn>=0.22.0
SQLAlchemy>=2.0.9
alembic>=1.10.4
python-dotenv>=1.0.0
pydantic>=2.0.0
pydantic-settings>=2.0.0  # required by app/core/config.py (pydantic v2 moved BaseSettings here)
ruff>=0.0.262
cryptography>=40.0.2
asyncio>=3.4.3
python-multipart>=0.0.6