Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 28 additions & 0 deletions .github/workflows/security.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# CI workflow: runs the backend's security-marked pytest suite on every
# push and pull request so security regressions are caught before merge.
name: security

on:
  pull_request:
  push:

jobs:
  security-tests:
    runs-on: ubuntu-latest
    defaults:
      run:
        # All `run` steps execute from the backend package root,
        # where requirements.txt and the test suite live.
        working-directory: backend
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install backend dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install -r requirements.txt

      - name: Run security regression tests
        # Runs only tests tagged with the `security` pytest marker.
        run: python -m pytest -m security
1 change: 0 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@ htmlcov/
.nox/
coverage/
.hypothesis/
backend/tests

# SQLite database files
*.sqlite
Expand Down
122 changes: 75 additions & 47 deletions backend/app/api/v1/endpoints/auth.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,27 @@
logger = logging.getLogger(__name__)


def _token_debug_enabled() -> bool:
    """Return True when verbose token logging is permitted.

    Token previews and lengths are only ever logged in development-style
    environments; in production this gate keeps token material out of logs.
    """
    env_name = settings.APP_ENV.lower()
    return env_name in ("dev", "development", "test", "local")


def _token_preview(token: Optional[str], keep: int = 6) -> str:
if not token:
return "None"
if len(token) <= keep * 2:
return "***"
return f"{token[:keep]}...{token[-keep:]}"


def _require_dev_mode() -> None:
    """Hide debug-only endpoints outside development environments.

    Raises:
        HTTPException: 404 "Endpoint not available" when the app is not
            running in a dev-style environment, so the endpoint's existence
            is not revealed in production.

    Delegates the environment check to `_token_debug_enabled` so the set of
    dev-style environment names is defined in exactly one place and the two
    guards cannot drift apart.
    """
    if not _token_debug_enabled():
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Endpoint not available",
        )


def _log_auth_event_background(request: Request, event_type: str, success: bool, user_id: str = None, reason: str = None):
"""Schedule audit log write in background."""
import asyncio
Expand Down Expand Up @@ -57,6 +78,8 @@ async def _write():
def log_token_info(token, token_type="access", mask=True):
"""Log token information for debugging purposes."""
try:
if not _token_debug_enabled():
return
# Decode without verification to extract payload for logging
parts = token.split(".")
if len(parts) != 3:
Expand Down Expand Up @@ -89,10 +112,10 @@ def log_token_info(token, token_type="access", mask=True):
)

# Mask the token for security
if mask and token:
token_preview = f"{token[:10]}...{token[-5:]}" if len(token) > 15 else "***"
if mask:
token_preview = _token_preview(token, keep=10)
else:
token_preview = token
token_preview = "***"

logger.info(
f"{token_type.capitalize()} token info: "
Expand All @@ -106,15 +129,17 @@ def log_token_info(token, token_type="access", mask=True):
def get_cookie_options(path: str = "/") -> dict:
"""Get cookie options based on environment."""
# Determine if we're in development or production
is_dev = settings.APP_ENV.lower() == "dev"
is_dev = settings.APP_ENV.lower() in {"dev", "development", "test", "local"}
secure = settings.COOKIE_SECURE if settings.COOKIE_SECURE is not None else not is_dev
samesite = (settings.COOKIE_SAMESITE or "lax").lower()
if samesite == "none":
secure = True

return {
"key": "refresh_token",
"httponly": True, # Prevent JavaScript access
"secure": not is_dev, # Only set secure=True in production (HTTPS)
"samesite": (
"lax" if is_dev else "none"
), # Use 'lax' for dev, 'none' for production
"secure": secure,
"samesite": samesite,
"max_age": settings.REFRESH_TOKEN_EXPIRE_DAYS
* 24
* 60
Expand Down Expand Up @@ -190,6 +215,8 @@ def clear_refresh_token_systematically(response: Response, request: Request):
def log_token_validation(token: str, user_id: str, source: str, result: str):
"""Enhanced logging for token validation"""
try:
if not _token_debug_enabled():
return
# Import json and base64 here to ensure they're available in this scope
import json
import base64
Expand Down Expand Up @@ -362,8 +389,14 @@ async def login(

# Log detailed information about the token being stored
logger.info(f"Storing refresh token in database for user {user.email}")
logger.info(f"Token (first 10 chars): {refresh_token[:10]}")
logger.info(f"Token length: {len(refresh_token)}")
if _token_debug_enabled():
logger.debug(
"Refresh token stored",
extra={
"token_preview": _token_preview(refresh_token),
"token_length": len(refresh_token),
},
)
logger.info(f"Token expires: {user.refresh_token_expires}")

# Save the user with the new refresh token
Expand All @@ -375,12 +408,10 @@ async def login(
logger.info(f"Refresh token successfully stored in database")
else:
logger.error(f"Failed to store refresh token in database")
if updated_user:
logger.error(
f"Stored token (first 10 chars): {updated_user.refresh_token[:10] if updated_user.refresh_token else 'None'}"
)
logger.error(
f"Stored token length: {len(updated_user.refresh_token) if updated_user.refresh_token else 0}"
if updated_user and _token_debug_enabled():
logger.debug(
"Stored refresh token mismatch",
extra={"token_length": len(updated_user.refresh_token) if updated_user.refresh_token else 0},
)

# Set refresh token cookie
Expand Down Expand Up @@ -461,21 +492,22 @@ async def refresh_token(
"""Refresh access token using a valid refresh token from HTTP-only cookie."""
# EMERGENCY FIX: Wrap the entire function in a try-except block to ensure it always returns a response
try:
# Log all cookies for debugging
logger.info(f"Cookies in request: {request.cookies}")
if _token_debug_enabled():
logger.debug("Cookies received", extra={"cookie_keys": list(request.cookies.keys())})

# Get refresh token from cookie
refresh_token = request.cookies.get("refresh_token")

# DETAILED TOKEN LOGGING
if refresh_token:
logger.info(f"🔍 REFRESH TOKEN RECEIVED:")
logger.info(f" Token (first 50 chars): {refresh_token[:50]}...")
logger.info(f" Token (last 50 chars): ...{refresh_token[-50:]}")
logger.info(f" Token length: {len(refresh_token)}")
logger.info(f" Full token: {refresh_token}")
else:
logger.info("❌ NO REFRESH TOKEN IN REQUEST")
if refresh_token and _token_debug_enabled():
logger.debug(
"Refresh token received",
extra={
"token_preview": _token_preview(refresh_token, keep=8),
"token_length": len(refresh_token),
},
)
elif not refresh_token:
logger.info("No refresh token in request")

# NUCLEAR OPTION: Block the specific problematic token immediately
BLOCKED_TOKEN = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiOTExODgwMTMyNWQ0MDdmYWIzM2E2Zjc5YmQyNGRhZCIsInJlZnJlc2giOnRydWUsImlhdCI6MTc1NTQ2NjQxMC41MzM1MDEsImV4cCI6MTc1ODA0NDAxMH0.0lVRx8qHILYv3IaaaMWNLDdKx_5ANTp4vMiAGuC_Hzg"
Expand Down Expand Up @@ -744,14 +776,7 @@ async def refresh_token(
)

# Verify that the refresh token matches what's in the database
# Log detailed token information for debugging
logger.info(f"Comparing tokens:")
logger.info(
f"DB token (first 10 chars): {user.refresh_token[:10] if user.refresh_token else 'None'}"
)
logger.info(
f"Request token (first 10 chars): {refresh_token[:10] if refresh_token else 'None'}"
)
logger.info("Comparing refresh token against stored value")

# Check if the user has a refresh token in the database
if not user.refresh_token:
Expand All @@ -767,9 +792,6 @@ async def refresh_token(
logger.info(f"Updated user record with token from request")
elif user.refresh_token != refresh_token:
logger.warning(f"Token mismatch between database and request")
logger.warning(
f"DB token length: {len(user.refresh_token)}, Request token length: {len(refresh_token)}"
)

# For now, update the database with the token from the request
# This is a temporary fix to help diagnose the issue
Expand Down Expand Up @@ -867,8 +889,14 @@ async def refresh_token(
logger.info(
f"New refresh token generated and about to be stored in database"
)
logger.info(f"New token (first 10 chars): {new_refresh_token[:10]}")
logger.info(f"New token length: {len(new_refresh_token)}")
if _token_debug_enabled():
logger.debug(
"New refresh token metadata",
extra={
"token_preview": _token_preview(new_refresh_token),
"token_length": len(new_refresh_token),
},
)

# Save the user with the new refresh token
await user.save(db)
Expand All @@ -879,7 +907,6 @@ async def refresh_token(
logger.info(
"Continuing despite database error (tokens will still be returned to client)"
)
logger.info(f"New token length: {len(new_refresh_token)}")
logger.info(f"Token expires at: {expiry_time.isoformat()}")
logger.info(f"Token lifetime: {settings.REFRESH_TOKEN_EXPIRE_DAYS} days")

Expand All @@ -892,12 +919,10 @@ async def refresh_token(
logger.info(f"New refresh token successfully stored in database")
else:
logger.error(f"Failed to store new refresh token in database")
if updated_user:
logger.error(
f"Stored token (first 10 chars): {updated_user.refresh_token[:10] if updated_user.refresh_token else 'None'}"
)
logger.error(
f"Stored token length: {len(updated_user.refresh_token) if updated_user.refresh_token else 0}"
if updated_user and _token_debug_enabled():
logger.debug(
"Stored refresh token mismatch after rotation",
extra={"token_length": len(updated_user.refresh_token) if updated_user.refresh_token else 0},
)

# Set new refresh token cookie - use root path to ensure cookie is sent with all requests
Expand Down Expand Up @@ -1195,6 +1220,7 @@ async def update_password(
@router.post("/nuclear-clear-cookies")
async def nuclear_clear_cookies(request: Request, response: Response):
_require_dev_mode()
"""
Nuclear option: Clear ALL cookies with every possible domain/path combination.
This addresses shared development environment cookie pollution issues.
"""
Expand Down Expand Up @@ -1338,6 +1364,7 @@ async def force_logout_clear(request: Request, response: Response):
Force logout and aggressive cookie clearing with cache-busting headers.
This is the nuclear option for persistent cookie issues.
"""
_require_dev_mode()

logger.info("🚨 FORCE LOGOUT AND CLEAR INITIATED")

Expand Down Expand Up @@ -1488,6 +1515,7 @@ async def force_logout_clear(request: Request, response: Response):
@router.post("/test-cookie-setting")
async def test_cookie_setting(response: Response):
"""Test endpoint to verify cookie setting works"""
_require_dev_mode()

logger.info("🧪 Testing cookie setting...")

Expand Down
13 changes: 6 additions & 7 deletions backend/app/api/v1/endpoints/navigation_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -565,8 +565,8 @@ async def delete_navigation_route(
print(f"Attempting to delete route with ID: {route_id_str}")

# Get existing route using raw SQL to avoid ORM issues
query = text(f"SELECT * FROM navigation_routes WHERE id = '{route_id_str}'")
result = await db.execute(query)
query = text("SELECT * FROM navigation_routes WHERE id = :route_id")
result = await db.execute(query, {"route_id": route_id_str})
route_data = result.fetchone()

if not route_data:
Expand All @@ -584,8 +584,8 @@ async def delete_navigation_route(
)

# Check if route has associated pages using raw SQL
pages_query = text(f"SELECT COUNT(*) FROM pages WHERE navigation_route_id = '{route_id_str}'")
pages_result = await db.execute(pages_query)
pages_query = text("SELECT COUNT(*) FROM pages WHERE navigation_route_id = :route_id")
pages_result = await db.execute(pages_query, {"route_id": route_id_str})
page_count = pages_result.scalar()

if page_count > 0:
Expand All @@ -595,8 +595,8 @@ async def delete_navigation_route(
)

# Delete route using raw SQL
delete_query = text(f"DELETE FROM navigation_routes WHERE id = '{route_id_str}'")
await db.execute(delete_query)
delete_query = text("DELETE FROM navigation_routes WHERE id = :route_id")
await db.execute(delete_query, {"route_id": route_id_str})
await db.commit()

print(f"Successfully deleted route with ID: {route_id_str}")
Expand All @@ -612,4 +612,3 @@ async def delete_navigation_route(
detail=f"Failed to delete navigation route: {str(e)}"
)


Loading
Loading