Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
327aef1
Add pytest config and test fixtures for the backend
ambv Mar 16, 2026
95c7277
Add tests for health, commits, binaries, and environments endpoints
ambv Mar 16, 2026
77a1361
Add tests for benchmarks, diff, trends, and flamegraph endpoints
ambv Mar 16, 2026
3ae11f1
Add tests for upload and memray failure reporting endpoints
ambv Mar 16, 2026
cb94426
Add tests for token authentication
ambv Mar 16, 2026
6aa20e6
Add tests for public endpoints and logging utilities
ambv Mar 16, 2026
5a02cf0
Expand upload tests and fix coverage tracking
ambv Mar 16, 2026
888bcdc
Document how to run backend tests and describe the test architecture
ambv Mar 16, 2026
7d1142e
Add backend tests to CI, triggered only on backend/ changes
ambv Mar 16, 2026
bf30fa5
Add tests using production data fixtures
ambv Mar 16, 2026
78d2cce
Merge remote-tracking branch 'origin/main' into backend-tests
ambv Mar 16, 2026
3490416
Address review feedback and fix missing commit in upload endpoint
ambv Mar 16, 2026
2f212c7
Use asyncpg constraint_name for duplicate upload detection
ambv Mar 16, 2026
d9baef0
backend: remove deprecation warnings
pablogsal Mar 17, 2026
3b8dae5
backend: fix ruff issues
pablogsal Mar 17, 2026
bfdab4f
ci: run backend ruff
pablogsal Mar 17, 2026
ca0c66b
backend: remove unused imports
pablogsal Mar 17, 2026
27ba6e1
backend: clean up scripts
pablogsal Mar 17, 2026
01f9799
ci: improve backend change detection
pablogsal Mar 17, 2026
4cd552d
backend: clean up unused code paths
pablogsal Mar 17, 2026
0d8f838
tests: remove redundant asyncio markers
pablogsal Mar 17, 2026
712a943
backend: lock ruff in dev requirements
pablogsal Mar 17, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 39 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,45 @@ jobs:
exit 1
fi

# Lint and test the FastAPI backend. Every step after change detection is
# gated on steps.changes.outputs.backend, so the job is effectively a no-op
# unless backend/ files (or this workflow file) changed.
  backend-tests:
    name: Backend tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          # Full history is required so the arbitrary before/after SHAs can
          # be diffed in the change-detection step below.
          fetch-depth: 0
      - name: Check for backend changes
        id: changes
        run: |
          if [ "${{ github.event_name }}" = "push" ]; then
            # On push, diff the event's before/after range. If that fails
            # (e.g. force-push or new branch where `before` is unreachable),
            # fall back to the last commit; if even that fails, echo a
            # backend/ path so the tests run — conservative by design.
            CHANGED=$(git diff --name-only ${{ github.event.before }} ${{ github.sha }} 2>/dev/null \
              || git diff --name-only HEAD~1 HEAD 2>/dev/null \
              || echo "backend/")
          else
            # On pull requests, diff the PR base against its head.
            CHANGED=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }})
          fi
          # Trigger on any file under backend/ or on this workflow file itself.
          # Output is only written when true; an unset output fails the
          # `== 'true'` checks below, which skips the remaining steps.
          if echo "$CHANGED" | grep -Eq '^(backend/|\.github/workflows/ci\.yml$)'; then
            echo "backend=true" >> "$GITHUB_OUTPUT"
          fi
      - uses: actions/setup-python@v5
        if: steps.changes.outputs.backend == 'true'
        with:
          python-version: "3.13"
          cache: pip
          cache-dependency-path: backend/requirements-dev.txt
      - name: Install backend dependencies
        if: steps.changes.outputs.backend == 'true'
        working-directory: backend
        run: python -m pip install -r requirements-dev.txt
      - name: Run Ruff
        if: steps.changes.outputs.backend == 'true'
        working-directory: backend
        run: python -m ruff check .
      - name: Run tests
        if: steps.changes.outputs.backend == 'true'
        working-directory: backend
        run: python -m pytest tests/ -v --cov=app --cov-report=term-missing

frontend-lint:
name: Frontend Lint & Typecheck
runs-on: ubuntu-latest
Expand Down
22 changes: 17 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,16 +35,28 @@ Services start automatically with hot reload:
## Development Commands

### Testing

```bash
# Via Docker (recommended)
# Backend tests
docker compose -f docker-compose.dev.yml exec backend python -m pytest tests/ -v

# With coverage report
docker compose -f docker-compose.dev.yml exec backend python -m pytest tests/ --cov=app --cov-report=term-missing

# Frontend checks
docker compose -f docker-compose.dev.yml exec frontend npm run lint
docker compose -f docker-compose.dev.yml exec frontend npm run typecheck

# Or locally in the frontend directory
npm run lint # ESLint (must pass with zero errors)
npm run typecheck # TypeScript type checking
```

Backend tests use an in-memory SQLite database, independent of the
PostgreSQL instance used in development. Each test gets a fresh database
with empty tables. Fixtures in `backend/tests/conftest.py` provide
pre-built model instances (commits, binaries, environments, runs,
benchmark results, auth tokens) that tests can depend on as needed.
Requests go through `httpx.AsyncClient` with FastAPI's ASGI transport,
so the full request/response cycle (middleware, dependency injection,
validation) is exercised without a running server.

All of these checks run in CI on pushes to `main` and on pull requests.

### Populating Mock Data
Expand Down
2 changes: 2 additions & 0 deletions backend/.coveragerc
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# coverage.py configuration for the backend test suite.
[run]
# Trace greenlet-based concurrency so lines executed inside greenlet context
# switches are attributed correctly — presumably needed for SQLAlchemy's
# asyncio/greenlet bridge used by the async DB sessions (see coverage.py
# `concurrency` setting); without it async DB code shows as uncovered.
concurrency = greenlet
7 changes: 3 additions & 4 deletions backend/app/admin_auth.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,7 @@

from .database import get_database
from .models import AdminSession
from .oauth import github_oauth, GitHubUser
from .config import get_settings
from .oauth import GitHubUser

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -52,7 +51,7 @@ async def get_admin_session(
select(AdminSession).where(
and_(
AdminSession.session_token == session_token,
AdminSession.is_active == True,
AdminSession.is_active.is_(True),
AdminSession.expires_at > datetime.now(UTC).replace(tzinfo=None),
)
)
Expand All @@ -77,7 +76,7 @@ async def cleanup_expired_sessions(db: AsyncSession) -> None:
select(AdminSession).where(
and_(
AdminSession.expires_at <= datetime.now(UTC).replace(tzinfo=None),
AdminSession.is_active == True,
AdminSession.is_active.is_(True),
)
)
)
Expand Down
3 changes: 1 addition & 2 deletions backend/app/auth.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
"""Authentication utilities for the Memory Tracker API."""

from fastapi import Depends, HTTPException, status, Header
from fastapi import Depends, Header
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from sqlalchemy.ext.asyncio import AsyncSession
from typing import Annotated
import logging

from . import models, crud
from .database import get_database
Expand Down
3 changes: 1 addition & 2 deletions backend/app/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,8 @@
All settings are loaded from environment variables with sensible defaults.
"""

from typing import List, Optional, Union
from typing import List
from pydantic_settings import BaseSettings
from pydantic import field_validator
from functools import lru_cache


Expand Down
12 changes: 7 additions & 5 deletions backend/app/crud.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

from sqlalchemy import select, desc, and_, func, text
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload, joinedload, contains_eager
from typing import List, Optional, Dict, Any
from datetime import datetime, UTC
import logging
Expand Down Expand Up @@ -257,7 +256,7 @@ async def create_benchmark_result(
allocation_histogram=result.result_json.allocation_histogram,
total_allocated_bytes=result.result_json.total_allocated_bytes,
top_allocating_functions=[
func.dict() for func in result.result_json.top_allocating_functions
func.model_dump() for func in result.result_json.top_allocating_functions
],
flamegraph_html=result.flamegraph_html,
)
Expand Down Expand Up @@ -411,7 +410,10 @@ async def get_auth_token_by_token(
"""Get an auth token by its token value."""
result = await db.execute(
select(models.AuthToken).where(
and_(models.AuthToken.token == token, models.AuthToken.is_active == True)
and_(
models.AuthToken.token == token,
models.AuthToken.is_active.is_(True),
)
)
)
return result.scalars().first()
Expand Down Expand Up @@ -465,7 +467,7 @@ async def get_admin_users(db: AsyncSession) -> List[models.AdminUser]:
"""Get all admin users."""
result = await db.execute(
select(models.AdminUser)
.where(models.AdminUser.is_active == True)
.where(models.AdminUser.is_active.is_(True))
.order_by(models.AdminUser.added_at)
)
return result.scalars().all()
Expand All @@ -479,7 +481,7 @@ async def get_admin_user_by_username(
select(models.AdminUser).where(
and_(
models.AdminUser.github_username == username,
models.AdminUser.is_active == True,
models.AdminUser.is_active.is_(True),
)
)
)
Expand Down
9 changes: 3 additions & 6 deletions backend/app/database.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
from contextlib import asynccontextmanager
from typing import AsyncGenerator

import logging
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import OperationalError, StatementError
from .models import Base
from .config import get_settings
Expand Down Expand Up @@ -85,11 +87,6 @@ async def drop_tables():
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)


from contextlib import asynccontextmanager
from typing import AsyncGenerator


@asynccontextmanager
async def transaction_scope() -> AsyncGenerator[AsyncSession, None]:
"""
Expand Down
62 changes: 33 additions & 29 deletions backend/app/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
import uuid
import time
import logging
from contextlib import asynccontextmanager
from typing import AsyncIterator
from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware

Expand Down Expand Up @@ -31,13 +33,44 @@ def create_app(settings=None) -> FastAPI:
if settings is None:
settings = get_settings()

@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncIterator[None]:
# Configure logging using the app state before the app starts serving.
app.state.logging_manager.configure_logging()

# Disable uvicorn access logs to avoid duplication
uvicorn_logger = logging.getLogger("uvicorn.access")
uvicorn_logger.disabled = True

logger = get_logger("api.startup")
logger.info(
"Application starting up",
extra={
"log_level": settings.log_level,
"log_format": settings.log_format,
"api_version": settings.api_version,
},
)
await create_tables()
logger.info("Database tables created successfully")

# Ensure initial admin user exists
from .database import AsyncSessionLocal
from .crud import ensure_initial_admin

async with AsyncSessionLocal() as db:
await ensure_initial_admin(db, settings.admin_initial_username)

yield

# Create FastAPI instance
app = FastAPI(
title=settings.api_title,
version=settings.api_version,
docs_url="/api/docs",
redoc_url="/api/redoc",
openapi_url="/api/openapi.json",
lifespan=lifespan,
)

# Store dependencies in app state
Expand Down Expand Up @@ -133,35 +166,6 @@ async def log_requests(request: Request, call_next):

return response

# Configure startup event
@app.on_event("startup")
async def startup_event():
# Configure logging using the app state
app.state.logging_manager.configure_logging()

# Disable uvicorn access logs to avoid duplication
uvicorn_logger = logging.getLogger("uvicorn.access")
uvicorn_logger.disabled = True

logger = get_logger("api.startup")
logger.info(
"Application starting up",
extra={
"log_level": settings.log_level,
"log_format": settings.log_format,
"api_version": settings.api_version,
},
)
await create_tables()
logger.info("Database tables created successfully")

# Ensure initial admin user exists
from .database import AsyncSessionLocal
from .crud import ensure_initial_admin

async with AsyncSessionLocal() as db:
await ensure_initial_admin(db, settings.admin_initial_username)

# Include routers
app.include_router(health.router)
app.include_router(commits.router)
Expand Down
2 changes: 1 addition & 1 deletion backend/app/logging_utils.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""Logging utilities for sanitizing sensitive data."""

import re
from typing import Any, Dict, List, Union
from typing import Any, Dict, List

# Patterns for sensitive data
SENSITIVE_PATTERNS = [
Expand Down
3 changes: 1 addition & 2 deletions backend/app/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,7 @@
Boolean,
UniqueConstraint,
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy.orm import declarative_base, relationship
from datetime import datetime, UTC

Base = declarative_base()
Expand Down
4 changes: 2 additions & 2 deletions backend/app/oauth.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@

import secrets
import logging
from typing import Optional, Dict, Any
from typing import Optional
from authlib.integrations.httpx_client import AsyncOAuth2Client
from fastapi import HTTPException, Request
from fastapi import HTTPException
from pydantic import BaseModel
from sqlalchemy.ext.asyncio import AsyncSession

Expand Down
4 changes: 2 additions & 2 deletions backend/app/routers/binaries.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ async def get_binaries(db: AsyncSession = Depends(get_database)):

try:
binaries = await crud.get_binaries(db)
logger.info(f"Successfully retrieved binaries", extra={"count": len(binaries)})
logger.info("Successfully retrieved binaries", extra={"count": len(binaries)})

return [
schemas.Binary(
Expand All @@ -34,7 +34,7 @@ async def get_binaries(db: AsyncSession = Depends(get_database)):
for binary in binaries
]
except Exception as e:
logger.error(f"Failed to fetch binaries", extra={"error": str(e)})
logger.error("Failed to fetch binaries", extra={"error": str(e)})
raise HTTPException(status_code=500, detail="Failed to fetch binaries")


Expand Down
14 changes: 7 additions & 7 deletions backend/app/routers/commits.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,11 @@ async def get_commits(
db: AsyncSession = Depends(get_database),
):
logger = get_logger("api.commits")
logger.info(f"Fetching commits", extra={"skip": skip, "limit": limit})
logger.info("Fetching commits", extra={"skip": skip, "limit": limit})

try:
commits = await crud.get_commits(db, skip=skip, limit=limit)
logger.info(f"Successfully retrieved commits", extra={"count": len(commits)})
logger.info("Successfully retrieved commits", extra={"count": len(commits)})

return [
schemas.Commit(
Expand All @@ -41,23 +41,23 @@ async def get_commits(
for commit in commits
]
except Exception as e:
logger.error(f"Failed to fetch commits", extra={"error": str(e)})
logger.error("Failed to fetch commits", extra={"error": str(e)})
raise HTTPException(status_code=500, detail="Failed to fetch commits")


@router.get("/commits/{sha}", response_model=schemas.Commit)
async def get_commit(sha: str, db: AsyncSession = Depends(get_database)):
logger = get_logger("api.commits")
logger.info(f"Fetching commit by SHA", extra={"sha": sha})
logger.info("Fetching commit by SHA", extra={"sha": sha})

try:
commit = await crud.get_commit_by_sha(db, sha=sha)
if commit is None:
logger.warning(f"Commit not found", extra={"sha": sha})
logger.warning("Commit not found", extra={"sha": sha})
raise HTTPException(status_code=404, detail="Commit not found")

logger.info(
f"Successfully retrieved commit",
"Successfully retrieved commit",
extra={
"sha": commit.sha[:8],
"author": commit.author,
Expand All @@ -79,7 +79,7 @@ async def get_commit(sha: str, db: AsyncSession = Depends(get_database)):
except HTTPException:
raise
except Exception as e:
logger.error(f"Failed to fetch commit", extra={"sha": sha, "error": str(e)})
logger.error("Failed to fetch commit", extra={"sha": sha, "error": str(e)})
raise HTTPException(status_code=500, detail="Failed to fetch commit")


Expand Down
Loading
Loading