From 6755f427d5cbeb407630cd0d84cac64738dcd354 Mon Sep 17 00:00:00 2001 From: skosmel Date: Wed, 18 Mar 2026 18:31:07 +0100 Subject: [PATCH 1/2] Initial implementation by Claude Code (Opus) --- Dockerfile | 10 ++ alembic.ini | 38 +++++ alembic/env.py | 47 ++++++ alembic/script.py.mako | 26 ++++ .../44c2bc677b36_create_tasks_table.py | 40 +++++ app/__init__.py | 0 app/config.py | 10 ++ app/database.py | 16 ++ app/main.py | 11 ++ app/models.py | 38 +++++ app/routers/__init__.py | 0 app/routers/tasks.py | 138 ++++++++++++++++++ app/schemas.py | 52 +++++++ docker-compose.yml | 33 +++++ requirements.txt | 11 ++ tests/__init__.py | 0 tests/conftest.py | 51 +++++++ tests/test_create_task.py | 65 +++++++++ tests/test_dashboard.py | 60 ++++++++ tests/test_delete_task.py | 18 +++ tests/test_get_task.py | 15 ++ tests/test_list_filter.py | 48 ++++++ tests/test_search.py | 39 +++++ tests/test_status_transition.py | 54 +++++++ tests/test_update_task.py | 54 +++++++ 25 files changed, 874 insertions(+) create mode 100644 Dockerfile create mode 100644 alembic.ini create mode 100644 alembic/env.py create mode 100644 alembic/script.py.mako create mode 100644 alembic/versions/44c2bc677b36_create_tasks_table.py create mode 100644 app/__init__.py create mode 100644 app/config.py create mode 100644 app/database.py create mode 100644 app/main.py create mode 100644 app/models.py create mode 100644 app/routers/__init__.py create mode 100644 app/routers/tasks.py create mode 100644 app/schemas.py create mode 100644 docker-compose.yml create mode 100644 requirements.txt create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/test_create_task.py create mode 100644 tests/test_dashboard.py create mode 100644 tests/test_delete_task.py create mode 100644 tests/test_get_task.py create mode 100644 tests/test_list_filter.py create mode 100644 tests/test_search.py create mode 100644 tests/test_status_transition.py create mode 100644 tests/test_update_task.py
diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..534cbc6 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,10 @@ +FROM python:3.12-slim + +WORKDIR /app + +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..642a96b --- /dev/null +++ b/alembic.ini @@ -0,0 +1,38 @@ +[alembic] +script_location = alembic +prepend_sys_path = . +version_path_separator = os + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..a76d2f8 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,47 @@ +import os +from logging.config import fileConfig + +from alembic import context +from sqlalchemy import engine_from_config, pool + +from app.database import Base +from app.models import Task # noqa: F401 — ensure model is registered + +config = context.config + +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +config.set_main_option("sqlalchemy.url", os.environ["DATABASE_URL"]) + +target_metadata = Base.metadata + + +def run_migrations_offline() -> None: + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + connectable 
= engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/44c2bc677b36_create_tasks_table.py b/alembic/versions/44c2bc677b36_create_tasks_table.py new file mode 100644 index 0000000..368c74c --- /dev/null +++ b/alembic/versions/44c2bc677b36_create_tasks_table.py @@ -0,0 +1,40 @@ +"""create tasks table + +Revision ID: 44c2bc677b36 +Revises: +Create Date: 2026-03-18 18:23:24.746897 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '44c2bc677b36' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('tasks', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('title', sa.String(length=200), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('priority', sa.Enum('low', 'medium', 'high', name='taskpriority'), nullable=False), + sa.Column('status', sa.Enum('todo', 'in_progress', 'done', name='taskstatus'), nullable=False), + sa.Column('due_date', sa.Date(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('tasks') + # ### end Alembic commands ### diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/config.py b/app/config.py new file mode 100644 index 0000000..565e459 --- /dev/null +++ b/app/config.py @@ -0,0 +1,10 @@ +from pydantic_settings import BaseSettings + + +class Settings(BaseSettings): + database_url: str + + model_config = {"env_file": ".env"} + + +settings = Settings() diff --git a/app/database.py b/app/database.py new file mode 100644 index 0000000..96c53ff --- /dev/null +++ b/app/database.py @@ -0,0 +1,16 @@ +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker, declarative_base + +from app.config import settings + +engine = create_engine(settings.database_url) +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) +Base = declarative_base() + + +def get_db(): + db = SessionLocal() + try: + yield db + finally: + db.close() diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..f9d539f --- /dev/null +++ b/app/main.py @@ -0,0 +1,11 @@ +from fastapi import FastAPI + +from app.routers import tasks + +app = FastAPI(title="Task Manager", version="1.0.0") +app.include_router(tasks.router) + + +@app.get("/") +def 
health_check(): + return {"status": "ok"} diff --git a/app/models.py b/app/models.py new file mode 100644 index 0000000..3389263 --- /dev/null +++ b/app/models.py @@ -0,0 +1,38 @@ +import enum +import uuid +from datetime import UTC, datetime + +from sqlalchemy import Column, String, Text, Date, DateTime +from sqlalchemy import Enum as SAEnum +from sqlalchemy.dialects.postgresql import UUID + +from app.database import Base + + +def _utcnow(): + return datetime.now(UTC).replace(tzinfo=None) + + +class TaskStatus(str, enum.Enum): + todo = "todo" + in_progress = "in_progress" + done = "done" + + +class TaskPriority(str, enum.Enum): + low = "low" + medium = "medium" + high = "high" + + +class Task(Base): + __tablename__ = "tasks" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + title = Column(String(200), nullable=False) + description = Column(Text, nullable=True) + priority = Column(SAEnum(TaskPriority), nullable=False, default=TaskPriority.medium) + status = Column(SAEnum(TaskStatus), nullable=False, default=TaskStatus.todo) + due_date = Column(Date, nullable=True) + created_at = Column(DateTime, nullable=False, default=_utcnow) + updated_at = Column(DateTime, nullable=False, default=_utcnow, onupdate=_utcnow) diff --git a/app/routers/__init__.py b/app/routers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/routers/tasks.py b/app/routers/tasks.py new file mode 100644 index 0000000..b44d638 --- /dev/null +++ b/app/routers/tasks.py @@ -0,0 +1,138 @@ +from datetime import date +from typing import Optional +from uuid import UUID + +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlalchemy import func, or_ +from sqlalchemy.orm import Session + +from app.database import get_db +from app.models import Task, TaskPriority, TaskStatus +from app.schemas import ( + DashboardResponse, + StatusTransition, + TaskCreate, + TaskResponse, + TaskUpdate, +) + +router = APIRouter(prefix="/tasks", tags=["tasks"]) + 
+VALID_TRANSITIONS = { + TaskStatus.todo: TaskStatus.in_progress, + TaskStatus.in_progress: TaskStatus.done, +} + + +@router.post("", response_model=TaskResponse, status_code=201) +def create_task(task_in: TaskCreate, db: Session = Depends(get_db)): + task = Task(**task_in.model_dump()) + db.add(task) + db.commit() + db.refresh(task) + return task + + +@router.get("/search", response_model=list[TaskResponse]) +def search_tasks(q: str = Query(..., min_length=1), db: Session = Depends(get_db)): + pattern = f"%{q}%" + tasks = ( + db.query(Task) + .filter(or_(Task.title.ilike(pattern), Task.description.ilike(pattern))) + .all() + ) + return tasks + + +@router.get("/stats", response_model=DashboardResponse) +def get_stats(db: Session = Depends(get_db)): + total = db.query(func.count(Task.id)).scalar() + + status_rows = db.query(Task.status, func.count(Task.id)).group_by(Task.status).all() + by_status = {s.value: 0 for s in TaskStatus} + for status, count in status_rows: + by_status[status.value] = count + + priority_rows = ( + db.query(Task.priority, func.count(Task.id)).group_by(Task.priority).all() + ) + by_priority = {p.value: 0 for p in TaskPriority} + for priority, count in priority_rows: + by_priority[priority.value] = count + + overdue = ( + db.query(func.count(Task.id)) + .filter(Task.due_date < date.today(), Task.status != TaskStatus.done) + .scalar() + ) + + return DashboardResponse( + total=total, by_status=by_status, by_priority=by_priority, overdue=overdue + ) + + +@router.get("", response_model=list[TaskResponse]) +def list_tasks( + status: Optional[TaskStatus] = None, + priority: Optional[TaskPriority] = None, + db: Session = Depends(get_db), +): + query = db.query(Task) + if status is not None: + query = query.filter(Task.status == status) + if priority is not None: + query = query.filter(Task.priority == priority) + return query.all() + + +@router.get("/{task_id}", response_model=TaskResponse) +def get_task(task_id: UUID, db: Session = Depends(get_db)): 
+ task = db.query(Task).filter(Task.id == task_id).first() + if not task: + raise HTTPException(status_code=404, detail="Task not found") + return task + + +@router.put("/{task_id}", response_model=TaskResponse) +def update_task(task_id: UUID, task_in: TaskUpdate, db: Session = Depends(get_db)): + task = db.query(Task).filter(Task.id == task_id).first() + if not task: + raise HTTPException(status_code=404, detail="Task not found") + + update_data = task_in.model_dump(exclude_unset=True) + for field, value in update_data.items(): + setattr(task, field, value) + + db.commit() + db.refresh(task) + return task + + +@router.delete("/{task_id}", status_code=204) +def delete_task(task_id: UUID, db: Session = Depends(get_db)): + task = db.query(Task).filter(Task.id == task_id).first() + if not task: + raise HTTPException(status_code=404, detail="Task not found") + db.delete(task) + db.commit() + + +@router.patch("/{task_id}/status", response_model=TaskResponse) +def transition_status( + task_id: UUID, body: StatusTransition, db: Session = Depends(get_db) +): + task = db.query(Task).filter(Task.id == task_id).first() + if not task: + raise HTTPException(status_code=404, detail="Task not found") + + expected_next = VALID_TRANSITIONS.get(task.status) + if expected_next != body.status: + raise HTTPException( + status_code=400, + detail=f"Invalid transition: cannot move from '{task.status.value}' to '{body.status.value}'", + ) + + task.status = body.status + db.commit() + db.refresh(task) + return task diff --git a/app/schemas.py b/app/schemas.py new file mode 100644 index 0000000..04db3f6 --- /dev/null +++ b/app/schemas.py @@ -0,0 +1,52 @@ +from datetime import date, datetime +from typing import Optional +from uuid import UUID + +from pydantic import BaseModel, Field, field_validator + +from app.models import TaskPriority, TaskStatus + + +class TaskCreate(BaseModel): + title: str = Field(..., min_length=1, max_length=200) + description: Optional[str] = None + priority: 
TaskPriority = TaskPriority.medium + due_date: Optional[date] = None + + @field_validator("due_date") + @classmethod + def due_date_must_be_future(cls, v): + if v is not None and v <= date.today(): + raise ValueError("Due date must be in the future") + return v + + +class TaskUpdate(BaseModel): + title: Optional[str] = Field(None, min_length=1, max_length=200) + description: Optional[str] = None + priority: Optional[TaskPriority] = None + due_date: Optional[date] = None + + +class StatusTransition(BaseModel): + status: TaskStatus + + +class TaskResponse(BaseModel): + id: UUID + title: str + description: Optional[str] + priority: TaskPriority + status: TaskStatus + due_date: Optional[date] + created_at: datetime + updated_at: datetime + + model_config = {"from_attributes": True} + + +class DashboardResponse(BaseModel): + total: int + by_status: dict[str, int] + by_priority: dict[str, int] + overdue: int diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..90b5ffe --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,33 @@ +services: + db: + image: postgres:16-alpine + environment: + POSTGRES_USER: taskmanager + POSTGRES_PASSWORD: taskmanager + POSTGRES_DB: taskmanager + ports: + - "5433:5432" + volumes: + - pgdata:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U taskmanager"] + interval: 5s + timeout: 3s + retries: 5 + + app: + build: . 
+ ports: + - "8000:8000" + environment: + DATABASE_URL: postgresql://taskmanager:taskmanager@db:5432/taskmanager + depends_on: + db: + condition: service_healthy + volumes: + - .:/app + command: > + sh -c "alembic upgrade head && uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload" + +volumes: + pgdata: diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..665c2c0 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,11 @@ +fastapi>=0.115.0,<1.0.0 +uvicorn[standard]>=0.30.0,<1.0.0 +sqlalchemy>=2.0.0,<3.0.0 +psycopg2-binary>=2.9.0,<3.0.0 +alembic>=1.13.0,<2.0.0 +pydantic>=2.0.0,<3.0.0 +pydantic-settings>=2.0.0,<3.0.0 + +# Testing +pytest>=8.0.0,<9.0.0 +httpx>=0.27.0,<1.0.0 diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..79a3280 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,51 @@ +import os + +import pytest +from fastapi.testclient import TestClient +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker + +from app.database import Base, get_db +from app.main import app + +TEST_DATABASE_URL = os.environ.get( + "TEST_DATABASE_URL", + "postgresql://taskmanager:taskmanager@localhost:5433/taskmanager_test", +) + +engine = create_engine(TEST_DATABASE_URL) +TestingSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + + +@pytest.fixture(scope="session", autouse=True) +def create_tables(): + Base.metadata.create_all(bind=engine) + yield + Base.metadata.drop_all(bind=engine) + + +@pytest.fixture(autouse=True) +def db_session(): + connection = engine.connect() + transaction = connection.begin() + session = TestingSessionLocal(bind=connection) + + def override_get_db(): + try: + yield session + finally: + pass + + app.dependency_overrides[get_db] = override_get_db + + yield session + + session.close() + transaction.rollback() + connection.close() + 
app.dependency_overrides.clear() + + +@pytest.fixture +def client(): + return TestClient(app) diff --git a/tests/test_create_task.py b/tests/test_create_task.py new file mode 100644 index 0000000..2a1b5ff --- /dev/null +++ b/tests/test_create_task.py @@ -0,0 +1,65 @@ +from datetime import date, timedelta + + +def test_create_task_minimal(client): + response = client.post("/tasks", json={"title": "My task"}) + assert response.status_code == 201 + data = response.json() + assert data["title"] == "My task" + assert data["status"] == "todo" + assert data["priority"] == "medium" + assert data["description"] is None + assert data["due_date"] is None + assert "id" in data + assert "created_at" in data + + +def test_create_task_full(client): + future = (date.today() + timedelta(days=7)).isoformat() + response = client.post( + "/tasks", + json={ + "title": "Full task", + "description": "A detailed description", + "priority": "high", + "due_date": future, + }, + ) + assert response.status_code == 201 + data = response.json() + assert data["title"] == "Full task" + assert data["description"] == "A detailed description" + assert data["priority"] == "high" + assert data["due_date"] == future + + +def test_create_task_missing_title(client): + response = client.post("/tasks", json={}) + assert response.status_code == 422 + + +def test_create_task_empty_title(client): + response = client.post("/tasks", json={"title": ""}) + assert response.status_code == 422 + + +def test_create_task_title_too_long(client): + response = client.post("/tasks", json={"title": "a" * 201}) + assert response.status_code == 422 + + +def test_create_task_invalid_priority(client): + response = client.post("/tasks", json={"title": "Test", "priority": "urgent"}) + assert response.status_code == 422 + + +def test_create_task_due_date_in_past(client): + past = (date.today() - timedelta(days=1)).isoformat() + response = client.post("/tasks", json={"title": "Test", "due_date": past}) + assert 
response.status_code == 422 + + +def test_create_task_due_date_today(client): + today = date.today().isoformat() + response = client.post("/tasks", json={"title": "Test", "due_date": today}) + assert response.status_code == 422 diff --git a/tests/test_dashboard.py b/tests/test_dashboard.py new file mode 100644 index 0000000..e193351 --- /dev/null +++ b/tests/test_dashboard.py @@ -0,0 +1,60 @@ +from datetime import date, timedelta + + +def test_stats_empty(client): + response = client.get("/tasks/stats") + assert response.status_code == 200 + data = response.json() + assert data["total"] == 0 + assert data["by_status"] == {"todo": 0, "in_progress": 0, "done": 0} + assert data["by_priority"] == {"low": 0, "medium": 0, "high": 0} + assert data["overdue"] == 0 + + +def test_stats_with_tasks(client): + client.post("/tasks", json={"title": "T1", "priority": "high"}) + client.post("/tasks", json={"title": "T2", "priority": "low"}) + t3 = client.post("/tasks", json={"title": "T3", "priority": "medium"}) + task_id = t3.json()["id"] + client.patch(f"/tasks/{task_id}/status", json={"status": "in_progress"}) + + response = client.get("/tasks/stats") + data = response.json() + assert data["total"] == 3 + assert data["by_status"]["todo"] == 2 + assert data["by_status"]["in_progress"] == 1 + assert data["by_priority"]["high"] == 1 + assert data["by_priority"]["low"] == 1 + assert data["by_priority"]["medium"] == 1 + + +def test_stats_overdue(client, db_session): + from app.models import Task, TaskPriority, TaskStatus + + # Insert a task with a past due date directly (bypassing API validation) + overdue_task = Task( + title="Overdue", + priority=TaskPriority.high, + status=TaskStatus.todo, + due_date=date.today() - timedelta(days=1), + ) + db_session.add(overdue_task) + db_session.flush() + + # A done task with past due date should NOT count as overdue + done_task = Task( + title="Done overdue", + priority=TaskPriority.medium, + status=TaskStatus.done, + due_date=date.today() - 
timedelta(days=1), + ) + db_session.add(done_task) + db_session.flush() + + # A task with no due date is never overdue + client.post("/tasks", json={"title": "No due date"}) + + response = client.get("/tasks/stats") + data = response.json() + assert data["overdue"] == 1 + assert data["total"] == 3 diff --git a/tests/test_delete_task.py b/tests/test_delete_task.py new file mode 100644 index 0000000..f371c47 --- /dev/null +++ b/tests/test_delete_task.py @@ -0,0 +1,18 @@ +import uuid + + +def test_delete_task(client): + create = client.post("/tasks", json={"title": "To delete"}) + task_id = create.json()["id"] + + response = client.delete(f"/tasks/{task_id}") + assert response.status_code == 204 + + # Verify it's gone + get_response = client.get(f"/tasks/{task_id}") + assert get_response.status_code == 404 + + +def test_delete_not_found(client): + response = client.delete(f"/tasks/{uuid.uuid4()}") + assert response.status_code == 404 diff --git a/tests/test_get_task.py b/tests/test_get_task.py new file mode 100644 index 0000000..d99e6fa --- /dev/null +++ b/tests/test_get_task.py @@ -0,0 +1,15 @@ +import uuid + + +def test_get_task(client): + create = client.post("/tasks", json={"title": "Lookup task"}) + task_id = create.json()["id"] + + response = client.get(f"/tasks/{task_id}") + assert response.status_code == 200 + assert response.json()["title"] == "Lookup task" + + +def test_get_task_not_found(client): + response = client.get(f"/tasks/{uuid.uuid4()}") + assert response.status_code == 404 diff --git a/tests/test_list_filter.py b/tests/test_list_filter.py new file mode 100644 index 0000000..fc45c90 --- /dev/null +++ b/tests/test_list_filter.py @@ -0,0 +1,48 @@ +def test_list_tasks_empty(client): + response = client.get("/tasks") + assert response.status_code == 200 + assert response.json() == [] + + +def test_list_tasks(client): + client.post("/tasks", json={"title": "Task 1"}) + client.post("/tasks", json={"title": "Task 2"}) + response = client.get("/tasks") + 
assert response.status_code == 200 + assert len(response.json()) == 2 + + +def test_filter_by_status(client): + create = client.post("/tasks", json={"title": "Todo task"}) + task_id = create.json()["id"] + client.patch(f"/tasks/{task_id}/status", json={"status": "in_progress"}) + + client.post("/tasks", json={"title": "Another todo"}) + + response = client.get("/tasks", params={"status": "in_progress"}) + assert response.status_code == 200 + data = response.json() + assert len(data) == 1 + assert data[0]["title"] == "Todo task" + + +def test_filter_by_priority(client): + client.post("/tasks", json={"title": "High", "priority": "high"}) + client.post("/tasks", json={"title": "Low", "priority": "low"}) + + response = client.get("/tasks", params={"priority": "high"}) + assert response.status_code == 200 + data = response.json() + assert len(data) == 1 + assert data[0]["title"] == "High" + + +def test_filter_by_status_and_priority(client): + client.post("/tasks", json={"title": "High todo", "priority": "high"}) + client.post("/tasks", json={"title": "Low todo", "priority": "low"}) + + response = client.get("/tasks", params={"status": "todo", "priority": "high"}) + assert response.status_code == 200 + data = response.json() + assert len(data) == 1 + assert data[0]["title"] == "High todo" diff --git a/tests/test_search.py b/tests/test_search.py new file mode 100644 index 0000000..03b07a3 --- /dev/null +++ b/tests/test_search.py @@ -0,0 +1,39 @@ +def test_search_by_title(client): + client.post("/tasks", json={"title": "Deploy to production"}) + client.post("/tasks", json={"title": "Write tests"}) + + response = client.get("/tasks/search", params={"q": "deploy"}) + assert response.status_code == 200 + data = response.json() + assert len(data) == 1 + assert data[0]["title"] == "Deploy to production" + + +def test_search_by_description(client): + client.post( + "/tasks", + json={"title": "Task A", "description": "Fix the authentication bug"}, + ) + client.post("/tasks", 
json={"title": "Task B", "description": "Add logging"}) + + response = client.get("/tasks/search", params={"q": "authentication"}) + assert response.status_code == 200 + data = response.json() + assert len(data) == 1 + assert data[0]["title"] == "Task A" + + +def test_search_case_insensitive(client): + client.post("/tasks", json={"title": "URGENT Fix"}) + + response = client.get("/tasks/search", params={"q": "urgent"}) + assert response.status_code == 200 + assert len(response.json()) == 1 + + +def test_search_no_results(client): + client.post("/tasks", json={"title": "Something"}) + + response = client.get("/tasks/search", params={"q": "nonexistent"}) + assert response.status_code == 200 + assert response.json() == [] diff --git a/tests/test_status_transition.py b/tests/test_status_transition.py new file mode 100644 index 0000000..5c01349 --- /dev/null +++ b/tests/test_status_transition.py @@ -0,0 +1,54 @@ +import uuid + + +def test_todo_to_in_progress(client): + create = client.post("/tasks", json={"title": "Task"}) + task_id = create.json()["id"] + + response = client.patch(f"/tasks/{task_id}/status", json={"status": "in_progress"}) + assert response.status_code == 200 + assert response.json()["status"] == "in_progress" + + +def test_in_progress_to_done(client): + create = client.post("/tasks", json={"title": "Task"}) + task_id = create.json()["id"] + + client.patch(f"/tasks/{task_id}/status", json={"status": "in_progress"}) + response = client.patch(f"/tasks/{task_id}/status", json={"status": "done"}) + assert response.status_code == 200 + assert response.json()["status"] == "done" + + +def test_todo_to_done_invalid(client): + create = client.post("/tasks", json={"title": "Task"}) + task_id = create.json()["id"] + + response = client.patch(f"/tasks/{task_id}/status", json={"status": "done"}) + assert response.status_code == 400 + + +def test_done_to_in_progress_invalid(client): + create = client.post("/tasks", json={"title": "Task"}) + task_id = 
create.json()["id"] + + client.patch(f"/tasks/{task_id}/status", json={"status": "in_progress"}) + client.patch(f"/tasks/{task_id}/status", json={"status": "done"}) + response = client.patch(f"/tasks/{task_id}/status", json={"status": "in_progress"}) + assert response.status_code == 400 + + +def test_in_progress_to_todo_invalid(client): + create = client.post("/tasks", json={"title": "Task"}) + task_id = create.json()["id"] + + client.patch(f"/tasks/{task_id}/status", json={"status": "in_progress"}) + response = client.patch(f"/tasks/{task_id}/status", json={"status": "todo"}) + assert response.status_code == 400 + + +def test_transition_not_found(client): + response = client.patch( + f"/tasks/{uuid.uuid4()}/status", json={"status": "in_progress"} + ) + assert response.status_code == 404 diff --git a/tests/test_update_task.py b/tests/test_update_task.py new file mode 100644 index 0000000..814d5fc --- /dev/null +++ b/tests/test_update_task.py @@ -0,0 +1,54 @@ +import uuid +from datetime import date, timedelta + + +def test_update_title(client): + create = client.post("/tasks", json={"title": "Original"}) + task_id = create.json()["id"] + + response = client.put(f"/tasks/{task_id}", json={"title": "Updated"}) + assert response.status_code == 200 + assert response.json()["title"] == "Updated" + + +def test_update_description(client): + create = client.post("/tasks", json={"title": "Task"}) + task_id = create.json()["id"] + + response = client.put(f"/tasks/{task_id}", json={"description": "New desc"}) + assert response.status_code == 200 + assert response.json()["description"] == "New desc" + assert response.json()["title"] == "Task" # unchanged + + +def test_update_priority(client): + create = client.post("/tasks", json={"title": "Task"}) + task_id = create.json()["id"] + + response = client.put(f"/tasks/{task_id}", json={"priority": "high"}) + assert response.status_code == 200 + assert response.json()["priority"] == "high" + + +def test_update_due_date(client): + 
create = client.post("/tasks", json={"title": "Task"}) + task_id = create.json()["id"] + + future = (date.today() + timedelta(days=30)).isoformat() + response = client.put(f"/tasks/{task_id}", json={"due_date": future}) + assert response.status_code == 200 + assert response.json()["due_date"] == future + + +def test_update_not_found(client): + response = client.put(f"/tasks/{uuid.uuid4()}", json={"title": "Nope"}) + assert response.status_code == 404 + + +def test_update_does_not_change_status(client): + create = client.post("/tasks", json={"title": "Task"}) + task_id = create.json()["id"] + + # PUT body with no status field — status should remain "todo" + response = client.put(f"/tasks/{task_id}", json={"title": "Updated"}) + assert response.json()["status"] == "todo" From 89e66f82748578438b49296d2f043f28a8f3a656 Mon Sep 17 00:00:00 2001 From: skosmel Date: Wed, 18 Mar 2026 18:38:11 +0100 Subject: [PATCH 2/2] CLAUDE.md --- CLAUDE.md | 50 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..735a3cb --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,50 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 
+ +## Commands + +```bash +# Start services (PostgreSQL + app with auto-migration) +docker compose up + +# Run full test suite (requires test DB and venv) +source .venv/bin/activate +DATABASE_URL=postgresql://taskmanager:taskmanager@localhost:5433/taskmanager \ +TEST_DATABASE_URL=postgresql://taskmanager:taskmanager@localhost:5433/taskmanager_test \ +pytest tests/ -v + +# Run a single test +pytest tests/test_create_task.py::test_create_task_minimal -v + +# Generate a new migration after model changes +DATABASE_URL=postgresql://taskmanager:taskmanager@localhost:5433/taskmanager \ +alembic revision --autogenerate -m "description" + +# Apply migrations +DATABASE_URL=postgresql://taskmanager:taskmanager@localhost:5433/taskmanager \ +alembic upgrade head +``` + +Database credentials: `taskmanager`/`taskmanager`, DB name `taskmanager`, Docker-exposed on port **5433** (not 5432). Test DB: `taskmanager_test` on same server. + +## Architecture + +FastAPI REST API with SQLAlchemy ORM and PostgreSQL. Single domain entity: **Task**. + +- `app/main.py` — FastAPI app, includes the tasks router +- `app/models.py` — `Task` SQLAlchemy model with `TaskStatus` and `TaskPriority` enums +- `app/schemas.py` — Pydantic v2 request/response schemas with `from_attributes` for ORM mapping +- `app/routers/tasks.py` — all 8 endpoints in a single router (prefix `/tasks`) +- `app/database.py` — engine, session factory, `get_db()` dependency +- `app/config.py` — reads `DATABASE_URL` from environment via pydantic-settings + +**Route ordering matters:** `/tasks/search` and `/tasks/stats` are defined before `/tasks/{task_id}` to prevent FastAPI from parsing literal paths as UUID parameters. + +**Status transitions** follow a strict state machine (`VALID_TRANSITIONS` dict): `todo → in_progress → done`. No skipping, no backwards. Enforced in `PATCH /tasks/{task_id}/status`. + +**Partial updates** use `model_dump(exclude_unset=True)` so only client-supplied fields are changed. 
+ +## Testing + +Tests use a real PostgreSQL database (not SQLite) because the app uses PG-specific features (UUID type, ILIKE, ENUM). Each test runs in a transaction that rolls back automatically (`conftest.py` overrides `get_db` with a transactional session). The `db_session` fixture is available for direct ORM access when bypassing API validation (e.g., inserting overdue tasks with past due dates).