commit ace2167ebd12032adf7a52061272e131d396e20e
Author: vogonwann
Date:   Sat Jan 31 17:49:36 2026 +0100

    Initial commit

diff --git a/__pycache__/crud.cpython-314.pyc b/__pycache__/crud.cpython-314.pyc
new file mode 100644
index 0000000..45d377a
Binary files /dev/null and b/__pycache__/crud.cpython-314.pyc differ
diff --git a/__pycache__/db.cpython-314.pyc b/__pycache__/db.cpython-314.pyc
new file mode 100644
index 0000000..1647f5a
Binary files /dev/null and b/__pycache__/db.cpython-314.pyc differ
diff --git a/__pycache__/main.cpython-314.pyc b/__pycache__/main.cpython-314.pyc
new file mode 100644
index 0000000..cc0e1e5
Binary files /dev/null and b/__pycache__/main.cpython-314.pyc differ
diff --git a/__pycache__/models.cpython-314.pyc b/__pycache__/models.cpython-314.pyc
new file mode 100644
index 0000000..c02f746
Binary files /dev/null and b/__pycache__/models.cpython-314.pyc differ
diff --git a/__pycache__/schemas.cpython-314.pyc b/__pycache__/schemas.cpython-314.pyc
new file mode 100644
index 0000000..f8d9267
Binary files /dev/null and b/__pycache__/schemas.cpython-314.pyc differ
diff --git a/crud.py b/crud.py
new file mode 100644
index 0000000..fcc6e20
--- /dev/null
+++ b/crud.py
@@ -0,0 +1,125 @@
+from datetime import date
+from typing import List, Optional
+
+from sqlalchemy import and_, desc, select
+from sqlalchemy.orm import Session
+
+from models import Project, WorkLogEntry
+
+
+def create_project(db: Session, name: str, description: Optional[str]) -> Project:
+    project = Project(name=name, description=description)
+    db.add(project)
+    db.commit()
+    db.refresh(project)
+    return project
+
+
+def get_project(db: Session, project_id: int) -> Optional[Project]:
+    return db.get(Project, project_id)
+
+
+def get_project_by_name(db: Session, name: str) -> Optional[Project]:
+    stmt = select(Project).where(Project.name == name)
+    return db.execute(stmt).scalars().first()
+
+
+def list_projects(db: Session, limit: int, offset: int) -> List[Project]:
+    stmt = select(Project).order_by(desc(Project.id)).limit(limit).offset(offset)
+    return db.execute(stmt).scalars().all()
+
+
+def update_project(
+    db: Session,
+    project: Project,
+    name: Optional[str],
+    description: Optional[str],
+) -> Project:
+    if name is not None:
+        project.name = name
+    if description is not None:
+        project.description = description
+    db.commit()
+    db.refresh(project)
+    return project
+
+
+def delete_project(db: Session, project: Project) -> None:
+    db.delete(project)
+    db.commit()
+
+
+def create_work_log(
+    db: Session,
+    project_id: int,
+    log_date: date,
+    hours: float,
+    description: str,
+) -> WorkLogEntry:
+    entry = WorkLogEntry(
+        project_id=project_id,
+        date=log_date,
+        hours=hours,
+        description=description,
+    )
+    db.add(entry)
+    db.commit()
+    db.refresh(entry)
+    return entry
+
+
+def get_work_log(db: Session, log_id: int) -> Optional[WorkLogEntry]:
+    return db.get(WorkLogEntry, log_id)
+
+
+def list_work_logs(db: Session, limit: int, offset: int) -> List[WorkLogEntry]:
+    stmt = (
+        select(WorkLogEntry)
+        .order_by(desc(WorkLogEntry.date), desc(WorkLogEntry.id))
+        .limit(limit)
+        .offset(offset)
+    )
+    return db.execute(stmt).scalars().all()
+
+
+def list_work_logs_by_range(
+    db: Session,
+    start_date: date,
+    end_date: date,
+    limit: int,
+    offset: int,
+) -> List[WorkLogEntry]:
+    stmt = (
+        select(WorkLogEntry)
+        .where(and_(WorkLogEntry.date >= start_date, WorkLogEntry.date <= end_date))
+        .order_by(desc(WorkLogEntry.date), desc(WorkLogEntry.id))
+        .limit(limit)
+        .offset(offset)
+    )
+    return db.execute(stmt).scalars().all()
+
+
+def update_work_log(
+    db: Session,
+    entry: WorkLogEntry,
+    project_id: Optional[int],
+    log_date: Optional[date],
+    hours: Optional[float],
+    description: Optional[str],
+) -> WorkLogEntry:
+    if project_id is not None:
+        entry.project_id = project_id
+    if log_date is not None:
+        entry.date = log_date
+    if hours is not None:
+        entry.hours = hours
+    if description is not None:
+        entry.description = description
+    db.commit()
+    db.refresh(entry)
+    return entry
+
+
+def delete_work_log(db: Session, entry: WorkLogEntry) -> None:
+    db.delete(entry)
+    db.commit()
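
crud.py keeps persistence behind plain synchronous helpers that take an explicit Session, so they can be exercised without the HTTP layer. A minimal usage sketch, outside the commit itself; the project name, dates and hours are illustrative values only:

    from datetime import date

    from crud import create_project, create_work_log, list_work_logs_by_range
    from db import SessionLocal, engine
    from models import Base

    # Normally done by the startup hook in main.py below.
    Base.metadata.create_all(bind=engine)

    with SessionLocal() as db:
        project = create_project(db, "internal-tools", "Misc internal work")
        create_work_log(db, project.id, date(2026, 1, 30), 2.5, "Set up project skeleton")
        entries = list_work_logs_by_range(db, date(2026, 1, 26), date(2026, 2, 1), limit=50, offset=0)
        print([(entry.date, entry.hours) for entry in entries])
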
diff --git a/db.py b/db.py
new file mode 100644
index 0000000..c40aeb8
--- /dev/null
+++ b/db.py
@@ -0,0 +1,19 @@
+from sqlalchemy import create_engine, event
+from sqlalchemy.orm import sessionmaker
+
+DATABASE_URL = "sqlite:///./worklog.db"
+
+engine = create_engine(
+    DATABASE_URL,
+    connect_args={"check_same_thread": False},
+    future=True,
+)
+
+
+@event.listens_for(engine, "connect")
+def _set_sqlite_pragma(dbapi_connection, connection_record) -> None:
+    cursor = dbapi_connection.cursor()
+    cursor.execute("PRAGMA foreign_keys=ON")
+    cursor.close()
+
+SessionLocal = sessionmaker(bind=engine, autocommit=False, autoflush=False, future=True)
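
SQLite leaves foreign-key enforcement switched off on each new connection, so the "connect" listener in db.py is what makes the ON DELETE CASCADE declared later in models.py actually remove a project's work logs. A quick check that the pragma took effect on a pooled connection, as a sketch outside the commit:

    from db import engine

    with engine.connect() as conn:
        # Prints 1 when the connect-time listener has enabled foreign key enforcement.
        print(conn.exec_driver_sql("PRAGMA foreign_keys").scalar())
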
diff --git a/main.py b/main.py
new file mode 100644
index 0000000..5e2ec21
--- /dev/null
+++ b/main.py
@@ -0,0 +1,185 @@
+import calendar
+from datetime import date, timedelta
+from typing import Generator, List
+
+from fastapi import Depends, FastAPI, HTTPException, Query, status
+from sqlalchemy.exc import IntegrityError
+from sqlalchemy.orm import Session
+
+import crud
+from db import SessionLocal, engine
+from models import Base
+from schemas import (
+    ProjectCreate,
+    ProjectRead,
+    ProjectUpdate,
+    WorkLogCreate,
+    WorkLogRead,
+    WorkLogUpdate,
+)
+
+app = FastAPI(title="Work Log API")
+
+
+def get_db() -> Generator[Session, None, None]:
+    db = SessionLocal()
+    try:
+        yield db
+    finally:
+        db.close()
+
+
+@app.on_event("startup")
+def on_startup() -> None:
+    Base.metadata.create_all(bind=engine)
+
+
+@app.post("/projects", response_model=ProjectRead, status_code=status.HTTP_201_CREATED)
+def create_project(payload: ProjectCreate, db: Session = Depends(get_db)) -> ProjectRead:
+    existing = crud.get_project_by_name(db, payload.name)
+    if existing:
+        raise HTTPException(status_code=409, detail="project name already exists")
+    try:
+        return crud.create_project(db, payload.name, payload.description)
+    except IntegrityError:
+        raise HTTPException(status_code=409, detail="project name already exists")
+
+
+@app.get("/projects", response_model=List[ProjectRead])
+def list_projects(
+    limit: int = Query(50, ge=1, le=200),
+    offset: int = Query(0, ge=0),
+    db: Session = Depends(get_db),
+) -> List[ProjectRead]:
+    return crud.list_projects(db, limit, offset)
+
+
+@app.get("/projects/{project_id}", response_model=ProjectRead)
+def get_project(project_id: int, db: Session = Depends(get_db)) -> ProjectRead:
+    project = crud.get_project(db, project_id)
+    if not project:
+        raise HTTPException(status_code=404, detail="project not found")
+    return project
+
+
+@app.put("/projects/{project_id}", response_model=ProjectRead)
+def update_project(
+    project_id: int,
+    payload: ProjectUpdate,
+    db: Session = Depends(get_db),
+) -> ProjectRead:
+    project = crud.get_project(db, project_id)
+    if not project:
+        raise HTTPException(status_code=404, detail="project not found")
+    if payload.name:
+        existing = crud.get_project_by_name(db, payload.name)
+        if existing and existing.id != project_id:
+            raise HTTPException(status_code=409, detail="project name already exists")
+    return crud.update_project(db, project, payload.name, payload.description)
+
+
+@app.delete("/projects/{project_id}", status_code=status.HTTP_204_NO_CONTENT)
+def delete_project(project_id: int, db: Session = Depends(get_db)) -> None:
+    project = crud.get_project(db, project_id)
+    if not project:
+        raise HTTPException(status_code=404, detail="project not found")
+    crud.delete_project(db, project)
+    return None
+
+
+@app.post("/work-logs", response_model=WorkLogRead, status_code=status.HTTP_201_CREATED)
+def create_work_log(payload: WorkLogCreate, db: Session = Depends(get_db)) -> WorkLogRead:
+    project = crud.get_project(db, payload.project_id)
+    if not project:
+        raise HTTPException(status_code=404, detail="project not found")
+    return crud.create_work_log(
+        db,
+        payload.project_id,
+        payload.date,
+        payload.hours,
+        payload.description,
+    )
+
+
+@app.get("/work-logs", response_model=List[WorkLogRead])
+def list_work_logs(
+    limit: int = Query(50, ge=1, le=200),
+    offset: int = Query(0, ge=0),
+    db: Session = Depends(get_db),
+) -> List[WorkLogRead]:
+    return crud.list_work_logs(db, limit, offset)
+
+
+@app.get("/work-logs/day", response_model=List[WorkLogRead])
+def work_logs_for_day(
+    date_param: date = Query(..., alias="date"),
+    limit: int = Query(50, ge=1, le=200),
+    offset: int = Query(0, ge=0),
+    db: Session = Depends(get_db),
+) -> List[WorkLogRead]:
+    return crud.list_work_logs_by_range(db, date_param, date_param, limit, offset)
+
+
+@app.get("/work-logs/week", response_model=List[WorkLogRead])
+def work_logs_for_week(
+    date_param: date = Query(..., alias="date"),
+    limit: int = Query(50, ge=1, le=200),
+    offset: int = Query(0, ge=0),
+    db: Session = Depends(get_db),
+) -> List[WorkLogRead]:
+    start = date_param - timedelta(days=date_param.weekday())
+    end = start + timedelta(days=6)
+    return crud.list_work_logs_by_range(db, start, end, limit, offset)
+
+
+@app.get("/work-logs/month", response_model=List[WorkLogRead])
+def work_logs_for_month(
+    date_param: date = Query(..., alias="date"),
+    limit: int = Query(50, ge=1, le=200),
+    offset: int = Query(0, ge=0),
+    db: Session = Depends(get_db),
+) -> List[WorkLogRead]:
+    last_day = calendar.monthrange(date_param.year, date_param.month)[1]
+    start = date_param.replace(day=1)
+    end = date_param.replace(day=last_day)
+    return crud.list_work_logs_by_range(db, start, end, limit, offset)
+
+
+@app.get("/work-logs/{log_id}", response_model=WorkLogRead)
+def get_work_log(log_id: int, db: Session = Depends(get_db)) -> WorkLogRead:
+    entry = crud.get_work_log(db, log_id)
+    if not entry:
+        raise HTTPException(status_code=404, detail="work log not found")
+    return entry
+
+
+@app.put("/work-logs/{log_id}", response_model=WorkLogRead)
+def update_work_log(
+    log_id: int,
+    payload: WorkLogUpdate,
+    db: Session = Depends(get_db),
+) -> WorkLogRead:
+    entry = crud.get_work_log(db, log_id)
+    if not entry:
+        raise HTTPException(status_code=404, detail="work log not found")
+    if payload.project_id is not None:
+        project = crud.get_project(db, payload.project_id)
+        if not project:
+            raise HTTPException(status_code=404, detail="project not found")
+    return crud.update_work_log(
+        db,
+        entry,
+        payload.project_id,
+        payload.date,
+        payload.hours,
+        payload.description,
+    )
+
+
+@app.delete("/work-logs/{log_id}", status_code=status.HTTP_204_NO_CONTENT)
+def delete_work_log(log_id: int, db: Session = Depends(get_db)) -> None:
+    entry = crud.get_work_log(db, log_id)
+    if not entry:
+        raise HTTPException(status_code=404, detail="work log not found")
+    crud.delete_work_log(db, entry)
+    return None
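
main.py registers the fixed-path /work-logs/day, /work-logs/week and /work-logs/month routes before /work-logs/{log_id} so the literal paths are not swallowed by the integer path parameter, and it wires every handler through the get_db dependency. That means the API can be driven end to end with FastAPI's TestClient. A short happy-path sketch, outside the commit; payload values are illustrative and httpx must be installed for TestClient:

    from fastapi.testclient import TestClient

    from main import app

    # Startup events (table creation) only run when the client is used as a context manager.
    with TestClient(app) as client:
        project = client.post("/projects", json={"name": "client-work"}).json()
        client.post(
            "/work-logs",
            json={
                "project_id": project["id"],
                "date": "2026-01-30",
                "hours": 1.5,
                "description": "Wrote the first endpoints",
            },
        )
        print(client.get("/work-logs", params={"limit": 10}).json())
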
diff --git a/models.py b/models.py
new file mode 100644
index 0000000..b75233d
--- /dev/null
+++ b/models.py
@@ -0,0 +1,40 @@
+from datetime import datetime
+
+from sqlalchemy import Column, Date, DateTime, Float, ForeignKey, Integer, String
+from sqlalchemy.orm import declarative_base, relationship
+
+Base = declarative_base()
+
+
+class Project(Base):
+    __tablename__ = "projects"
+
+    id = Column(Integer, primary_key=True, index=True)
+    name = Column(String(200), unique=True, nullable=False, index=True)
+    description = Column(String(1000), nullable=True)
+    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
+
+    work_logs = relationship(
+        "WorkLogEntry",
+        back_populates="project",
+        cascade="all, delete-orphan",
+        passive_deletes=True,
+    )
+
+
+class WorkLogEntry(Base):
+    __tablename__ = "work_logs"
+
+    id = Column(Integer, primary_key=True, index=True)
+    project_id = Column(
+        Integer,
+        ForeignKey("projects.id", ondelete="CASCADE"),
+        nullable=False,
+        index=True,
+    )
+    date = Column(Date, nullable=False, index=True)
+    hours = Column(Float, nullable=False)
+    description = Column(String(2000), nullable=False)
+    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
+
+    project = relationship("Project", back_populates="work_logs")
diff --git a/schemas.py b/schemas.py
new file mode 100644
index 0000000..96e6d2a
--- /dev/null
+++ b/schemas.py
@@ -0,0 +1,68 @@
+from datetime import date, datetime
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, field_validator
+
+
+def _validate_half_hours(value: float) -> float:
+    if value <= 0:
+        raise ValueError("hours must be greater than 0")
+    scaled = value * 2
+    if abs(scaled - round(scaled)) > 1e-6:
+        raise ValueError("hours must be in 0.5 increments")
+    return value
+
+
+class ProjectBase(BaseModel):
+    name: str
+    description: Optional[str] = None
+
+
+class ProjectCreate(ProjectBase):
+    pass
+
+
+class ProjectUpdate(BaseModel):
+    name: Optional[str] = None
+    description: Optional[str] = None
+
+
+class ProjectRead(ProjectBase):
+    id: int
+    created_at: datetime
+
+    model_config = ConfigDict(from_attributes=True)
+
+
+class WorkLogBase(BaseModel):
+    project_id: int
+    date: date
+    hours: float
+    description: str
+
+    _validate_hours = field_validator("hours")(_validate_half_hours)
+
+
+class WorkLogCreate(WorkLogBase):
+    pass
+
+
+class WorkLogUpdate(BaseModel):
+    project_id: Optional[int] = None
+    date: Optional[date] = None
+    hours: Optional[float] = None
+    description: Optional[str] = None
+
+    @field_validator("hours")
+    @classmethod
+    def validate_hours(cls, value: Optional[float]) -> Optional[float]:
+        if value is None:
+            return value
+        return _validate_half_hours(value)
+
+
+class WorkLogRead(WorkLogBase):
+    id: int
+    created_at: datetime
+
+    model_config = ConfigDict(from_attributes=True)
diff --git a/worklog.db b/worklog.db
new file mode 100644
index 0000000..08eb98c
Binary files /dev/null and b/worklog.db differ
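
schemas.py is where the half-hour granularity is enforced: _validate_half_hours rejects non-positive values and anything that is not a multiple of 0.5 before a request ever reaches crud or the database. A small sketch of that behaviour, outside the commit, with illustrative values:

    from pydantic import ValidationError

    from schemas import WorkLogCreate

    payload = {"project_id": 1, "date": "2026-01-30", "description": "Code review"}

    # Accepted: 1.5 is a valid half-hour step.
    print(WorkLogCreate(**payload, hours=1.5).hours)

    try:
        WorkLogCreate(**payload, hours=1.2)
    except ValidationError as exc:
        # Reports the "hours must be in 0.5 increments" error from the shared validator.
        print(exc.errors()[0]["msg"])
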