import csv
import io
from datetime import date, timedelta
from typing import List, Dict, Tuple, Optional

from fastapi import APIRouter, Request, Depends, Query, HTTPException
from fastapi.responses import PlainTextResponse
from sqlalchemy import func, and_, case
from sqlalchemy.orm import Session

from .db import get_session
from .models import Employee, TimeEntry, TimesheetStatus

router = APIRouter(prefix="/attendance", tags=["Attendance"])


def _daterange(d1: date, d2: date):
    """Yield every date from d1 through d2 inclusive."""
    cur = d1
    step = timedelta(days=1)
    while cur <= d2:
        yield cur
        cur += step


def _to_date(v: Optional[str], fallback: date) -> date:
    """Parse an ISO (YYYY-MM-DD) date string; return fallback on None/invalid."""
    try:
        if v:
            return date.fromisoformat(v)
    except Exception:
        pass
    return fallback


def _require_admin(request: Request) -> None:
    """Raise 403 unless the current session is flagged as admin."""
    if not request.session.get("is_admin"):
        raise HTTPException(status_code=403, detail="Admin access required")


def _resolve_employee_ids(employee_id: Optional[str], all_emps: List[Employee]) -> List[int]:
    """Map the dropdown value ("all" or a single id string) to employee ids.

    Falls back to all employees when the value is missing or not an integer.
    """
    if not employee_id or employee_id == "all":
        return [e.id for e in all_emps]
    try:
        return [int(employee_id)]
    except Exception:
        return [e.id for e in all_emps]


def _pto_exprs():
    """Build the shared PTO-subtype SQL aggregates used by both routes.

    TimeEntry.pto_type is normalized (trim + lower) and matched by prefix:
    "off...", "sick...", "pto...". Returns labeled expressions
    (pto_hours, off_hours, off_flag, sick_hours, sick_flag) for a grouped query;
    the *_flag expressions are 1 when any row of that subtype exists, else 0.
    """
    normalized_pto = func.lower(func.trim(func.coalesce(TimeEntry.pto_type, "")))

    def hours(prefix: str, label: str):
        return func.coalesce(
            func.sum(case((normalized_pto.like(prefix), func.coalesce(TimeEntry.pto_hours, 0)), else_=0)),
            0,
        ).label(label)

    def flag(prefix: str, label: str):
        return func.coalesce(func.max(case((normalized_pto.like(prefix), 1), else_=0)), 0).label(label)

    return (
        hours("pto%", "pto"),
        hours("off%", "off"),
        flag("off%", "off_flag"),
        hours("sick%", "sick"),
        flag("sick%", "sick_flag"),
    )


def _day_status(info: Dict[str, float]) -> str:
    """Classify a submitted day with precedence holiday > off > sick > pto > other > worked."""
    if info["holiday"] > 0:
        return "holiday"
    if info["off_flag"] > 0 or info["off"] > 0:
        return "off"
    if info["sick_flag"] > 0 or info["sick"] > 0:
        return "sick"
    if info["pto"] > 0:
        return "pto"
    if info["other"] > 0:
        return "other"
    if info["worked"] > 0:
        return "worked"
    return "nodata"


def _row_filters(start_d: date, end_d: date, ids: List[int]) -> list:
    """Build the WHERE clauses for the date window and optional employee set.

    BUGFIX: the original passed a bare Python ``True`` to ``Query.filter()``
    when the id list was empty; build the filter list conditionally instead.
    """
    filters = [TimeEntry.work_date >= start_d, TimeEntry.work_date <= end_d]
    if ids:
        filters.append(TimeEntry.employee_id.in_(ids))
    return filters


@router.get("", name="attendance_builder")
def attendance_builder_page(
    request: Request,
    start: Optional[str] = Query(None),  # YYYY-MM-DD
    end: Optional[str] = Query(None),  # YYYY-MM-DD
    employee_id: Optional[str] = Query("all"),  # "all" or single id
    include_weekends: int = Query(0),
    db: Session = Depends(get_session),
):
    """Render the attendance grid for a date range (admin only).

    Aggregates SUBMITTED time entries per employee/day. Weekday gaps (no
    submitted row) count as "off" when the employee has any submissions in
    the range; otherwise they are "nodata".
    """
    _require_admin(request)

    # Defaults: current month.
    today = date.today()
    start_d = _to_date(start, today.replace(day=1))
    end_d = _to_date(end, today)
    if end_d < start_d:
        start_d, end_d = end_d, start_d

    all_emps = db.query(Employee).order_by(Employee.name.asc()).all()
    selected_ids = _resolve_employee_ids(employee_id, all_emps)

    pto_hours_expr, off_hours_expr, off_flag_expr, sick_hours_expr, sick_flag_expr = _pto_exprs()

    # Aggregate per employee/date (SUBMITTED ONLY).
    q = (
        db.query(
            TimeEntry.employee_id.label("emp_id"),
            TimeEntry.work_date.label("d"),
            func.coalesce(func.sum(TimeEntry.total_hours), 0).label("total"),
            func.coalesce(func.sum(TimeEntry.break_hours), 0).label("breaks"),
            pto_hours_expr,
            off_hours_expr,
            off_flag_expr,
            sick_hours_expr,
            sick_flag_expr,
            func.coalesce(func.sum(TimeEntry.holiday_hours), 0).label("holiday"),
            func.coalesce(func.sum(TimeEntry.bereavement_hours), 0).label("other"),  # "Other" from bereavement bucket
        )
        .join(
            TimesheetStatus,
            and_(
                TimesheetStatus.timesheet_id == TimeEntry.timesheet_id,
                TimesheetStatus.employee_id == TimeEntry.employee_id,
                TimesheetStatus.status == "submitted",
            ),
        )
        .filter(*_row_filters(start_d, end_d, selected_ids))
    )

    rows = (
        q.group_by(TimeEntry.employee_id, TimeEntry.work_date)
        .order_by(TimeEntry.employee_id.asc(), TimeEntry.work_date.asc())
        .all()
    )

    # Build (emp_id, date) -> per-day summary.
    per_day: Dict[Tuple[int, date], Dict[str, float]] = {}
    for r in rows:
        worked = float(r.total or 0) - float(r.breaks or 0)
        if worked < 0:
            worked = 0.0
        per_day[(int(r.emp_id), r.d)] = {
            "worked": worked,
            "pto": float(r.pto or 0),
            "off": float(r.off or 0),
            "off_flag": int(r.off_flag or 0),
            "sick": float(r.sick or 0),
            "sick_flag": int(r.sick_flag or 0),
            "holiday": float(r.holiday or 0),
            "other": float(r.other or 0),
        }

    days = [d for d in _daterange(start_d, end_d) if include_weekends or d.weekday() < 5]

    visual = []
    for e in all_emps:
        if selected_ids and e.id not in selected_ids:
            continue

        # Has any submitted data in range? Used for classifying gaps.
        has_data = any(k[0] == e.id for k in per_day.keys())

        cells = []
        totals = {
            "worked_days": 0,
            "off_days": 0,
            "sick_days": 0,
            "pto_days": 0,
            "holiday_days": 0,
            "other_days": 0,
        }
        hours = {"worked": 0.0, "off": 0.0, "sick": 0.0, "pto": 0.0, "holiday": 0.0, "other": 0.0}

        for d in days:
            info = per_day.get((e.id, d))
            weekend = d.weekday() >= 5

            if info:
                st = _day_status(info)
                if st != "nodata":
                    totals[f"{st}_days"] += 1
                    hours[st] += info[st]
            else:
                if weekend:
                    st = "weekend"
                else:
                    # Gaps (weekday without submitted row) count as Off when
                    # the employee has any submissions in range.
                    st = "off" if has_data else "nodata"
                    if st == "off":
                        totals["off_days"] += 1
                        # No hours added for inferred Off gap.

            cells.append({"date": d, "status": st})

        visual.append({
            "employee": e,
            "cells": cells,
            "totals": totals,
            "hours": {k: round(v, 2) for k, v in hours.items()},
        })

    # ROBUSTNESS FIX: the original called int(employee_id) unguarded here and
    # would 500 on non-numeric input even though the query path fell back.
    if not employee_id or employee_id == "all":
        selected_employee_id = None
    else:
        try:
            selected_employee_id = int(employee_id)
        except Exception:
            selected_employee_id = None

    return request.app.state.templates.TemplateResponse(
        "attendance.html",
        {
            "request": request,
            "employees": all_emps,
            "selected_ids": selected_ids,
            "selected_employee_id": selected_employee_id,
            "start": start_d,
            "end": end_d,
            "days": days,
            "include_weekends": include_weekends,
            "visual": visual,
        },
    )


@router.get("/export.csv", response_class=PlainTextResponse)
def attendance_export_csv(
    request: Request,
    start: str = Query(...),
    end: str = Query(...),
    employee_id: Optional[str] = Query("all"),
    include_weekends: int = Query(0),
    db: Session = Depends(get_session),
):
    """Export per-employee/day attendance rows as CSV (admin only).

    Uses the same submitted-only aggregation and status precedence as the
    grid view. Only days with submitted rows appear in the export.
    """
    _require_admin(request)

    start_d = _to_date(start, date.today())
    end_d = _to_date(end, date.today())
    if end_d < start_d:
        start_d, end_d = end_d, start_d

    all_emps = db.query(Employee).order_by(Employee.name.asc()).all()
    ids = _resolve_employee_ids(employee_id, all_emps)

    pto_hours_expr, off_hours_expr, off_flag_expr, sick_hours_expr, sick_flag_expr = _pto_exprs()

    q = (
        db.query(
            Employee.name,
            TimeEntry.employee_id,
            TimeEntry.work_date,
            func.coalesce(func.sum(TimeEntry.total_hours), 0).label("total"),
            func.coalesce(func.sum(TimeEntry.break_hours), 0).label("breaks"),
            pto_hours_expr,
            off_hours_expr,
            off_flag_expr,
            sick_hours_expr,
            sick_flag_expr,
            func.coalesce(func.sum(TimeEntry.holiday_hours), 0).label("holiday"),
            func.coalesce(func.sum(TimeEntry.bereavement_hours), 0).label("other"),
        )
        .join(Employee, Employee.id == TimeEntry.employee_id)
        .join(
            TimesheetStatus,
            and_(
                TimesheetStatus.timesheet_id == TimeEntry.timesheet_id,
                TimesheetStatus.employee_id == TimeEntry.employee_id,
                TimesheetStatus.status == "submitted",
            ),
        )
        .filter(*_row_filters(start_d, end_d, ids))
    )

    rows = (
        q.group_by(Employee.name, TimeEntry.employee_id, TimeEntry.work_date)
        .order_by(Employee.name.asc(), TimeEntry.work_date.asc())
        .all()
    )

    # BUGFIX: emit through csv.writer so employee names containing commas or
    # quotes produce valid CSV instead of corrupting the row layout.
    buf = io.StringIO()
    writer = csv.writer(buf, lineterminator="\n")
    writer.writerow(["Employee", "Date", "Status", "WorkedHours", "OffHours",
                     "SickHours", "PTOHours", "HolidayHours", "OtherHours"])
    for r in rows:
        worked = float(r.total or 0) - float(r.breaks or 0)
        if worked < 0:
            worked = 0.0
        info = {
            "worked": worked,
            "pto": float(r.pto or 0),
            "off": float(r.off or 0),
            "off_flag": int(r.off_flag or 0),
            "sick": float(r.sick or 0),
            "sick_flag": int(r.sick_flag or 0),
            "holiday": float(r.holiday or 0),
            "other": float(r.other or 0),
        }
        st = _day_status(info)
        writer.writerow([
            r[0],
            r.work_date.isoformat(),
            st,
            f"{worked:.2f}",
            f"{info['off']:.2f}",
            f"{info['sick']:.2f}",
            f"{info['pto']:.2f}",
            f"{info['holiday']:.2f}",
            f"{info['other']:.2f}",
        ])

    return PlainTextResponse(buf.getvalue(), media_type="text/csv")
# Accept Argon2 (preferred) and legacy bcrypt; new hashes will be Argon2.
pwd_context = CryptContext(
    schemes=["argon2", "bcrypt"],
    deprecated="auto",
)


def hash_password(p: str) -> str:
    """Hash a plaintext password with the preferred scheme (Argon2)."""
    return pwd_context.hash(p)


def verify_password(p: str, hashed: str) -> bool:
    """Check a plaintext password against a stored hash."""
    return pwd_context.verify(p, hashed)


def verify_and_update_password(p: str, hashed: str) -> Tuple[bool, Optional[str]]:
    """
    Returns (verified, new_hash). If verified is True and new_hash is not None,
    caller should persist the new_hash (Argon2) to upgrade legacy bcrypt.
    """
    try:
        return pwd_context.verify_and_update(p, hashed)
    except Exception:
        # Malformed/unknown hash formats are treated as a failed verification.
        return False, None


def _extract_request(args, kwargs) -> Optional[Request]:
    """Locate the Request object among an endpoint's args/kwargs, if any."""
    candidate = kwargs.get("request")
    if isinstance(candidate, Request):
        return candidate
    return next((a for a in args if isinstance(a, Request)), None)


def login_required(endpoint: Callable[..., Any]):
    """
    Decorator that supports both sync and async FastAPI endpoints.
    Redirects to /login when no session is present.
    """

    def _deny_if_anonymous(args, kwargs):
        # Shared guard: returns a redirect response for anonymous callers.
        request = _extract_request(args, kwargs)
        if not request or not request.session.get("user_id"):
            return RedirectResponse(url="/login", status_code=303)
        return None

    if inspect.iscoroutinefunction(endpoint):
        @wraps(endpoint)
        async def async_wrapper(*args, **kwargs):
            denied = _deny_if_anonymous(args, kwargs)
            if denied is not None:
                return denied
            return await endpoint(*args, **kwargs)
        return async_wrapper

    @wraps(endpoint)
    def sync_wrapper(*args, **kwargs):
        denied = _deny_if_anonymous(args, kwargs)
        if denied is not None:
            return denied
        return endpoint(*args, **kwargs)
    return sync_wrapper


def get_current_user(request: Request):
    """Return the session's user identity; values are None when logged out."""
    return {"id": request.session.get("user_id"), "username": request.session.get("username")}


# --- app/db.py ---
import os
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

DATABASE_URL = os.getenv(
    "DATABASE_URL",
    "postgresql+psycopg://timekeeper:timekeeper_pw@db:5432/timekeeper",
)

engine = create_engine(DATABASE_URL, pool_pre_ping=True, future=True)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine, future=True)


def get_session():
    """FastAPI dependency: yield a session and always close it afterwards."""
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()


def ping_db():
    """Cheap connectivity check (SELECT 1) against the configured database."""
    with engine.connect() as conn:
        conn.execute(text("SELECT 1"))


# --- app/dept_importer.py ---
import os
import io
import csv
import json
from datetime import datetime, date as date_type, time as time_type, timedelta
from typing import Dict, List, Optional, Any, Tuple
from decimal import Decimal

from fastapi import APIRouter, Request, Depends, UploadFile, File, Form, Query, HTTPException
from fastapi.responses import RedirectResponse, HTMLResponse
from sqlalchemy.orm import Session
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, func, text

from .db import get_session
from .models import Base, Employee, TimeEntry, TimesheetPeriod
from .utils import enumerate_timesheets_global, D, q2
from .process_excel import (
    detect_header_map,
    parse_date,
    parse_datetime_value,
    parse_time_value,
    safe_decimal,
)

router = APIRouter(prefix="/import/department", tags=["Department Import"])


class ImportBatch(Base):
    """One import run: groups the rows created from a single uploaded file."""
    __tablename__ = "import_batches"
    id = Column(Integer, primary_key=True, autoincrement=True)
    timesheet_id = Column(Integer, index=True, nullable=False)
    source_name = Column(String(255), nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)


class ImportBatchItem(Base):
    """Join row linking an ImportBatch to one created TimeEntry."""
    __tablename__ = "import_batch_items"
    id = Column(Integer, primary_key=True, autoincrement=True)
    batch_id = Column(Integer, ForeignKey("import_batches.id"), nullable=False, index=True)
    time_entry_id = Column(Integer, ForeignKey("time_entries.id"), nullable=False, index=True)
# -------------------------
# Mapping helpers
# -------------------------

# Hide "Hours Worked Minus Break" from the mapping UI to avoid confusion;
# backend still auto-detects it (see _aux_header_indexes below).
TARGET_FIELDS: List[Tuple[str, str, bool]] = [
    ("employee", "Employee Name", True),
    ("work_date", "Work Date", True),
    ("clock_in", "Clock In", False),
    ("clock_out", "Clock Out", False),
    ("break_hours", "Break Hours", False),
    ("total_hours", "Hours Worked", False),
    # ("total_minus_break", "Hours Worked Minus Break", False),  # HIDDEN FROM UI
    ("pto_hours", "PTO Hours", False),
    ("pto_type", "PTO Type", False),
    ("holiday_hours", "Holiday Hours", False),
    ("bereavement_hours", "Bereavement/Other Hours", False),
]


def _store_upload_file(data: bytes, original_name: str) -> str:
    """Persist raw upload bytes under uploads/ with a timestamped slug; return the path."""
    os.makedirs("uploads", exist_ok=True)
    _, ext = os.path.splitext(original_name)
    slug = f"dept-map-{datetime.utcnow().strftime('%Y%m%d%H%M%S')}-{os.getpid()}{ext or ''}"
    path = os.path.join("uploads", slug)
    with open(path, "wb") as f:
        f.write(data)
    return path


def _list_sheets_with_headers(xlsx_path: str) -> List[Dict[str, Any]]:
    """Scan each worksheet's first 50 rows for a recognizable header row.

    Returns one dict per sheet with the detected header row index (or None),
    the raw header values, and the auto-detected field map.
    """
    import openpyxl
    wb = openpyxl.load_workbook(xlsx_path, data_only=True)
    out: List[Dict[str, Any]] = []
    for ws in wb.worksheets:
        header_row_idx = None
        header_vals = []
        auto_map = {}
        for r in range(1, min(ws.max_row, 50) + 1):
            vals = [cell.value for cell in ws[r]]
            header_vals = vals
            auto_map = detect_header_map(vals)
            if auto_map:
                header_row_idx = r
                break
        out.append({
            "sheet_name": ws.title,
            "header_row_idx": header_row_idx,
            "header_vals": header_vals,
            "auto_map": auto_map,
        })
    return out


def _default_sheet_name(sheets_info: List[Dict[str, Any]]) -> Optional[str]:
    """Prefer 'Final Time Clock Report', then any sheet with an auto-map, then the first."""
    names = [s["sheet_name"] for s in sheets_info] if sheets_info else []
    for nm in names:
        if nm.lower().strip() == "final time clock report":
            return nm
    for s in (sheets_info or []):
        if s.get("auto_map"):
            return s["sheet_name"]
    return names[0] if names else None


def _hours_from_value(value) -> Decimal:
    """Coerce a cell value (number, datetime, time, or 'H:MM[:SS]' string) to hours.

    Numeric values <= 1.0 are treated as Excel day fractions and scaled by 24.
    Unparseable values yield Decimal 0.
    """
    if value is None or value == "":
        return D(0)
    if isinstance(value, (int, float)):
        val = float(value)
        if val < 0:
            val = 0.0
        hours = val * 24.0 if val <= 1.0 else val
        return q2(D(hours))
    if isinstance(value, datetime):
        t = value.time()
        return q2(D(t.hour) + D(t.minute) / D(60) + D(t.second) / D(3600))
    if isinstance(value, time_type):
        return q2(D(value.hour) + D(value.minute) / D(60) + D(value.second) / D(3600))
    if isinstance(value, str):
        v = value.strip()
        if ":" in v:
            try:
                parts = [int(p) for p in v.split(":")]
                if len(parts) == 2:
                    h, m = parts
                    return q2(D(h) + D(m) / D(60))
                elif len(parts) == 3:
                    h, m, s = parts
                    return q2(D(h) + D(m) / D(60) + D(s) / D(3600))
            except Exception:
                pass
        try:
            return q2(D(v))
        except Exception:
            return D(0)
    return D(0)


def _find_header_index(header_vals: List[Any], include: List[str]) -> Optional[int]:
    """Return the index of the first header containing all tokens (case-insensitive)."""
    hn = [str(v).strip().lower() if v is not None else "" for v in header_vals]
    for i, h in enumerate(hn):
        if all(tok in h for tok in include):
            return i
    return None


def _aux_header_indexes(header_vals: List[Any]) -> Dict[str, Optional[int]]:
    """Detect helper columns that are not part of the UI mapping (may be None)."""
    return {
        "break_start": _find_header_index(header_vals, ["break", "start", "time"]),
        "break_end": _find_header_index(header_vals, ["break", "end", "time"]),
        "total_minus_break": _find_header_index(header_vals, ["hours", "worked", "minus", "break"]),
        "shift_start": _find_header_index(header_vals, ["shift", "start", "time"]),
        "shift_end": _find_header_index(header_vals, ["shift", "end", "time"]),
        "hours_scheduled": _find_header_index(header_vals, ["hours", "scheduled"]),
    }


def _cell(vals: List[Any], idx: Optional[int]):
    """Safe positional lookup into a row; None when idx is missing or out of range."""
    if idx is None or idx < 0 or idx >= len(vals):
        return None
    return vals[idx]


def _to_datetime(work_date: date_type, raw) -> Optional[datetime]:
    """Parse raw as a full datetime, falling back to time-of-day on work_date."""
    if raw is None:
        return None
    dt = parse_datetime_value(raw)
    if dt:
        return dt
    t = parse_time_value(raw)
    return datetime.combine(work_date, t) if t else None


def _span_hours(start_dt: datetime, end_dt: datetime) -> Decimal:
    """Hours between two datetimes, rolling the end over midnight when needed."""
    if end_dt <= start_dt:
        end_dt = end_dt + timedelta(days=1)
    return q2(D((end_dt - start_dt).total_seconds()) / D(3600))


def _row_to_entry(
    vals: List[Any],
    mapping: Dict[str, Optional[int]],
    aux: Dict[str, Optional[int]],
    strict_pto_type: bool,
) -> Optional[Dict]:
    """Normalize one raw row into an entry dict; None means skip the row.

    Shared by the Excel and CSV parsers (the previous code duplicated this
    ~150-line body verbatim in both). ``strict_pto_type`` preserves the Excel
    path's behavior of rejecting numeric/datetime cells as a PTO type.
    """
    def getv(key: str):
        return _cell(vals, mapping.get(key))

    emp_raw = getv("employee")
    date_raw = getv("work_date")
    if not emp_raw or not date_raw:
        return None

    employee_name = str(emp_raw).strip()
    work_date = parse_date(date_raw)
    if not work_date:
        return None

    clock_in_dt = _to_datetime(work_date, getv("clock_in"))
    clock_out_dt = _to_datetime(work_date, getv("clock_out"))
    # Overnight shift: a clock-out at/before clock-in means next day.
    if clock_in_dt and clock_out_dt and clock_out_dt <= clock_in_dt:
        clock_out_dt = clock_out_dt + timedelta(days=1)

    # Break hours from the mapped column; if blank/zero, derive from
    # the auto-detected Break Start/End columns.
    break_hours_taken = safe_decimal(getv("break_hours"), D(0))
    if (break_hours_taken is None) or (break_hours_taken == D(0)):
        start_dt = _to_datetime(work_date, _cell(vals, aux["break_start"]))
        end_dt = _to_datetime(work_date, _cell(vals, aux["break_end"]))
        if start_dt and end_dt:
            break_hours_taken = _span_hours(start_dt, end_dt)

    # Total hours ("Hours Worked") from the mapped column, if any.
    total_raw = getv("total_hours") if mapping.get("total_hours") is not None else None
    total_from_sheet = safe_decimal(total_raw) if (total_raw not in (None, "")) else None

    # Fallback from "Hours Worked Minus Break" (hidden in UI but auto-detected).
    alt_raw = getv("total_minus_break") if mapping.get("total_minus_break") is not None else _cell(vals, aux["total_minus_break"])
    alt_total_minus_break = safe_decimal(alt_raw) if (alt_raw not in (None, "")) else None

    # Scheduled hours: explicit "Hours Scheduled" or derived from Shift Start/End.
    scheduled_hours = None
    hs_raw = _cell(vals, aux["hours_scheduled"])
    if hs_raw not in (None, ""):
        scheduled_hours = safe_decimal(hs_raw, D(0))
    else:
        ss_dt = _to_datetime(work_date, _cell(vals, aux["shift_start"]))
        se_dt = _to_datetime(work_date, _cell(vals, aux["shift_end"]))
        if ss_dt and se_dt:
            scheduled_hours = _span_hours(ss_dt, se_dt)

    pto_hours = safe_decimal(getv("pto_hours"), D(0))
    pto_type_val = getv("pto_type")
    if pto_type_val is None or (strict_pto_type and isinstance(pto_type_val, (int, float, datetime))):
        pto_type = None
    else:
        pto_type = str(pto_type_val).strip()

    holiday_hours = safe_decimal(getv("holiday_hours"), D(0))
    bereavement_hours = safe_decimal(getv("bereavement_hours"), D(0))

    # Determine Total Hours: sheet value > minus-break + break > clock span > 0.
    if total_from_sheet is None:
        if alt_total_minus_break is not None and break_hours_taken is not None:
            total_from_sheet = q2(alt_total_minus_break + break_hours_taken)
        elif clock_in_dt and clock_out_dt:
            total_from_sheet = q2(D((clock_out_dt - clock_in_dt).total_seconds()) / D(3600))
        else:
            total_from_sheet = D(0)
    else:
        total_from_sheet = q2(total_from_sheet)

    # NOTE: mirrors the original — a positive PTO amount clears the type label.
    if pto_hours > D(0):
        pto_type = None

    return {
        "employee_name": employee_name,
        "work_date": work_date,
        "clock_in": clock_in_dt,
        "clock_out": clock_out_dt,
        "break_hours": q2(break_hours_taken or D(0)),
        "total_hours": q2(total_from_sheet),
        "scheduled_hours": q2(scheduled_hours or D(0)),
        "pto_hours": q2(pto_hours),
        "pto_type": pto_type,
        "holiday_hours": q2(holiday_hours),
        "bereavement_hours": q2(bereavement_hours),
    }


def _parse_rows_with_mapping(xlsx_path: str, sheet_name: str, header_row_idx: int, mapping: Dict[str, Optional[int]]) -> List[Dict]:
    """Parse an Excel sheet into normalized entry dicts using the user's column mapping."""
    import openpyxl
    wb = openpyxl.load_workbook(xlsx_path, data_only=True)
    ws = wb[sheet_name]

    header_vals = [cell.value for cell in ws[header_row_idx]]
    aux = _aux_header_indexes(header_vals)

    rows_out: List[Dict] = []
    max_row = ws.max_row or 0
    for r in range((header_row_idx or 1) + 1, max_row + 1):
        vals = [cell.value for cell in ws[r]]
        entry = _row_to_entry(vals, mapping, aux, strict_pto_type=True)
        if entry:
            rows_out.append(entry)
    return rows_out


def _csv_headers_and_map(csv_bytes: bytes) -> Tuple[List[str], Dict[str, int]]:
    """Return the CSV's header row and the auto-detected field map ([], {} when empty)."""
    rows = list(csv.reader(io.StringIO(csv_bytes.decode("utf-8", errors="replace"))))
    if not rows:
        return [], {}
    return rows[0], detect_header_map(rows[0])


def _parse_csv_with_mapping(csv_path: str, mapping: Dict[str, Optional[int]]) -> List[Dict]:
    """Parse a CSV file into normalized entry dicts using the user's column mapping."""
    rows_out: List[Dict] = []
    with open(csv_path, "r", encoding="utf-8", errors="replace") as f:
        all_rows = list(csv.reader(f))
    if not all_rows:
        return rows_out

    aux = _aux_header_indexes(all_rows[0])
    for vals in all_rows[1:]:
        # CSV cells are strings, so the strict numeric/datetime PTO-type
        # guard used for Excel is unnecessary here (matches original behavior).
        entry = _row_to_entry(vals, mapping, aux, strict_pto_type=False)
        if entry:
            rows_out.append(entry)
    return rows_out


# -------------------------
# Routes
# -------------------------

def _active_timesheet(db: Session) -> Optional[TimesheetPeriod]:
    """Return the most recent timesheet period, or None when none exist."""
    sheets = enumerate_timesheets_global(db)
    if not sheets:
        return None
    tid = sheets[-1][0]
    return db.query(TimesheetPeriod).get(tid)


def _within_period(d: date_type, ts: TimesheetPeriod) -> bool:
    """True when d falls inside the timesheet's inclusive date window."""
    return ts.period_start <= d <= ts.period_end


def _dedup_exists(db: Session, employee_id: int, timesheet_id: int, work_date: date_type, clock_in: Optional[datetime], clock_out: Optional[datetime]) -> bool:
    """True when an identical (employee, sheet, date, clock in/out) entry already exists."""
    q = db.query(TimeEntry).filter(
        TimeEntry.employee_id == employee_id,
        TimeEntry.timesheet_id == timesheet_id,
        TimeEntry.work_date == work_date,
    )
    for r in q.all():
        if (r.clock_in or None) == (clock_in or None) and (r.clock_out or None) == (clock_out or None):
            return True
    return False


@router.get("", response_class=HTMLResponse)
def importer_home(request: Request, db: Session = Depends(get_session), timesheet_id: Optional[int] = Query(None)):
    """Importer landing page: choose a time period and upload a file (admin only)."""
    if not request.session.get("is_admin"):
        raise HTTPException(status_code=403, detail="Admin access required")
    sheets = enumerate_timesheets_global(db)
    period_options = [{"timesheet_id": tid, "display": (name or f"{ps}..{pe}")} for tid, ps, pe, name in sheets]
    active_ts = db.query(TimesheetPeriod).get(timesheet_id) if timesheet_id else _active_timesheet(db)
    return request.app.state.templates.TemplateResponse(
        "dept_importer_upload.html",
        {"request": request, "period_options": period_options, "active_ts": active_ts.id if active_ts else None},
    )
HTTPException(status_code=403, detail="Admin access required")
    ts = db.query(TimesheetPeriod).get(timesheet_id)
    if not ts:
        raise HTTPException(status_code=404, detail="Time Period not found")

    data = await file.read()
    ext = os.path.splitext(file.filename.lower())[1]
    if ext not in (".xlsx", ".xlsm", ".xls", ".csv", ".txt"):
        raise HTTPException(status_code=400, detail="Unsupported file type. Please upload XLSX/XLS or CSV/TXT.")
    uploaded_path = _store_upload_file(data, file.filename)

    # Mapping context is persisted to disk so the follow-up mapping/preview
    # requests (separate HTTP calls) can reload it by slug.
    ctx: Dict[str, Any] = {
        "kind": "excel" if ext in (".xlsx", ".xlsm", ".xls") else "csv",
        "path": uploaded_path,
        "timesheet_id": timesheet_id,
        "restrict_to_period": int(restrict_to_period),
        "mode": "department",
    }

    if ctx["kind"] == "excel":
        sheets_info = _list_sheets_with_headers(uploaded_path)
        if not sheets_info:
            raise HTTPException(status_code=400, detail="No sheets found in workbook.")
        default_sheet = _default_sheet_name(sheets_info) or sheets_info[0]["sheet_name"]
        ctx["sheets_info"] = sheets_info
        ctx["default_sheet"] = default_sheet
    else:
        header_vals, auto_map = _csv_headers_and_map(data)
        if not header_vals:
            raise HTTPException(status_code=400, detail="Empty CSV")
        # CSV input is modelled as a single pseudo-sheet named "CSV".
        ctx["sheets_info"] = [{
            "sheet_name": "CSV",
            "header_row_idx": 1,
            "header_vals": header_vals,
            "auto_map": auto_map,
        }]
        ctx["default_sheet"] = "CSV"

    os.makedirs("uploads", exist_ok=True)
    # Slug includes timestamp + pid to avoid collisions between concurrent uploads.
    map_slug = f"dept-mapctx-{datetime.utcnow().strftime('%Y%m%d%H%M%S')}-{os.getpid()}.json"
    map_path = os.path.join("uploads", map_slug)
    with open(map_path, "w", encoding="utf-8") as f:
        json.dump(ctx, f)

    return request.app.state.templates.TemplateResponse(
        "dept_importer_map.html",
        {
            "request": request,
            "map_slug": map_slug,
            "timesheet_id": timesheet_id,
            "restrict_to_period": int(restrict_to_period),
            "sheets_info": ctx["sheets_info"],
            "sheet_name": ctx["default_sheet"],
            "target_fields": TARGET_FIELDS,
            "kind": ctx["kind"],
            "mode": ctx["mode"],
        },
    )

@router.get("/start-initial", response_class=HTMLResponse)
def start_initial_mapping(
    request: Request,
    timesheet_id: int = Query(...),
    src: str = Query(...),
):
    """Begin column mapping for a file already staged on disk by /upload
    (the "initial import" flow, where the period dates are placeholders)."""
    if not request.session.get("is_admin"):
        raise HTTPException(status_code=403, detail="Admin access required")

    if not os.path.exists(src):
        raise HTTPException(status_code=400, detail="Uploaded file not found; please re-upload.")

    _, ext = os.path.splitext(src.lower())
    if ext not in (".xlsx", ".xlsm", ".xls", ".csv", ".txt"):
        raise HTTPException(status_code=400, detail="Unsupported file type.")

    ctx: Dict[str, Any] = {
        "kind": "excel" if ext in (".xlsx", ".xlsm", ".xls") else "csv",
        "path": src,
        "timesheet_id": timesheet_id,
        # Initial imports never restrict rows to the (placeholder) period dates.
        "restrict_to_period": 0,
        "mode": "initial",
    }

    if ctx["kind"] == "excel":
        sheets_info = _list_sheets_with_headers(src)
        if not sheets_info:
            raise HTTPException(status_code=400, detail="No sheets found in workbook.")
        default_sheet = _default_sheet_name(sheets_info) or sheets_info[0]["sheet_name"]
        ctx["sheets_info"] = sheets_info
        ctx["default_sheet"] = default_sheet
    else:
        with open(src, "rb") as f:
            data = f.read()
        header_vals, auto_map = _csv_headers_and_map(data)
        if not header_vals:
            raise HTTPException(status_code=400, detail="Empty CSV")
        ctx["sheets_info"] = [{
            "sheet_name": "CSV",
            "header_row_idx": 1,
            "header_vals": header_vals,
            "auto_map": auto_map,
        }]
        ctx["default_sheet"] = "CSV"

    os.makedirs("uploads", exist_ok=True)
    map_slug = f"dept-mapctx-{datetime.utcnow().strftime('%Y%m%d%H%M%S')}-{os.getpid()}.json"
    map_path = os.path.join("uploads", map_slug)
    with open(map_path, "w", encoding="utf-8") as f:
        json.dump(ctx, f)

    return request.app.state.templates.TemplateResponse(
        "dept_importer_map.html",
        {
            "request": request,
            "map_slug": map_slug,
            "timesheet_id": timesheet_id,
            "restrict_to_period": 0,
            "sheets_info": ctx["sheets_info"],
            "sheet_name": ctx["default_sheet"],
            "target_fields": TARGET_FIELDS,
            "kind": ctx["kind"],
            "mode": ctx["mode"],
        },
    )

@router.get("/map", response_class=HTMLResponse)
def importer_map_get(
    request: Request,
    map_slug: str = Query(...),
    sheet_name: Optional[str] = Query(None),
):
    """Re-render the mapping page for a stored context (e.g. after switching sheets)."""
    path = os.path.join("uploads", map_slug)
    if not os.path.exists(path):
        raise HTTPException(status_code=400, detail="Mapping context expired. Please re-upload.")

    with open(path, "r", encoding="utf-8") as f:
        ctx = json.load(f)

    sheets_info = ctx.get("sheets_info") or []
    timesheet_id = ctx.get("timesheet_id")
    restrict_to_period = int(ctx.get("restrict_to_period") or 1)
    selected = sheet_name or ctx.get("default_sheet")
    return request.app.state.templates.TemplateResponse(
        "dept_importer_map.html",
        {
            "request": request,
            "map_slug": map_slug,
            "timesheet_id": timesheet_id,
            "restrict_to_period": restrict_to_period,
            "sheets_info": sheets_info,
            "sheet_name": selected,
            "target_fields": TARGET_FIELDS,
            "kind": ctx.get("kind") or "excel",
            "mode": ctx.get("mode") or "department",
        },
    )

@router.post("/preview-mapped", response_class=HTMLResponse)
async def importer_preview_mapped(
    request: Request,
    map_slug: str = Form(...),
    timesheet_id: int = Form(...),
    sheet_name: str = Form(...),
    restrict_to_period: int = Form(1),
    mode: str = Form("department"),
    db: Session = Depends(get_session),
    # The remaining fields are the user's column selections: each holds a
    # stringified column index, or ""/"none" when unmapped.
    employee: Optional[str] = Form(None),
    work_date: Optional[str] = Form(None),
    clock_in: Optional[str] = Form(None),
    clock_out: Optional[str] = Form(None),
    break_hours: Optional[str] = Form(None),
    total_hours: Optional[str] = Form(None),
    total_minus_break: Optional[str] = Form(None),  # hidden in UI; may be None
    pto_hours: Optional[str] = Form(None),
    pto_type: Optional[str] = Form(None),
    holiday_hours: Optional[str] = Form(None),
    bereavement_hours: Optional[str] = Form(None),
):
    """Parse the staged file with the user's column mapping and show a per-employee preview."""
    if not request.session.get("is_admin"):
        raise
HTTPException(status_code=403, detail="Admin access required")

    ctx_path = os.path.join("uploads", map_slug)
    if not os.path.exists(ctx_path):
        raise HTTPException(status_code=400, detail="Mapping context expired. Please re-upload.")

    with open(ctx_path, "r", encoding="utf-8") as f:
        ctx = json.load(f)

    kind = ctx.get("kind") or "excel"
    src_path = ctx.get("path")
    sheets_info = ctx.get("sheets_info") or []
    sheet_info = next((s for s in sheets_info if s.get("sheet_name") == sheet_name), None)
    if not sheet_info or not sheet_info.get("header_row_idx"):
        raise HTTPException(status_code=400, detail="Selected sheet has no recognizable header row.")

    def to_idx(v: Optional[str]) -> Optional[int]:
        # Form values arrive as strings; "", "none", or unparsable -> unmapped.
        if v is None:
            return None
        v = v.strip()
        if not v or v.lower() == "none":
            return None
        try:
            return int(v)
        except Exception:
            return None

    # Target field -> source column index (or None when not mapped).
    mapping = {
        "employee": to_idx(employee),
        "work_date": to_idx(work_date),
        "clock_in": to_idx(clock_in),
        "clock_out": to_idx(clock_out),
        "break_hours": to_idx(break_hours),
        "total_hours": to_idx(total_hours),
        "total_minus_break": to_idx(total_minus_break),  # may be None
        "pto_hours": to_idx(pto_hours),
        "pto_type": to_idx(pto_type),
        "holiday_hours": to_idx(holiday_hours),
        "bereavement_hours": to_idx(bereavement_hours),
    }

    # Employee and Work Date are the only mandatory columns.
    if mapping["employee"] is None or mapping["work_date"] is None:
        raise HTTPException(status_code=400, detail="Please select both Employee and Work Date columns.")

    if kind == "excel":
        norm_rows = _parse_rows_with_mapping(src_path, sheet_name, int(sheet_info["header_row_idx"]), mapping)
    else:
        norm_rows = _parse_csv_with_mapping(src_path, mapping)

    ts = db.query(TimesheetPeriod).get(timesheet_id)
    if not ts:
        raise HTTPException(status_code=404, detail="Time Period not found")

    total_before_filter = len(norm_rows)

    filtered_rows = norm_rows
    if restrict_to_period:
        filtered_rows = [r for r in norm_rows if _within_period(r["work_date"], ts)]

    # NOTE(review): if the period filter removes every row, all rows are kept
    # anyway — this silently ignores restrict_to_period. Confirm intentional.
    if not filtered_rows and total_before_filter > 0:
        filtered_rows = norm_rows

    # Group parsed rows by employee name for the preview table.
    by_emp: Dict[str, List[Dict]] = {}
    for r in filtered_rows:
        by_emp.setdefault(r["employee_name"], []).append(r)

    preview = []
    for name, rows in sorted(by_emp.items(), key=lambda kv: kv[0].lower()):
        # Case-insensitive match against existing employees.
        emp = db.query(Employee).filter(func.lower(Employee.name) == func.lower(name)).first()
        has_any_in_period = False
        if emp:
            has_any_in_period = db.query(TimeEntry).filter(TimeEntry.timesheet_id == timesheet_id, TimeEntry.employee_id == emp.id).first() is not None
        preview.append({
            "employee_name": name,
            "status": ("Existing in period" if has_any_in_period else ("Existing employee" if emp else "New employee")),
            "existing_employee_id": emp.id if emp else None,
            "row_count": len(rows),
        })

    # Persist the filtered rows for the /execute step.
    os.makedirs("uploads", exist_ok=True)
    slug = f"dept-import-{datetime.utcnow().strftime('%Y%m%d%H%M%S')}-{os.getpid()}.json"
    path = os.path.join("uploads", slug)
    def enc(o):
        # JSON encoder fallback: ISO dates/datetimes, str() for everything else.
        if isinstance(o, (datetime, date_type)):
            return o.isoformat()
        return str(o)
    with open(path, "w", encoding="utf-8") as f:
        json.dump({"timesheet_id": timesheet_id, "rows": filtered_rows}, f, default=enc)

    return request.app.state.templates.TemplateResponse(
        "dept_importer_preview.html",
        {"request": request, "slug": slug, "timesheet_id": timesheet_id, "preview": preview, "mode": mode},
    )

@router.post("/execute")
async def importer_execute(
    request: Request,
    slug: str = Form(...),
    timesheet_id: int = Form(...),
    selected_names: str = Form(...),
    mode: str = Form("department"),
    db: Session = Depends(get_session),
):
    """Commit the previously previewed rows for the selected employees into TimeEntry rows."""
    if not request.session.get("is_admin"):
        raise HTTPException(status_code=403, detail="Admin access required")

    path = os.path.join("uploads", slug)
    if not os.path.exists(path):
        raise HTTPException(status_code=400, detail="Import context expired.
Please re-upload.")

    with open(path, "r", encoding="utf-8") as f:
        payload = json.load(f)

    # Guard against stale/foreign contexts being replayed against another period.
    if int(payload.get("timesheet_id")) != int(timesheet_id):
        raise HTTPException(status_code=400, detail="Timesheet mismatch. Please re-upload.")

    ts = db.query(TimesheetPeriod).get(timesheet_id)
    if not ts:
        raise HTTPException(status_code=404, detail="Time Period not found")

    def conv(r: Dict) -> Dict:
        """Rehydrate one JSON row: ISO strings back to date/datetime, numerics to float."""
        ci = r.get("clock_in")
        co = r.get("clock_out")
        wd = r.get("work_date")
        return {
            "employee_name": r.get("employee_name"),
            "work_date": date_type.fromisoformat(wd) if isinstance(wd, str) else wd,
            "clock_in": (datetime.fromisoformat(ci) if isinstance(ci, str) else ci) if ci else None,
            "clock_out": (datetime.fromisoformat(co) if isinstance(co, str) else co) if co else None,
            "break_hours": float(r.get("break_hours") or 0),
            "total_hours": float(r.get("total_hours") or 0),
            "scheduled_hours": float(r.get("scheduled_hours") or 0),
            "pto_hours": float(r.get("pto_hours") or 0),
            "pto_type": (r.get("pto_type") or None),
            "holiday_hours": float(r.get("holiday_hours") or 0),
            "bereavement_hours": float(r.get("bereavement_hours") or 0),
        }

    rows = [conv(r) for r in (payload.get("rows") or [])]

    # Only import employees the admin ticked on the preview page (CSV of names).
    selected_set = {s.strip() for s in (selected_names or "").split(",") if s.strip()}
    rows = [r for r in rows if r["employee_name"] in selected_set]

    # day -> week number for this period; used below to drop unassigned days.
    week_rows = db.execute(text("SELECT day_date, week_number FROM week_assignments WHERE timesheet_id = :tid"), {"tid": timesheet_id}).fetchall()
    week_map: Dict[date_type, int] = {row[0]: int(row[1]) for row in week_rows}

    # Record the batch so /undo-last can reverse exactly this import.
    batch = ImportBatch(timesheet_id=timesheet_id, source_name=f"Department import {slug}", created_at=datetime.utcnow())
    db.add(batch)
    db.flush()

    inserted_count = 0
    by_emp: Dict[str, List[Dict]] = {}
    for r in rows:
        by_emp.setdefault(r["employee_name"], []).append(r)

    # Track the date span actually seen (used by the "initial" mode below).
    min_date = None
    max_date = None

    for name, erows in by_emp.items():
        emp = db.query(Employee).filter(func.lower(Employee.name) == func.lower(name)).first()
        if not emp:
            # Auto-create employees that appear in the file but not in the DB.
            emp = Employee(name=name)
            db.add(emp)
            db.flush()

        for r in erows:
            wd: date_type = r["work_date"]
            if min_date is None or wd < min_date:
                min_date = wd
            if max_date is None or wd > max_date:
                max_date = wd

            # Department imports only accept days already assigned to a week.
            if wd not in week_map and mode == "department":
                continue

            ci = r["clock_in"]
            co = r["clock_out"]

            # Keep rows that indicate "scheduled but no show": scheduled_hours > 0 and no clocks or special hours
            if (ci is None and co is None) and (r["pto_hours"] <= 0) and (r["holiday_hours"] <= 0) and (r["bereavement_hours"] <= 0):
                if r.get("scheduled_hours", 0) <= 0:
                    # No clocks, no scheduled hours, nothing else -> skip
                    continue
                # Otherwise, keep the row (total stays as provided/computed; paid will be zero)
            # Incomplete clocks: skip unless PTO/holiday/bereavement-only
            if (ci is None) ^ (co is None):
                if (r["pto_hours"] <= 0) and (r["holiday_hours"] <= 0) and (r["bereavement_hours"] <= 0):
                    continue

            # Clock-out at or before clock-in is treated as an overnight shift.
            if ci and co and co <= ci:
                co = co + timedelta(days=1)

            # Holiday rows are stored without clock times.
            if (r.get("holiday_hours") or 0) > 0:
                ci = None
                co = None

            if _dedup_exists(db, emp.id, timesheet_id, wd, ci, co):
                continue

            # Quantize all hour figures to 2 decimal places via Decimal helpers.
            total_hours = q2(D(r["total_hours"] or 0))
            brk = q2(D(r["break_hours"] or 0))
            pto = q2(D(r["pto_hours"] or 0))
            hol = q2(D(r["holiday_hours"] or 0))
            ber = q2(D(r["bereavement_hours"] or 0))

            # Paid = worked (total - break, floored at 0) + PTO + holiday + bereavement.
            worked = q2(D(total_hours) - D(brk))
            if worked < D(0):
                worked = q2(D(0))
            hours_paid = q2(worked + D(pto) + D(hol) + D(ber))

            te = TimeEntry(
                employee_id=emp.id,
                timesheet_id=timesheet_id,
                work_date=wd,
                clock_in=ci,
                clock_out=co,
                break_hours=brk,
                total_hours=total_hours,
                pto_hours=pto,
                pto_type=(r["pto_type"] or None),
                holiday_hours=hol,
                bereavement_hours=ber,
                hours_paid=hours_paid,
            )
            db.add(te)
            db.flush()
            db.add(ImportBatchItem(batch_id=batch.id, time_entry_id=te.id))
            inserted_count += 1

    db.commit()

    # Best-effort cleanup of the staged JSON context.
    try:
        os.remove(path)
    except Exception:
        pass

    if mode == "initial":
        # Initial import: derive the real period dates from the imported data
        # (the period was created with placeholder dates at upload time).
        if min_date is None or max_date is None:
            row = db.query(func.min(TimeEntry.work_date), func.max(TimeEntry.work_date)).filter(TimeEntry.timesheet_id == timesheet_id).one()
            min_date, max_date = row[0], row[1]

        if not min_date:
            return RedirectResponse(url=f"/upload?error=No+rows+found+after+import", status_code=303)

        from .utils import _semi_monthly_period_for_date as semi
        ps1, pe1 = semi(min_date)
        ps2, pe2 = semi(max_date or min_date)
        # NOTE(review): both branches yield (ps2, pe2) — when the data spans two
        # semi-monthly periods only the later one is kept. Confirm this isn't
        # meant to be (ps1, pe2) to cover the full span.
        ps, pe = (ps2, pe2) if (ps1, pe1) != (ps2, pe2) else (ps1, pe1)

        ts = db.query(TimesheetPeriod).get(timesheet_id)
        ts.period_start = ps
        ts.period_end = pe
        db.commit()

        return RedirectResponse(url=f"/assign-weeks?timesheet_id={timesheet_id}", status_code=303)

    # NOTE(review): msg contains spaces/periods and is not URL-encoded; consider
    # urllib.parse.quote before embedding in the redirect URL.
    msg = f"Imported {inserted_count} time entries from department file."
    return RedirectResponse(url=f"/viewer?timesheet_id={timesheet_id}&msg={msg}", status_code=303)

@router.post("/undo-last")
def importer_undo_last(request: Request, timesheet_id: int = Form(...), db: Session = Depends(get_session)):
    """Delete the most recent import batch (its items and time entries) for a period."""
    if not request.session.get("is_admin"):
        raise HTTPException(status_code=403, detail="Admin access required")
    batch = db.query(ImportBatch).filter(ImportBatch.timesheet_id == timesheet_id).order_by(ImportBatch.created_at.desc()).first()
    if not batch:
        return RedirectResponse(url=f"/viewer?timesheet_id={timesheet_id}&msg=No+department+imports+to+undo", status_code=303)
    items = db.query(ImportBatchItem).filter(ImportBatchItem.batch_id == batch.id).all()
    ids = [it.time_entry_id for it in items]
    # Delete link rows first, then the entries they referenced, then the batch.
    if items:
        db.query(ImportBatchItem).filter(ImportBatchItem.batch_id == batch.id).delete(synchronize_session=False)
    if ids:
        db.query(TimeEntry).filter(TimeEntry.id.in_(ids)).delete(synchronize_session=False)
    db.delete(batch)
    db.commit()
    return RedirectResponse(url=f"/viewer?timesheet_id={timesheet_id}&msg=Undid+last+department+import", status_code=303)
\ No
newline at end of file diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..f3a77c1 --- /dev/null +++ b/app/main.py @@ -0,0 +1,2531 @@ +import os +import csv +from collections import defaultdict +from uuid import uuid4 +from datetime import datetime, date, timedelta +from typing import Optional, List, Set +from decimal import Decimal, ROUND_HALF_UP +from urllib.parse import quote +from io import BytesIO + +from fastapi import FastAPI, Request, Depends, Form, UploadFile, File, HTTPException, Query +from fastapi.responses import RedirectResponse, HTMLResponse, JSONResponse, StreamingResponse +from fastapi.staticfiles import StaticFiles +from fastapi.templating import Jinja2Templates +from starlette.middleware.sessions import SessionMiddleware +from sqlalchemy import func, text, Column, Integer, Date, String, DateTime, Numeric +from sqlalchemy.orm import Session + +from .db import engine, SessionLocal, get_session, ping_db +from .models import ( + Base, + User, + Employee, + TimeEntry, + WeekAssignment, + TimesheetStatus, + EmployeePeriodSetting, + TimesheetPeriod, + DuplicateReview, + PTOAccount, + PTOAdjustment, +) +from .auth import hash_password, verify_and_update_password, login_required +from .utils import ( + D, + q2, + group_entries_for_timesheet, + enumerate_timesheets_global, + _semi_monthly_period_for_date, +) +from .payroll_export import build_overview_xlsx + +# Attendance router (keeps this file small) +from .attendance import router as attendance_router +# Department upload router (moved out of main.py) +from .dept_importer import router as dept_router + +SECRET_KEY = os.getenv("SECRET_KEY", "please-change-me") +DEFAULT_ADMIN_USER = os.getenv("DEFAULT_ADMIN_USER", "Admin") +DEFAULT_ADMIN_PASSWORD = os.getenv("DEFAULT_ADMIN_PASSWORD", "1Senior!") +PORT = int(os.getenv("PORT", "5070")) +APP_TZ = os.getenv("APP_TZ", "America/New_York") + +# ----------------------- +# Templates / app bootstrap +# ----------------------- +app = 
FastAPI(title="TimeKeeper")
app.add_middleware(SessionMiddleware, secret_key=SECRET_KEY)

app.mount("/static", StaticFiles(directory="app/static"), name="static")
templates = Jinja2Templates(directory="app/templates")
# Routers pull the shared template environment from app.state.
app.state.templates = templates

# Smart loader: prefer UTF-8, fallback to cp1252 for Windows-saved files
try:
    from jinja2.loaders import FileSystemLoader

    class SmartLoader(FileSystemLoader):
        """FileSystemLoader that retries a template read as cp1252 when UTF-8 decoding fails."""
        def get_source(self, environment, template):
            try:
                # Reset to UTF-8 each call so a previous cp1252 fallback doesn't stick.
                self.encoding = "utf-8"
                return super().get_source(environment, template)
            except UnicodeDecodeError:
                self.encoding = "cp1252"
                return super().get_source(environment, template)

    templates.env.loader = SmartLoader("app/templates")
except Exception:
    # Keep the default loader if jinja2 internals differ; templates must be UTF-8 then.
    pass

# -----------------------
# Jinja filters (formatting)
# -----------------------
def fmt2(x):
    """Format any numeric-ish value as a 2-decimal string; None -> "0.00",
    unparsable input is returned via str()."""
    if x is None:
        return "0.00"
    try:
        d = Decimal(str(x))
    except Exception:
        return str(x)
    return str(d.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP))


def fmt_time(x):
    """Render a datetime or time-like string as "hh:MM:SS AM/PM"; falsy -> "".

    Tries several known formats, then falls back to extracting the trailing
    time token from the string; returns the raw token/string if all parsing fails.
    """
    if not x:
        return ""
    try:
        if isinstance(x, datetime):
            return x.strftime("%I:%M:%S %p")
        s = str(x)
        for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S", "%H:%M:%S.%f", "%H:%M:%S"):
            try:
                dt = datetime.strptime(s, fmt)
                return dt.strftime("%I:%M:%S %p")
            except Exception:
                continue
        # Last resort: strip fractional seconds and take the last whitespace-separated token.
        base = s.split(".")[0]
        parts = base.split()
        tpart = parts[-1] if parts else base
        try:
            dt = datetime.strptime(tpart, "%H:%M:%S")
            return dt.strftime("%I:%M:%S %p")
        except Exception:
            return tpart
    except Exception:
        return str(x)


def fmt_dt(x):
    """Render a datetime or datetime-like string as "Mon DD, YYYY hh:MM:SS AM/PM"; falsy -> ""."""
    if not x:
        return ""
    try:
        if isinstance(x, datetime):
            return x.strftime("%b %d, %Y %I:%M:%S %p")
        s = str(x)
        for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S", "%m/%d/%Y %H:%M:%S"):
            try:
                dt = datetime.strptime(s, fmt)
                return dt.strftime("%b %d, %Y %I:%M:%S %p")
            except Exception:
                continue
        # Unrecognized format: pass the original string through unchanged.
        return s
    except Exception:
        return str(x)


def fmt_excel_dt(x):
    # Excel
style "MM/DD/YYYY hh:mm AM/PM"
    if not x:
        return ""
    try:
        if isinstance(x, datetime):
            return x.strftime("%m/%d/%Y %I:%M %p")
        s = str(x).strip()
        # Accept a range of common US and ISO datetime string layouts.
        fmts = [
            "%m/%d/%Y %I:%M:%S %p",
            "%m/%d/%Y %I:%M %p",
            "%m/%d/%y %I:%M %p",
            "%Y-%m-%d %H:%M:%S",
            "%Y-%m-%d %H:%M",
            "%m/%d/%Y %H:%M:%S",
            "%m/%d/%Y %H:%M",
        ]
        for fmt in fmts:
            try:
                dt = datetime.strptime(s, fmt)
                return dt.strftime("%m/%d/%Y %I:%M %p")
            except Exception:
                continue
        # Unrecognized format: return the input string unchanged.
        return s
    except Exception:
        return str(x)


templates.env.filters["fmt2"] = fmt2
templates.env.filters["fmt_time"] = fmt_time
templates.env.filters["fmt_dt"] = fmt_dt
templates.env.filters["fmt_excel_dt"] = fmt_excel_dt

# -----------------------
# Timezone helpers
# -----------------------
try:
    from zoneinfo import ZoneInfo
    LOCAL_TZ = ZoneInfo(APP_TZ)
except Exception:
    # zoneinfo unavailable or APP_TZ invalid -> to_local() becomes a no-op.
    LOCAL_TZ = None

def to_local(dt: Optional[datetime]) -> Optional[datetime]:
    """Convert a datetime to the configured local timezone.

    Naive datetimes are assumed to be UTC — TODO confirm this matches how the
    DB stores them. Returns the input unchanged on any failure or when no
    local timezone is configured.
    """
    if not dt:
        return None
    try:
        if LOCAL_TZ is None:
            return dt
        if dt.tzinfo is None:
            dt = dt.replace(tzinfo=ZoneInfo("UTC"))
        return dt.astimezone(LOCAL_TZ)
    except Exception:
        return dt

# -----------------------
# Review tables (dismiss banners without altering entries)
# -----------------------
class LongShiftFlag(Base):
    """Marks an (employee, date) in a period as having a long shift to review."""
    __tablename__ = "long_shift_flags"
    id = Column(Integer, primary_key=True, autoincrement=True)
    timesheet_id = Column(Integer, nullable=False, index=True)
    employee_id = Column(Integer, nullable=False, index=True)
    work_date = Column(Date, nullable=False, index=True)


class LongShiftReview(Base):
    """Records that a long-shift flag has been reviewed/dismissed."""
    __tablename__ = "long_shift_reviews"
    id = Column(Integer, primary_key=True, autoincrement=True)
    timesheet_id = Column(Integer, nullable=False, index=True)
    employee_id = Column(Integer, nullable=False, index=True)
    work_date = Column(Date, nullable=False, index=True)


class PtoNeedFlag(Base):
    """Marks an (employee, date) in a period as possibly needing PTO entry."""
    __tablename__ = "pto_need_flags"
    id = Column(Integer, primary_key=True, autoincrement=True)
    timesheet_id = Column(Integer, nullable=False, index=True)
    employee_id = Column(Integer, nullable=False, index=True)
    work_date = Column(Date, nullable=False, index=True)


class PtoReviewFlag(Base):
    """Records that a PTO flag has been reviewed/dismissed."""
    __tablename__ = "pto_review_flags"
    id = Column(Integer, primary_key=True, autoincrement=True)
    timesheet_id = Column(Integer, nullable=False, index=True)
    employee_id = Column(Integer, nullable=False, index=True)
    work_date = Column(Date, nullable=False, index=True)


class HolidayReviewFlag(Base):
    """Records that a holiday-hours entry has been reviewed/dismissed."""
    __tablename__ = "holiday_review_flags"
    id = Column(Integer, primary_key=True, autoincrement=True)
    timesheet_id = Column(Integer, nullable=False, index=True)
    employee_id = Column(Integer, nullable=False, index=True)
    work_date = Column(Date, nullable=False, index=True)

# -----------------------
# Admin role + profile tables
# -----------------------
class AdminUser(Base):
    """Membership table: a row here grants the user admin rights."""
    __tablename__ = "admin_users"
    user_id = Column(Integer, primary_key=True, index=True)


class UserProfile(Base):
    """Optional display profile for a user account."""
    __tablename__ = "user_profiles"
    user_id = Column(Integer, primary_key=True, index=True)
    full_name = Column(String(255), nullable=True)

# -----------------------
# PTO usage exclusions
# -----------------------
class PTOUsageExclusion(Base):
    """An (employee, date[, pto_type]) excluded from PTO usage tallies."""
    __tablename__ = "pto_usage_exclusions"
    id = Column(Integer, primary_key=True, autoincrement=True)
    employee_id = Column(Integer, nullable=False, index=True)
    work_date = Column(Date, nullable=False, index=True)
    pto_type = Column(String(64), nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)

# -----------------------
# NEW: Payroll notes per employee per time period (for export)
# -----------------------
class PayrollNote(Base):
    """Per-employee, per-period payroll adjustments and free-text notes for export."""
    __tablename__ = "payroll_notes"
    id = Column(Integer, primary_key=True, autoincrement=True)
    timesheet_id = Column(Integer, nullable=False, index=True)
    employee_id = Column(Integer, nullable=False, index=True)
    reimbursement_amount
= Column(Numeric(10, 2), nullable=True)
    additional_payroll_amount = Column(Numeric(10, 2), nullable=True)
    notes = Column(String(2000), nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

# -----------------------
# Holiday helpers (top-level)
# -----------------------
def _flagged_holiday_dates(db: Session, timesheet_id: int, employee_id: int) -> set:
    """Distinct work dates in the period where the employee has holiday hours recorded."""
    return {
        r[0]
        for r in db.query(TimeEntry.work_date)
        .filter(
            TimeEntry.timesheet_id == timesheet_id,
            TimeEntry.employee_id == employee_id,
            TimeEntry.holiday_hours > 0,
        )
        .group_by(TimeEntry.work_date)
        .all()
    }

def _reviewed_holiday_dates(db: Session, timesheet_id: int, employee_id: int) -> set:
    """Work dates whose holiday-hours banner has already been dismissed."""
    return {
        r.work_date
        for r in db.query(HolidayReviewFlag)
        .filter(
            HolidayReviewFlag.timesheet_id == timesheet_id,
            HolidayReviewFlag.employee_id == employee_id,
        )
        .all()
    }

def _holiday_needs_rows(db: Session, timesheet_id: int, employee_id: int):
    """Holiday-hours entries still awaiting review (flagged minus reviewed), date ascending."""
    reviewed = _reviewed_holiday_dates(db, timesheet_id, employee_id)
    return (
        db.query(TimeEntry)
        .filter(
            TimeEntry.timesheet_id == timesheet_id,
            TimeEntry.employee_id == employee_id,
            TimeEntry.holiday_hours > 0,
            ~TimeEntry.work_date.in_(reviewed),
        )
        .order_by(TimeEntry.work_date.asc())
        .all()
    )

# -----------------------
# Startup / schema
# -----------------------
def ensure_schema():
    """Apply idempotent, additive schema migrations (Postgres DDL) at startup."""
    with engine.connect() as conn:
        # Backup columns for times hidden by PTO status in print view
        conn.execute(text("ALTER TABLE time_entries ADD COLUMN IF NOT EXISTS pto_clock_in_backup TIMESTAMP NULL"))
        conn.execute(text("ALTER TABLE time_entries ADD COLUMN IF NOT EXISTS pto_clock_out_backup TIMESTAMP NULL"))

        # Year columns for per-year PTO tracking
        conn.execute(text("ALTER TABLE pto_accounts ADD COLUMN IF NOT EXISTS year INTEGER"))
        conn.execute(text("ALTER TABLE pto_adjustments ADD COLUMN IF NOT EXISTS year INTEGER"))

        # Backfill existing rows to current year if null
        conn.execute(text("UPDATE pto_accounts SET year = EXTRACT(YEAR FROM NOW())::INTEGER WHERE year IS NULL"))
        conn.execute(text("UPDATE pto_adjustments SET year = EXTRACT(YEAR FROM NOW())::INTEGER WHERE year IS NULL"))

        # SAFEGUARD: Drop incorrect unique index that enforces only one row per employee
        conn.execute(text("DROP INDEX IF EXISTS ix_pto_accounts_employee_id"))

        # Enforce one starting balance per employee per year
        conn.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS idx_pto_accounts_emp_year ON pto_accounts (employee_id, year)"))

        # Employee active status + termination date
        conn.execute(text("ALTER TABLE employees ADD COLUMN IF NOT EXISTS is_active BOOLEAN NOT NULL DEFAULT TRUE"))
        conn.execute(text("ALTER TABLE employees ADD COLUMN IF NOT EXISTS termination_date DATE NULL"))
        # NOTE(review): with NOT NULL DEFAULT TRUE above, this UPDATE looks like
        # a defensive no-op — confirm it's needed for pre-existing schemas.
        conn.execute(text("UPDATE employees SET is_active = TRUE WHERE IS_ACTIVE IS NULL"))

        # Payroll notes table created by SQLAlchemy metadata (no raw DDL here)

        conn.commit()


@app.on_event("startup")
def on_startup():
    """Verify DB connectivity, create/upgrade schema, and seed the default admin user."""
    try:
        ping_db()
        Base.metadata.create_all(bind=engine)
        ensure_schema()
        with SessionLocal() as s:
            default = s.query(User).filter(User.username == DEFAULT_ADMIN_USER).first()
            if not default:
                default = User(username=DEFAULT_ADMIN_USER, password_hash=hash_password(DEFAULT_ADMIN_PASSWORD))
                s.add(default)
                s.flush()
            # Ensure the default user is an admin even if the account pre-existed.
            if not s.query(AdminUser).filter(AdminUser.user_id == default.id).first():
                s.add(AdminUser(user_id=default.id))
            s.commit()
        print("[startup] Timekeeper ready.")
    except Exception as ex:
        # Startup continues so /health can still report the failure.
        print(f"[startup] Database initialization failed: {ex}")

# -----------------------
# Admin helpers
# -----------------------
def current_is_admin(db: Session, user_id: Optional[int]) -> bool:
    """True when user_id is present in the admin_users table."""
    if not user_id:
        return False
    return bool(db.query(AdminUser).filter(AdminUser.user_id == user_id).first())


def
require_admin_edit(request: Request, db: Session):
    """Raise 403 unless the session user is an admin (guards all edit actions)."""
    if not current_is_admin(db, request.session.get("user_id")):
        raise HTTPException(status_code=403, detail="Read-only user: edit action not permitted")

# -----------------------
# Basic routes (login, viewer, etc.)
# -----------------------
@app.get("/health")
def health():
    """Liveness/readiness probe: ok only when the database responds to a ping."""
    try:
        ping_db()
        return JSONResponse({"status": "ok"})
    except Exception as ex:
        return JSONResponse({"status": "error", "detail": str(ex)}, status_code=500)


@app.get("/", response_class=HTMLResponse)
def index(request: Request):
    """Root: send authenticated users to the viewer, others to login."""
    if request.session.get("user_id"):
        return RedirectResponse(url="/viewer")
    return RedirectResponse(url="/login")


@app.get("/login", response_class=HTMLResponse)
def login_page(request: Request):
    """Render the login form (nav links hidden on this page)."""
    return templates.TemplateResponse("login.html", {"request": request, "hide_nav_links": True})


@app.post("/login")
def login(
    request: Request,
    username: str = Form(...),
    password: str = Form(...),
    db: Session = Depends(get_session),
):
    """Validate credentials, upgrade the stored hash if needed, and start a session."""
    try:
        user = db.query(User).filter(User.username == username).first()
        if not user:
            return templates.TemplateResponse("login.html", {"request": request, "hide_nav_links": True, "error": "Invalid credentials"}, status_code=401)

        # verify_and_update_password returns (ok, new_hash-or-None); a new hash
        # means the stored one used an outdated scheme and should be replaced.
        verified, new_hash = verify_and_update_password(password, user.password_hash)
        if not verified:
            return templates.TemplateResponse("login.html", {"request": request, "hide_nav_links": True, "error": "Invalid credentials"}, status_code=401)

        if new_hash:
            user.password_hash = new_hash
            db.commit()

        request.session["user_id"] = user.id
        request.session["username"] = user.username
        # Cached in the session; routers check this flag instead of re-querying.
        request.session["is_admin"] = current_is_admin(db, user.id)
        return RedirectResponse(url="/viewer", status_code=303)
    except Exception as ex:
        print(f"[login] error: {ex}")
        # NOTE(review): the raw exception text is shown to the user here, which
        # may leak DB/internal details — consider a generic message.
        return templates.TemplateResponse(
            "login.html",
            {"request": request, "hide_nav_links": True, "error": f"Login failed due to server/database error: {ex}"},
            status_code=500,
        )


@app.get("/logout")
def logout(request: Request):
    """Clear the session and return to the login page."""
    request.session.clear()
    return RedirectResponse(url="/login", status_code=303)

# -------- Upload --------
@app.get("/upload", response_class=HTMLResponse)
@login_required
def upload_page(request: Request):
    """Render the upload form (admin only)."""
    with SessionLocal() as db:
        if not current_is_admin(db, request.session.get("user_id")):
            raise HTTPException(status_code=403, detail="Admin access required")
    return templates.TemplateResponse("upload.html", {"request": request})


@app.post("/upload")
@login_required
async def upload_file(
    request: Request,
    file: UploadFile = File(...),
    timesheet_name: str = Form(...),
    db: Session = Depends(get_session),
):
    """Stage an uploaded file on disk, create a placeholder TimesheetPeriod,
    and hand off to the department importer's initial-mapping flow."""
    require_admin_edit(request, db)

    allowed_ext = (".xlsx", ".xlsm", ".xls", ".csv", ".txt")
    if not file.filename.lower().endswith(allowed_ext):
        return templates.TemplateResponse(
            "upload.html",
            {"request": request, "error": f"Unsupported file type. Please upload one of: {', '.join(allowed_ext)}"},
            status_code=400,
        )

    name = (timesheet_name or "").strip()
    if not name:
        return templates.TemplateResponse(
            "upload.html", {"request": request, "error": "Time Period Name is required."}, status_code=400
        )

    try:
        contents = await file.read()
        os.makedirs("uploads", exist_ok=True)
        saved_path = os.path.join("uploads", f"{datetime.utcnow().strftime('%Y%m%d%H%M%S')}-{file.filename}")
        with open(saved_path, "wb") as f:
            f.write(contents)

        # Real period dates are derived later from the imported data
        # (see the importer's "initial" mode); today is just a placeholder.
        placeholder_date = date.today()
        ts = TimesheetPeriod(period_start=placeholder_date, period_end=placeholder_date, name=name)
        db.add(ts)
        db.flush()
        new_id = ts.id
        db.commit()

        return RedirectResponse(
            url=f"/import/department/start-initial?timesheet_id={new_id}&src={quote(saved_path, safe='')}",
            status_code=303,
        )

    except Exception as ex:
        print(f"[upload] staging failed: {ex}")
        return templates.TemplateResponse("upload.html", {"request": request, "error": f"Staging failed: {ex}"}, status_code=500)

# -------- Assign weeks --------
@app.get("/assign-weeks", response_class=HTMLResponse)
@login_required
def assign_weeks_page(
    request: Request,
    timesheet_id: int = Query(...),
    db: Session = Depends(get_session),
):
    """Show each distinct work date in the period with its current week assignment."""
    if not current_is_admin(db, request.session.get("user_id")):
        raise HTTPException(status_code=403, detail="Admin access required")

    ts = db.query(TimesheetPeriod).get(timesheet_id)
    if not ts:
        raise HTTPException(status_code=404, detail="Time Period not found")

    # Distinct dates that actually have time entries in this period.
    days = [
        r[0]
        for r in (
            db.query(TimeEntry.work_date)
            .filter(TimeEntry.timesheet_id == timesheet_id)
            .group_by(TimeEntry.work_date)
            .order_by(TimeEntry.work_date.asc())
            .all()
        )
    ]
    existing = {
        wa.day_date: wa.week_number
        for wa in db.query(WeekAssignment).filter(WeekAssignment.timesheet_id == timesheet_id).all()
    }

    return templates.TemplateResponse(
        "assign_weeks.html",
        {
            "request": request,
            "timesheet_id": timesheet_id,
            "period": f"{ts.period_start.isoformat()}..{ts.period_end.isoformat()}",
            "days": days,
            "existing": existing,
            "timesheet_name": ts.name or "",
        },
    )


@app.post("/assign-weeks")
@login_required
async def assign_weeks_submit(
    request: Request,
    timesheet_id: int = Form(...),
    timesheet_name: Optional[str] = Form(None),
    db: Session = Depends(get_session),
):
    """Persist week-number assignments posted as dynamic week_<YYYY-MM-DD> fields."""
    require_admin_edit(request, db)

    ts = db.query(TimesheetPeriod).get(timesheet_id)
    if not ts:
        raise HTTPException(status_code=404, detail="Time Period not found")

    form = await request.form()
    for k, v in form.items():
        if not k.startswith("week_"):
            continue
        day_str = k[len("week_"):]
        try:
            day = date.fromisoformat(day_str)
            week = int(v)
        except Exception:
            # Malformed field name/value: skip silently rather than fail the whole save.
            continue

        wa = (
            db.query(WeekAssignment)
            .filter(WeekAssignment.timesheet_id == timesheet_id, WeekAssignment.day_date == day)
            .first()
        )
        if not wa:
            db.add(
                WeekAssignment(
                    timesheet_id=timesheet_id,
period_start=ts.period_start,
                    period_end=ts.period_end,
                    day_date=day,
                    week_number=week,
                )
            )
        else:
            wa.week_number = week

    # Optional rename of the period; empty string clears the name.
    if timesheet_name is not None:
        ts.name = (timesheet_name or "").strip() or None

    db.commit()
    return RedirectResponse(url=f"/viewer?timesheet_id={timesheet_id}", status_code=303)

# -------- Decimal-safe helpers --------
def _to_dec_opt(v: Optional[str]) -> Optional[Decimal]:
    """Parse a form string into a 2-decimal Decimal; None/unparsable -> None."""
    if v is None:
        return None
    try:
        return q2(D(v))
    except Exception:
        return None

def _parse_money(v: Optional[str]) -> Optional[Decimal]:
    """
    Parse currency-like input (e.g., "$370", "370.00", "370") to Decimal(0.01).
    Returns None for blank.
    """
    if v is None:
        return None
    s = (str(v) or "").strip()
    if not s:
        return None
    # strip $ and commas
    s = s.replace("$", "").replace(",", "")
    try:
        d = Decimal(s)
        return d.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)
    except Exception:
        return None

# -------- Entry editing --------
@app.post("/timesheet/update-entry")
@login_required
def update_entry(
    request: Request,
    entry_id: int = Form(...),
    timesheet_id: int = Form(...),
    employee_id: Optional[int] = Form(None),
    total_hours: Optional[str] = Form(None),
    break_hours: Optional[str] = Form(None),
    pto_hours: Optional[str] = Form("0"),
    pto_type: Optional[str] = Form(""),
    holiday_hours: Optional[str] = Form(None),
    bereavement_hours: Optional[str] = Form(None),
    redirect_to: Optional[str] = Form(None),
    db: Session = Depends(get_session),
):
    """Update hour fields / PTO type on a TimeEntry and recompute hours_paid.

    Fields posted as None are left untouched; switching PTO type on/off also
    hides or restores the entry's clock times via the *_backup columns.
    """
    require_admin_edit(request, db)

    entry = db.query(TimeEntry).get(entry_id)
    if not entry:
        raise HTTPException(status_code=404, detail="Entry not found")

    th = _to_dec_opt(total_hours)
    bh = _to_dec_opt(break_hours)
    ph = _to_dec_opt(pto_hours)
    hh = _to_dec_opt(holiday_hours)
    oh = _to_dec_opt(bereavement_hours)

    if th is not None:
        entry.total_hours = th
    if bh is not None:
        entry.break_hours = bh
    if hh is not None:
        entry.holiday_hours = hh
    if oh is not None:
        entry.bereavement_hours = oh

    new_pto_type = (pto_type or "").strip() or None

    # Turning PTO on: stash the clock times so the print view hides them.
    if new_pto_type and not entry.pto_type:
        if entry.clock_in:
            entry.pto_clock_in_backup = entry.clock_in
        if entry.clock_out:
            entry.pto_clock_out_backup = entry.clock_out
        entry.clock_in = None
        entry.clock_out = None

    # Turning PTO off: restore any backed-up clock times.
    if not new_pto_type and entry.pto_type:
        if entry.pto_clock_in_backup:
            entry.clock_in = entry.pto_clock_in_backup
        if entry.pto_clock_out_backup:
            entry.clock_out = entry.pto_clock_out_backup

    entry.pto_type = new_pto_type
    if ph is not None:
        entry.pto_hours = ph

    # hours_paid = worked (total - break, floored at 0) + PTO + holiday + bereavement.
    worked = D(entry.total_hours or 0) - D(entry.break_hours or 0)
    if worked < D(0):
        worked = D(0)
    entry.hours_paid = q2(
        worked + D(entry.pto_hours or 0) + D(entry.holiday_hours or 0) + D(entry.bereavement_hours or 0)
    )

    db.commit()

    if redirect_to:
        return RedirectResponse(url=redirect_to, status_code=303)

    suffix = f"?timesheet_id={timesheet_id}"
    if employee_id:
        suffix += f"&employee_id={employee_id}"
    return RedirectResponse(url=f"/viewer{suffix}", status_code=303)

# -------- Routers --------
app.include_router(attendance_router)
app.include_router(dept_router)

# -------- Clock editing (JSON for instant UI updates) --------
def _parse_dt_local(v: Optional[str]) -> Optional[datetime]:
    """Parse an HTML datetime-local string into a naive datetime; blank/bad -> None."""
    if not v:
        return None
    v = v.strip()
    if not v:
        return None
    try:
        return datetime.fromisoformat(v)
    except Exception:
        for fmt in ("%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"):
            try:
                return datetime.strptime(v, fmt)
            except ValueError:
                continue
    return None

@app.post("/timesheet/update-clocks")
@login_required
async def update_clocks(
    request: Request,
    entry_id: int = Form(...),
    clock_in: Optional[str] = Form(None),
    clock_out: Optional[str] = Form(None),
    break_hours: Optional[str] = Form(None),
    pto_hours: Optional[str] = Form(None),
    holiday_hours: Optional[str] = Form(None),
    bereavement_hours: Optional[str] = Form(None),
    redirect_to: Optional[str] = Form(None),
    db: Session = Depends(get_session),
):
    """Edit clock-in/out and hour fields on one entry; JSON or redirect reply.

    Responds with JSON when the client sends ``Accept: application/json``
    (inline UI edits), otherwise a 303 redirect. Admin only.
    """
    accept = (request.headers.get("accept") or "").lower()

    if not current_is_admin(db, request.session.get("user_id")):
        if "application/json" in accept:
            return JSONResponse({"ok": False, "error": "admin_required"}, status_code=403)
        raise HTTPException(status_code=403, detail="Read-only user: edit action not permitted")

    entry = db.query(TimeEntry).get(entry_id)
    if not entry:
        if "application/json" in accept:
            return JSONResponse({"ok": False, "error": "not_found"}, status_code=404)
        return RedirectResponse(url=redirect_to or "/", status_code=303)

    # NOTE(review): blank inputs parse to None and leave the stored clocks
    # untouched — there is no way to *clear* a clock via this endpoint.
    # Presumably intentional; confirm against the UI.
    new_ci = _parse_dt_local(clock_in)
    new_co = _parse_dt_local(clock_out)

    if new_ci is not None:
        entry.clock_in = new_ci
    if new_co is not None:
        entry.clock_out = new_co

    # Overnight shift handling: a clock-out at/before clock-in is assumed to
    # be the next calendar day.
    if entry.clock_in and entry.clock_out and entry.clock_out <= entry.clock_in:
        entry.clock_out = entry.clock_out + timedelta(days=1)

    if entry.clock_in and entry.clock_out:
        entry.total_hours = q2(D((entry.clock_out - entry.clock_in).total_seconds()) / D(3600))

    bh = _to_dec_opt(break_hours)
    ph = _to_dec_opt(pto_hours)
    hh = _to_dec_opt(holiday_hours)
    oh = _to_dec_opt(bereavement_hours)

    if bh is not None:
        entry.break_hours = bh
    if ph is not None:
        entry.pto_hours = ph
    if hh is not None:
        entry.holiday_hours = hh
    if oh is not None:
        entry.bereavement_hours = oh

    # Same paid-hours formula as update_entry: max(total - break, 0) + extras.
    worked = D(entry.total_hours or 0) - D(entry.break_hours or 0)
    if worked < D(0):
        worked = D(0)
    entry.hours_paid = q2(
        worked + D(entry.pto_hours or 0) + D(entry.holiday_hours or 0) + D(entry.bereavement_hours or 0)
    )

    db.commit()

    if "application/json" in accept:
        # Pre-formatted strings let the client update cells without reformatting.
        return JSONResponse({
            "ok": True,
            "clock_in_fmt": fmt_excel_dt(entry.clock_in) if entry.clock_in else None,
            "clock_out_fmt": fmt_excel_dt(entry.clock_out) if entry.clock_out else None,
            "total_hours":
            float(D(entry.total_hours or 0)),
            "total_hours_fmt": fmt2(entry.total_hours or 0),
            "hours_paid": float(D(entry.hours_paid or 0)),
            "hours_paid_fmt": fmt2(entry.hours_paid or 0),
        })

    return RedirectResponse(url=redirect_to or "/", status_code=303)

# -------- Delete a single time entry --------
@app.post("/timesheet/delete-entry")
@login_required
def delete_entry_row(
    request: Request,
    entry_id: int = Form(...),
    timesheet_id: int = Form(...),
    db: Session = Depends(get_session),
):
    """Delete one time entry (admin only), verifying it belongs to the period.

    Import-batch-item references are removed first to avoid FK violations.
    Returns JSON for the client-side row removal.
    """
    require_admin_edit(request, db)

    entry = db.query(TimeEntry).get(entry_id)
    # Guard against deleting an entry from a different period than requested.
    if not entry or int(entry.timesheet_id) != int(timesheet_id):
        raise HTTPException(status_code=404, detail="Time entry not found for this time period")

    db.execute(text("DELETE FROM import_batch_items WHERE time_entry_id = :eid"), {"eid": entry_id})

    db.delete(entry)
    db.commit()
    return JSONResponse({"ok": True})

# -------- Bulk delete (multi-select) --------
@app.post("/timesheet/delete-entries")
@login_required
def delete_entries(
    request: Request,
    timesheet_id: int = Form(...),
    entry_ids: str = Form(...),
    db: Session = Depends(get_session),
):
    """Bulk-delete time entries given a comma-separated id list (admin only).

    Non-numeric tokens are ignored; ids are deduplicated and restricted to
    entries actually in the given period. Returns the delete count as JSON.
    """
    require_admin_edit(request, db)

    ids = []
    for tok in (entry_ids or "").split(","):
        tok = tok.strip()
        if not tok:
            continue
        try:
            ids.append(int(tok))
        except Exception:
            continue
    ids = list({i for i in ids})
    if not ids:
        return JSONResponse({"ok": True, "deleted": 0})

    # Only delete ids that belong to this timesheet period.
    valid_ids = [
        rid
        for (rid,) in db.query(TimeEntry.id)
        .filter(TimeEntry.id.in_(ids), TimeEntry.timesheet_id == timesheet_id)
        .all()
    ]
    if not valid_ids:
        return JSONResponse({"ok": True, "deleted": 0})

    # Remove import-batch references first (one statement per id).
    for eid in valid_ids:
        db.execute(text("DELETE FROM import_batch_items WHERE time_entry_id = :eid"), {"eid": eid})

    db.query(TimeEntry).filter(TimeEntry.id.in_(valid_ids)).delete(synchronize_session=False)
    db.commit()
    return JSONResponse({"ok": True, "deleted": len(valid_ids)})

# -------- Employee-period settings --------
@app.post("/viewer/update-employee-period")
@login_required
def update_employee_period(
    request: Request,
    employee_id: int = Form(...),
    timesheet_id: int = Form(...),
    carry_over_hours: Optional[str] = Form("0"),
    redirect_to: Optional[str] = Form(None),
    db: Session = Depends(get_session),
):
    """Upsert the per-employee/per-period settings row (carry-over hours)."""
    require_admin_edit(request, db)

    ts = db.query(TimesheetPeriod).get(timesheet_id)
    if not ts:
        raise HTTPException(status_code=404, detail="Time Period not found")

    eps = (
        db.query(EmployeePeriodSetting)
        .filter(EmployeePeriodSetting.timesheet_id == timesheet_id, EmployeePeriodSetting.employee_id == employee_id)
        .first()
    )
    if not eps:
        eps = EmployeePeriodSetting(
            employee_id=employee_id,
            period_start=ts.period_start,
            period_end=ts.period_end,
            timesheet_id=timesheet_id,
        )
        db.add(eps)

    eps.carry_over_hours = q2(D(carry_over_hours or "0"))

    db.commit()
    if redirect_to:
        return RedirectResponse(url=redirect_to, status_code=303)
    return RedirectResponse(url=f"/viewer?timesheet_id={timesheet_id}&employee_id={employee_id}", status_code=303)

# -------- Submit timesheet --------
@app.post("/viewer/submit")
@login_required
def submit_timesheet(
    request: Request,
    employee_id: int = Form(...),
    timesheet_id: int = Form(...),
    db: Session = Depends(get_session),
):
    """Mark one employee's timesheet "submitted", then advance the workflow.

    After marking, redirects to the next employee still pending in this
    period (continues past this range).
    """
    require_admin_edit(request, db)

    ts = db.query(TimesheetPeriod).get(timesheet_id)
    if not ts:
        raise HTTPException(status_code=404, detail="Time Period not found")

    # Upsert the status row for this employee/period.
    row = (
        db.query(TimesheetStatus)
        .filter(TimesheetStatus.timesheet_id == timesheet_id, TimesheetStatus.employee_id == employee_id)
        .first()
    )
    if not row:
        db.add(
            TimesheetStatus(
                timesheet_id=timesheet_id,
                employee_id=employee_id,
                period_start=ts.period_start,
                period_end=ts.period_end,
                status="submitted",
            )
        )
    else:
        row.status = "submitted"
    db.commit()

    employees_all = db.query(Employee).order_by(Employee.name.asc()).all()
    # Employees that actually have entries in this period.
    emp_ids_with_rows = [
        r[0]
        for r in db.query(TimeEntry.employee_id)
        .filter(TimeEntry.timesheet_id == timesheet_id)
        .group_by(TimeEntry.employee_id)
        .all()
    ]
    submitted = {
        r[0]: r[1]
        for r in db.query(TimesheetStatus.employee_id, TimesheetStatus.status)
        .filter(TimesheetStatus.timesheet_id == timesheet_id)
        .all()
    }
    # Pending = has entries but no "submitted" status row yet.
    pending_ids = [eid for eid in emp_ids_with_rows if eid not in submitted or submitted[eid] != "submitted"]

    # Prefer the next pending employee *after* the current one in name order.
    next_employee_id: Optional[int] = None
    current_index = next((i for i, e in enumerate(employees_all) if e.id == employee_id), None)
    if current_index is not None:
        for e in employees_all[current_index + 1 :]:
            if e.id in pending_ids:
                next_employee_id = e.id
                break

    if next_employee_id:
        return RedirectResponse(url=f"/viewer?timesheet_id={timesheet_id}&employee_id={next_employee_id}", status_code=303)

    if len(pending_ids) == 0:
        # Everyone is submitted; viewer_page recognizes this exact message.
        msg = "You are all done with this timeperiod, See you next time!"
        return RedirectResponse(url=f"/viewer?timesheet_id={timesheet_id}&include_submitted=1&msg={msg}", status_code=303)

    # Otherwise wrap around to the first pending employee in name order.
    first_pending = next((e.id for e in employees_all if e.id in pending_ids), None)
    if first_pending:
        return RedirectResponse(url=f"/viewer?timesheet_id={timesheet_id}&employee_id={first_pending}", status_code=303)

    return RedirectResponse(url=f"/viewer?timesheet_id={timesheet_id}", status_code=303)

# -------- Keep duplicates / review actions (unchanged) --------
@app.post("/viewer/keep-duplicates")
@login_required
def viewer_keep_duplicates(
    request: Request,
    employee_id: int = Form(...),
    timesheet_id: int = Form(...),
    db: Session = Depends(get_session),
):
    """Mark all of an employee's duplicate-date warnings as reviewed.

    Finds dates with more than one entry for this employee/period and records
    a DuplicateReview row for each (continues past this range).
    """
    require_admin_edit(request, db)

    dup_rows = (
        db.query(TimeEntry.work_date, func.count(TimeEntry.id))
        .filter(TimeEntry.timesheet_id == timesheet_id, TimeEntry.employee_id == employee_id)
        .group_by(TimeEntry.work_date)
        .having(func.count(TimeEntry.id) > 1)
        .order_by(TimeEntry.work_date.asc())
        .all()
    )
    dup_dates = [d for d, _ in dup_rows]

    created = 0
    for d in dup_dates:
        # Idempotent: only add a review row if one doesn't exist yet.
        exists = (
            db.query(DuplicateReview)
            .filter(
                DuplicateReview.timesheet_id == timesheet_id,
                DuplicateReview.employee_id == employee_id,
                DuplicateReview.work_date == d,
            )
            .first()
        )
        if not exists:
            db.add(DuplicateReview(timesheet_id=timesheet_id, employee_id=employee_id, work_date=d))
            created += 1
    if created:
        db.commit()

    msg = "Duplicate dates marked as reviewed."
    return RedirectResponse(
        url=f"/viewer?timesheet_id={timesheet_id}&employee_id={employee_id}&msg={msg}",
        status_code=303,
    )

@app.post("/viewer/review-long-shifts")
@login_required
def viewer_review_long_shifts(
    request: Request,
    employee_id: int = Form(...),
    timesheet_id: int = Form(...),
    db: Session = Depends(get_session),
):
    """Mark all of an employee's flagged long shifts as reviewed.

    Copies each LongShiftFlag date into a LongShiftReview row (if missing),
    then redirects back to the viewer with a flash message.
    """
    require_admin_edit(request, db)

    flagged_dates = {
        r.work_date
        for r in db.query(LongShiftFlag)
        .filter(LongShiftFlag.timesheet_id == timesheet_id, LongShiftFlag.employee_id == employee_id)
        .all()
    }

    created = 0
    for d in flagged_dates:
        exists = (
            db.query(LongShiftReview)
            .filter(
                LongShiftReview.timesheet_id == timesheet_id,
                LongShiftReview.employee_id == employee_id,
                LongShiftReview.work_date == d,
            )
            .first()
        )
        if not exists:
            db.add(LongShiftReview(timesheet_id=timesheet_id, employee_id=employee_id, work_date=d))
            created += 1
    if created:
        db.commit()

    msg = "Long shifts marked as reviewed."
    return RedirectResponse(
        url=f"/viewer?timesheet_id={timesheet_id}&employee_id={employee_id}&msg={msg}",
        status_code=303,
    )

@app.post("/viewer/review-pto-needs")
@login_required
def viewer_review_pto_needs(
    request: Request,
    employee_id: int = Form(...),
    timesheet_id: int = Form(...),
    db: Session = Depends(get_session),
):
    """Mark all of an employee's PTO-needed flags as reviewed.

    Copies each PtoNeedFlag date into a PtoReviewFlag row (if missing), then
    redirects back to the viewer with a flash message.
    """
    require_admin_edit(request, db)

    flagged_dates = {
        r.work_date
        for r in db.query(PtoNeedFlag)
        .filter(PtoNeedFlag.timesheet_id == timesheet_id, PtoNeedFlag.employee_id == employee_id)
        .all()
    }

    created = 0
    for d in flagged_dates:
        # Idempotent: skip dates that already have a review row.
        exists = (
            db.query(PtoReviewFlag)
            .filter(
                PtoReviewFlag.timesheet_id == timesheet_id,
                PtoReviewFlag.employee_id == employee_id,
                PtoReviewFlag.work_date == d,
            )
            .first()
        )
        if not exists:
            db.add(PtoReviewFlag(timesheet_id=timesheet_id, employee_id=employee_id, work_date=d))
            created += 1
    if created:
        db.commit()

    msg = "PTO review marked as reviewed."
    return RedirectResponse(
        url=f"/viewer?timesheet_id={timesheet_id}&employee_id={employee_id}&msg={msg}",
        status_code=303,
    )

@app.post("/viewer/review-holiday-needs")
@login_required
def viewer_review_holiday_needs(
    request: Request,
    employee_id: int = Form(...),
    timesheet_id: int = Form(...),
    db: Session = Depends(get_session),
):
    """Mark an employee's flagged holiday rows as reviewed (viewer variant).

    Same logic as the /review variant below, but redirects back to /viewer.
    """
    require_admin_edit(request, db)

    flagged_dates = _flagged_holiday_dates(db, timesheet_id, employee_id)

    created = 0
    for d in flagged_dates:
        exists = (
            db.query(HolidayReviewFlag)
            .filter(
                HolidayReviewFlag.timesheet_id == timesheet_id,
                HolidayReviewFlag.employee_id == employee_id,
                HolidayReviewFlag.work_date == d,
            )
            .first()
        )
        if not exists:
            db.add(HolidayReviewFlag(timesheet_id=timesheet_id, employee_id=employee_id, work_date=d))
            created += 1
    if created:
        db.commit()

    msg = "Holiday rows marked as reviewed."
    return RedirectResponse(
        url=f"/viewer?timesheet_id={timesheet_id}&employee_id={employee_id}&msg={msg}",
        status_code=303,
    )

@app.post("/review/review-holiday-needs")
@login_required
def review_review_holiday_needs(
    request: Request,
    employee_id: int = Form(...),
    timesheet_id: int = Form(...),
    db: Session = Depends(get_session),
):
    """Mark an employee's flagged holiday rows as reviewed (review-page variant).

    Identical flag-copying logic to the /viewer variant; differs only in the
    redirect target (/review/edit) and flash encoding.
    """
    require_admin_edit(request, db)

    flagged_dates = _flagged_holiday_dates(db, timesheet_id, employee_id)

    created = 0
    for d in flagged_dates:
        exists = (
            db.query(HolidayReviewFlag)
            .filter(
                HolidayReviewFlag.timesheet_id == timesheet_id,
                HolidayReviewFlag.employee_id == employee_id,
                HolidayReviewFlag.work_date == d,
            )
            .first()
        )
        if not exists:
            db.add(HolidayReviewFlag(timesheet_id=timesheet_id, employee_id=employee_id, work_date=d))
            created += 1
    if created:
        db.commit()

    msg = "Holiday rows marked as reviewed."
    return RedirectResponse(
        url=f"/review/edit?timesheet_id={timesheet_id}&employee_id={employee_id}&flash={msg.replace(' ', '+')}",
        status_code=303,
    )

# -------- Delete time period --------
@app.post("/viewer/delete-period")
@login_required
def delete_period_post(
    request: Request,
    timesheet_id: int = Form(...),
    db: Session = Depends(get_session),
):
    """Delete a whole timesheet period and every dependent row (admin only).

    Order matters: import-batch links are removed via raw SQL first, then all
    per-period tables, then the period row itself (continues past this range).
    """
    require_admin_edit(request, db)

    ts = db.query(TimesheetPeriod).get(timesheet_id)
    if not ts:
        raise HTTPException(status_code=404, detail="Time Period not found")

    # DELETE ... USING join — NOTE(review): PostgreSQL-specific syntax.
    db.execute(
        text("""
            DELETE FROM import_batch_items
            USING time_entries
            WHERE import_batch_items.time_entry_id = time_entries.id
              AND time_entries.timesheet_id = :tid
        """),
        {"tid": timesheet_id},
    )

    db.query(TimeEntry).filter(TimeEntry.timesheet_id == timesheet_id).delete(synchronize_session=False)
    db.query(WeekAssignment).filter(WeekAssignment.timesheet_id == timesheet_id).delete(synchronize_session=False)
    db.query(TimesheetStatus).filter(TimesheetStatus.timesheet_id ==
        timesheet_id).delete(synchronize_session=False)
    db.query(EmployeePeriodSetting).filter(EmployeePeriodSetting.timesheet_id == timesheet_id).delete(synchronize_session=False)
    db.query(DuplicateReview).filter(DuplicateReview.timesheet_id == timesheet_id).delete(synchronize_session=False)
    db.query(LongShiftFlag).filter(LongShiftFlag.timesheet_id == timesheet_id).delete(synchronize_session=False)
    db.query(LongShiftReview).filter(LongShiftReview.timesheet_id == timesheet_id).delete(synchronize_session=False)
    db.query(PtoNeedFlag).filter(PtoNeedFlag.timesheet_id == timesheet_id).delete(synchronize_session=False)
    db.query(PtoReviewFlag).filter(PtoReviewFlag.timesheet_id == timesheet_id).delete(synchronize_session=False)
    db.query(HolidayReviewFlag).filter(HolidayReviewFlag.timesheet_id == timesheet_id).delete(synchronize_session=False)
    db.query(PayrollNote).filter(PayrollNote.timesheet_id == timesheet_id).delete(synchronize_session=False)

    db.delete(ts)
    db.commit()

    return RedirectResponse(url="/viewer?msg=Time+Period+deleted", status_code=303)

# -------- NEW: Remove an employee from a single time period --------
@app.post("/viewer/remove-employee")
@login_required
def viewer_remove_employee(
    request: Request,
    employee_id: int = Form(...),
    timesheet_id: int = Form(...),
    db: Session = Depends(get_session),
):
    """
    Remove an employee's data from a single timesheet:
    - Deletes their time entries for the period
    - Cleans up import batch item references
    - Clears status, settings, duplicate reviews, long-shift flags/reviews,
      PTO need/review flags, holiday review flags, and payroll notes
    """
    require_admin_edit(request, db)

    ts = db.query(TimesheetPeriod).get(timesheet_id)
    if not ts:
        raise HTTPException(status_code=404, detail="Time Period not found")
    emp = db.query(Employee).get(employee_id)
    if not emp:
        raise HTTPException(status_code=404, detail="Employee not found")

    # Delete import batch item links for this
    # employee's entries in this period.
    # DELETE ... USING join — NOTE(review): PostgreSQL-specific syntax.
    db.execute(
        text("""
            DELETE FROM import_batch_items
            USING time_entries
            WHERE import_batch_items.time_entry_id = time_entries.id
              AND time_entries.timesheet_id = :tid
              AND time_entries.employee_id = :eid
        """),
        {"tid": timesheet_id, "eid": employee_id},
    )

    # Delete time entries
    deleted_entries = db.query(TimeEntry).filter(
        TimeEntry.timesheet_id == timesheet_id,
        TimeEntry.employee_id == employee_id,
    ).delete(synchronize_session=False)

    # Delete ancillary per-employee/per-period rows
    db.query(TimesheetStatus).filter(
        TimesheetStatus.timesheet_id == timesheet_id,
        TimesheetStatus.employee_id == employee_id,
    ).delete(synchronize_session=False)

    db.query(EmployeePeriodSetting).filter(
        EmployeePeriodSetting.timesheet_id == timesheet_id,
        EmployeePeriodSetting.employee_id == employee_id,
    ).delete(synchronize_session=False)

    db.query(DuplicateReview).filter(
        DuplicateReview.timesheet_id == timesheet_id,
        DuplicateReview.employee_id == employee_id,
    ).delete(synchronize_session=False)

    db.query(LongShiftFlag).filter(
        LongShiftFlag.timesheet_id == timesheet_id,
        LongShiftFlag.employee_id == employee_id,
    ).delete(synchronize_session=False)

    db.query(LongShiftReview).filter(
        LongShiftReview.timesheet_id == timesheet_id,
        LongShiftReview.employee_id == employee_id,
    ).delete(synchronize_session=False)

    db.query(PtoNeedFlag).filter(
        PtoNeedFlag.timesheet_id == timesheet_id,
        PtoNeedFlag.employee_id == employee_id,
    ).delete(synchronize_session=False)

    db.query(PtoReviewFlag).filter(
        PtoReviewFlag.timesheet_id == timesheet_id,
        PtoReviewFlag.employee_id == employee_id,
    ).delete(synchronize_session=False)

    db.query(HolidayReviewFlag).filter(
        HolidayReviewFlag.timesheet_id == timesheet_id,
        HolidayReviewFlag.employee_id == employee_id,
    ).delete(synchronize_session=False)

    db.query(PayrollNote).filter(
        PayrollNote.timesheet_id == timesheet_id,
        PayrollNote.employee_id == employee_id,
    ).delete(synchronize_session=False)

    db.commit()

    return JSONResponse({"ok": True, "deleted_entries": int(deleted_entries)})

# -----------------------
# Admin pages (users/employees) - unchanged
# -----------------------
@app.get("/admin/employees", response_class=HTMLResponse)
@login_required
def admin_employees_page(
    request: Request,
    include_inactive: int = Query(1),
    db: Session = Depends(get_session),
):
    """Render the employee admin list; optionally hide inactive employees."""
    if not current_is_admin(db, request.session.get("user_id")):
        raise HTTPException(status_code=403, detail="Admin access required")

    if include_inactive:
        employees = db.query(Employee).order_by(Employee.name.asc()).all()
    else:
        # COALESCE treats a NULL is_active as active.
        employees = db.query(Employee).filter(text("COALESCE(is_active, TRUE) = TRUE")).order_by(Employee.name.asc()).all()

    # Ids of explicitly-inactive employees, for styling in the template.
    inactive_ids = {r[0] for r in db.execute(text("SELECT id FROM employees WHERE COALESCE(is_active, TRUE) = FALSE")).fetchall()}

    return templates.TemplateResponse(
        "admin_employees.html",
        {"request": request, "employees": employees, "include_inactive": include_inactive, "inactive_ids": inactive_ids},
    )

@app.post("/admin/employees/set-status")
@login_required
def admin_employees_set_status(
    request: Request,
    employee_id: int = Form(...),
    is_active: int = Form(...),
    termination_date: Optional[str] = Form(None),
    redirect_to: Optional[str] = Form(None),
    db: Session = Depends(get_session),
):
    """Activate/deactivate an employee; deactivation may set a termination date.

    An invalid ``termination_date`` (non-ISO) is rejected with a 400.
    """
    if not current_is_admin(db, request.session.get("user_id")):
        raise HTTPException(status_code=403, detail="Admin access required")

    emp = db.query(Employee).get(employee_id)
    if not emp:
        raise HTTPException(status_code=404, detail="Employee not found")

    emp.is_active = bool(int(is_active))
    if not emp.is_active:
        td = (termination_date or "").strip()
        if td:
            try:
                emp.termination_date = date.fromisoformat(td)
            except Exception:
                raise HTTPException(status_code=400, detail="Invalid termination_date")
        else:
            emp.termination_date = None
    else:
        # Reactivation clears any previous termination date.
        emp.termination_date = None

    db.commit()
    return RedirectResponse(url=(redirect_to or "/admin/employees"), status_code=303)

@app.get("/admin/users", response_class=HTMLResponse)
@login_required
def admin_users_page(
    request: Request,
    msg: Optional[str] = None,
    db: Session = Depends(get_session),
):
    """Render the user admin page: all users, admin flags, display names."""
    if not current_is_admin(db, request.session.get("user_id")):
        raise HTTPException(status_code=403, detail="Admin access required")

    users = db.query(User).order_by(User.username.asc()).all()
    admin_ids = {r.user_id for r in db.query(AdminUser).all()}
    profiles = {r.user_id: (r.full_name or "") for r in db.query(UserProfile).all()}
    admin_count = len(admin_ids)

    return templates.TemplateResponse(
        "admin_users.html",
        {
            "request": request,
            "users": users,
            "admin_ids": admin_ids,
            "profiles": profiles,
            "admin_count": admin_count,
            "me_id": request.session.get("user_id"),
            "flash": msg,
        },
    )

@app.post("/admin/users/create")
@login_required
def admin_users_create(
    request: Request,
    full_name: str = Form(""),
    username: str = Form(...),
    password: str = Form(...),
    role: str = Form("user"),
    db: Session = Depends(get_session),
):
    """Create a user (plus profile and optional admin flag); flash via redirect."""
    if not current_is_admin(db, request.session.get("user_id")):
        raise HTTPException(status_code=403, detail="Admin access required")

    uname = (username or "").strip()
    pwd = (password or "").strip()
    role = (role or "user").strip().lower()
    fname = (full_name or "").strip()

    if not uname or not pwd:
        return RedirectResponse(url="/admin/users?msg=Username+and+password+required", status_code=303)
    if db.query(User).filter(User.username == uname).first():
        return RedirectResponse(url="/admin/users?msg=Username+already+exists", status_code=303)

    u = User(username=uname, password_hash=hash_password(pwd))
    db.add(u)
    # flush() assigns u.id so the profile/admin rows can reference it.
    db.flush()
    db.add(UserProfile(user_id=u.id, full_name=fname or None))
    if role == "admin":
        db.add(AdminUser(user_id=u.id))
    db.commit()
    return RedirectResponse(url="/admin/users?msg=User+created", status_code=303)

@app.post("/admin/users/reset-password")
@login_required
def admin_users_reset_password(
    request: Request,
    user_id: int = Form(...),
    new_password: str = Form(...),
    db: Session = Depends(get_session),
):
    """Set a new password hash for a user (admin only); flash via redirect."""
    if not current_is_admin(db, request.session.get("user_id")):
        raise HTTPException(status_code=403, detail="Admin access required")

    u = db.query(User).get(user_id)
    if not u:
        return RedirectResponse(url="/admin/users?msg=User+not+found", status_code=303)
    pwd = (new_password or "").strip()
    if not pwd:
        return RedirectResponse(url="/admin/users?msg=Password+required", status_code=303)
    u.password_hash = hash_password(pwd)
    db.commit()
    return RedirectResponse(url="/admin/users?msg=Password+reset", status_code=303)

@app.post("/admin/users/update-role")
@login_required
def admin_users_update_role(
    request: Request,
    user_id: int = Form(...),
    role: str = Form(...),
    db: Session = Depends(get_session),
):
    """Promote a user to admin. Demotion is deliberately not supported."""
    if not current_is_admin(db, request.session.get("user_id")):
        raise HTTPException(status_code=403, detail="Admin access required")

    role = (role or "user").strip().lower()
    target = db.query(User).get(user_id)
    if not target:
        return RedirectResponse(url="/admin/users?msg=User+not+found", status_code=303)

    is_admin_now = bool(db.query(AdminUser).filter(AdminUser.user_id == user_id).first())

    if role == "user":
        # Intentional: demoting admins through this endpoint is disabled.
        return RedirectResponse(url="/admin/users?msg=Demotion+disabled", status_code=303)

    if not is_admin_now:
        db.add(AdminUser(user_id=user_id))
        db.commit()
        return RedirectResponse(url="/admin/users?msg=Promoted+to+admin", status_code=303)

    return RedirectResponse(url="/admin/users?msg=Already+admin", status_code=303)

@app.post("/admin/users/delete")
@login_required
def admin_users_delete(
    request: Request,
    user_id: int = Form(...),
    db: Session = Depends(get_session),
):
    """Delete a user and their admin/profile rows; self-deletion is blocked."""
    if not current_is_admin(db, request.session.get("user_id")):
        raise HTTPException(status_code=403, detail="Admin access required")

    if user_id == request.session.get("user_id"):
        return RedirectResponse(url="/admin/users?msg=Cannot+delete+the+current+user", status_code=303)

    target = db.query(User).get(user_id)
    if not target:
        return RedirectResponse(url="/admin/users?msg=User+not+found", status_code=303)

    # Remove dependent rows first, then the user itself.
    db.query(AdminUser).filter(AdminUser.user_id == user_id).delete(synchronize_session=False)
    db.query(UserProfile).filter(UserProfile.user_id == user_id).delete(synchronize_session=False)
    db.delete(target)
    db.commit()
    return RedirectResponse(url="/admin/users?msg=User+deleted", status_code=303)

# -------- Viewer (Timesheet Editor)
@app.get("/viewer", response_class=HTMLResponse)
@login_required
def viewer_page(
    request: Request,
    timesheet_id: Optional[int] = Query(None),
    employee_id: Optional[int] = Query(None),
    include_submitted: int = 0,
    msg: Optional[str] = None,
    db: Session = Depends(get_session),
):
    """Main timesheet editor page.

    Selects a period (defaults to the last known one), redirects to week
    assignment if the period has no week map yet, then builds grouped entry
    rows for the selected employee along with duplicate/long-shift/PTO/
    holiday warning data. Flags for long shifts (>= 10h) and PTO-without-type
    are created on the fly as a side effect of rendering.
    """
    employees_all = db.query(Employee).order_by(Employee.name.asc()).all()

    sheets = enumerate_timesheets_global(db)
    period_options = [{"timesheet_id": tid, "start": ps, "end": pe, "display": name} for tid, ps, pe, name in sheets]
    if not period_options:
        # Nothing imported yet: render an empty shell with a flash message.
        return templates.TemplateResponse(
            "viewer.html",
            {
                "request": request,
                "employees": [],
                "selected_employee": None,
                "period_options": [],
                "active_ts": None,
                "grouped": None,
                "employee_setting": {"carry_over_hours": 0.0},
                "duplicates": [],
                "dup_dates": set(),
                "flash": "No timesheet instances found.",
                "all_done": False,
                "can_edit": bool(request.session.get("is_admin")),
            },
        )

    # Fall back to the most recent period when the id is absent or stale.
    ts = db.query(TimesheetPeriod).get(timesheet_id) if timesheet_id else db.query(TimesheetPeriod).get(period_options[-1]["timesheet_id"])
    if not ts:
        ts = db.query(TimesheetPeriod).get(period_options[-1]["timesheet_id"])
    active_ts_id = ts.id

    # submit_timesheet redirects here with this exact message when every
    # employee in the period has been submitted.
    done_message = "You are all done with this timeperiod, See you next time!"
    if msg and done_message in msg:
        return templates.TemplateResponse(
            "viewer.html",
            {
                "request": request,
                "employees": [],
                "selected_employee": None,
                "period_options": period_options,
                "active_ts": active_ts_id,
                "grouped": None,
                "employee_setting": {"carry_over_hours": 0.0},
                "duplicates": [],
                "dup_dates": set(),
                "flash": msg,
                "all_done": True,
                "can_edit": bool(request.session.get("is_admin")),
            },
        )

    week_rows = db.query(WeekAssignment).filter(WeekAssignment.timesheet_id == active_ts_id).all()
    week_map = {wr.day_date: wr.week_number for wr in week_rows}
    if not week_map:
        # Weeks must be assigned before the editor is usable.
        return RedirectResponse(url=f"/assign-weeks?timesheet_id={active_ts_id}", status_code=303)

    emp_ids_with_rows = [
        r[0] for r in db.query(TimeEntry.employee_id).filter(TimeEntry.timesheet_id == active_ts_id).group_by(TimeEntry.employee_id).all()
    ]
    submitted = {
        r[0]: r[1]
        for r in db.query(TimesheetStatus.employee_id, TimesheetStatus.status).filter(TimesheetStatus.timesheet_id == active_ts_id).all()
    }
    pending_ids = [eid for eid in emp_ids_with_rows if eid not in submitted or submitted[eid] != "submitted"]
    employees = [e for e in employees_all if (e.id in pending_ids) or (include_submitted and e.id in submitted)]
    selected_employee = db.query(Employee).get(employee_id) if employee_id else (employees[0] if employees else (employees_all[0] if employees_all else None))

    carry = 0.0
    if selected_employee:
        eps = (
            db.query(EmployeePeriodSetting)
            .filter(EmployeePeriodSetting.timesheet_id == active_ts_id, EmployeePeriodSetting.employee_id == selected_employee.id)
            .first()
        )
        carry = float(eps.carry_over_hours if eps else 0.0)

    entries = []
    if selected_employee:
        entries = (
            db.query(TimeEntry)
            .filter(TimeEntry.timesheet_id == active_ts_id, TimeEntry.employee_id == selected_employee.id)
            .order_by(TimeEntry.work_date.asc())
            .all()
        )
    grouped = group_entries_for_timesheet(entries, ts.period_start, ts.period_end, week_map=week_map, carry_over_hours=carry)

    # Duplicate-date warnings, excluding dates already marked reviewed.
    dups_rows_raw = []
    dup_dates = set()
    dups = []
    if selected_employee:
        dups_rows_raw = (
            db.query(TimeEntry.work_date, func.count(TimeEntry.id))
            .filter(TimeEntry.timesheet_id == active_ts_id, TimeEntry.employee_id == selected_employee.id)
            .group_by(TimeEntry.work_date)
            .having(func.count(TimeEntry.id) > 1)
            .order_by(TimeEntry.work_date.asc())
            .all()
        )
        reviewed_dates = {
            r.work_date
            for r in db.query(DuplicateReview)
            .filter(DuplicateReview.timesheet_id == active_ts_id, DuplicateReview.employee_id == selected_employee.id)
            .all()
        }
        dups_rows = [(d, c) for d, c in dups_rows_raw if d not in reviewed_dates]
        dup_dates = {d for d, c in dups_rows}
        dups = [{"date": d, "count": int(c)} for d, c in dups_rows]

    # Long-shift flags: any day totaling >= 10h gets flagged (persisted as a
    # side effect of viewing this page).
    reviewed_long_dates = set()
    flagged_long_dates = set()
    if selected_employee:
        reviewed_long_dates = {
            r.work_date
            for r in db.query(LongShiftReview)
            .filter(LongShiftReview.timesheet_id == active_ts_id, LongShiftReview.employee_id == selected_employee.id)
            .all()
        }
        flagged_long_dates = {
            r.work_date
            for r in db.query(LongShiftFlag)
            .filter(LongShiftFlag.timesheet_id == active_ts_id, LongShiftFlag.employee_id == selected_employee.id)
            .all()
        }
        newly_flagged = 0
        for r in grouped.rows:
            if float(r.total_hours or 0.0) >= 10.0 and r.work_date not in flagged_long_dates:
                db.add(LongShiftFlag(timesheet_id=active_ts_id, employee_id=selected_employee.id, work_date=r.work_date))
                flagged_long_dates.add(r.work_date)
                newly_flagged += 1
        if newly_flagged:
            db.commit()

    # PTO-needs flags: PTO hours present but no PTO type chosen.
    reviewed_pto_dates = set()
    flagged_pto_dates = set()
    if selected_employee:
        reviewed_pto_dates = {
            r.work_date
            for r in db.query(PtoReviewFlag)
            .filter(PtoReviewFlag.timesheet_id == active_ts_id, PtoReviewFlag.employee_id == selected_employee.id)
            .all()
        }
        flagged_pto_dates = {
            r.work_date
            for r in db.query(PtoNeedFlag)
            .filter(PtoNeedFlag.timesheet_id == active_ts_id, PtoNeedFlag.employee_id == selected_employee.id)
            .all()
        }
        newly_flagged_pto = 0
        for r in grouped.rows:
            if float(r.pto_hours or 0.0) > 0.0 and not (r.pto_type or "").strip() and r.work_date not in flagged_pto_dates:
                db.add(PtoNeedFlag(timesheet_id=active_ts_id, employee_id=selected_employee.id, work_date=r.work_date))
                flagged_pto_dates.add(r.work_date)
                newly_flagged_pto += 1
        if newly_flagged_pto:
            db.commit()

    # Holiday warnings are computed by shared helpers (defined elsewhere).
    flagged_holiday_dates = set()
    reviewed_holiday_dates = set()
    holiday_needs = []
    if selected_employee:
        flagged_holiday_dates = _flagged_holiday_dates(db, active_ts_id, selected_employee.id)
        reviewed_holiday_dates = _reviewed_holiday_dates(db, active_ts_id, selected_employee.id)
        holiday_needs = _holiday_needs_rows(db, active_ts_id, selected_employee.id)

    long_shift_needs = [r for r in grouped.rows if (r.work_date in flagged_long_dates and r.work_date not in reviewed_long_dates)]
    pto_needs = [r for r in grouped.rows if (r.work_date in flagged_pto_dates and r.work_date not in reviewed_pto_dates)]

    return templates.TemplateResponse(
        "viewer.html",
        {
            "request": request,
            "employees": employees,
            "selected_employee": selected_employee,
            "period_options": period_options,
            "active_ts": active_ts_id,
            "grouped": grouped,
            "employee_setting": {"carry_over_hours": carry},
            "duplicates": dups,
            "dup_dates": dup_dates,
            "pto_needs": pto_needs,
            "holiday_needs": holiday_needs,
            "long_shift_needs": long_shift_needs,
            "reviewed_long_dates": reviewed_long_dates,
            "flagged_long_dates": flagged_long_dates,
            "reviewed_pto_dates": reviewed_pto_dates,
            "flagged_pto_dates": flagged_pto_dates,
            "reviewed_holiday_dates": reviewed_holiday_dates,
            "flagged_holiday_dates": flagged_holiday_dates,
            "flash": msg,
            "all_done": False,
            "can_edit": bool(request.session.get("is_admin")),
        },
    )

# -------- Review list --------
@app.get("/review",
@app.get("/review", response_class=HTMLResponse)
@login_required
def review_page(
    request: Request,
    timesheet_id: Optional[int] = Query(None),
    msg: Optional[str] = None,
    db: Session = Depends(get_session),
):
    """Review landing page: list employees who submitted the selected period."""
    all_sheets = enumerate_timesheets_global(db)
    options = [{"timesheet_id": tid, "display": label} for tid, _ps, _pe, label in all_sheets]
    if not options:
        # No periods at all yet -- render the empty state.
        return templates.TemplateResponse(
            "review.html",
            {
                "request": request,
                "submitted": [],
                "active_ts": None,
                "period_options": [],
                "flash": "No submitted timesheets yet.",
            },
        )

    # Default to the most recent period when none was requested.
    active_ts = timesheet_id or options[-1]["timesheet_id"]

    status_rows = (
        db.query(TimesheetStatus, Employee)
        .join(Employee, Employee.id == TimesheetStatus.employee_id)
        .filter(TimesheetStatus.timesheet_id == active_ts)
        .order_by(Employee.name.asc())
        .all()
    )
    submitted = []
    for status_row, emp in status_rows:
        if status_row.status != "submitted":
            continue
        submitted.append(
            {
                "employee_id": emp.id,
                "employee_name": emp.name,
                "submitted_at": to_local(status_row.submitted_at),
            }
        )

    return templates.TemplateResponse(
        "review.html",
        {
            "request": request,
            "submitted": submitted,
            "active_ts": active_ts,
            "period_options": options,
            "flash": msg,
        },
    )

# -------- Review edit --------
@app.get("/review/edit", response_class=HTMLResponse)
@login_required
def review_edit_page(
    request: Request,
    timesheet_id: int = Query(...),
    employee_id: int = Query(...),
    msg: Optional[str] = None,
    db: Session = Depends(get_session),
):
    ts = db.query(TimesheetPeriod).get(timesheet_id)
    if not ts:
        raise HTTPException(status_code=404, detail="Time Period not found")

    emp = db.query(Employee).get(employee_id)
    if not emp:
        raise HTTPException(status_code=404, detail="Employee not found")

    eps = (
        db.query(EmployeePeriodSetting)
        .filter(EmployeePeriodSetting.timesheet_id == timesheet_id, EmployeePeriodSetting.employee_id == emp.id)
        .first()
    )
    carry = float(eps.carry_over_hours if eps else 0.0)
    # Day -> payroll week number for this period.
    week_rows = db.query(WeekAssignment).filter(WeekAssignment.timesheet_id == timesheet_id).all()
    week_map = {wr.day_date: wr.week_number for wr in week_rows}

    entries = (
        db.query(TimeEntry)
        .filter(TimeEntry.timesheet_id == timesheet_id, TimeEntry.employee_id == emp.id)
        .order_by(TimeEntry.work_date.asc())
        .all()
    )
    grouped = group_entries_for_timesheet(entries, ts.period_start, ts.period_end, week_map=week_map, carry_over_hours=carry)

    # Days with more than one raw entry, minus those already marked reviewed.
    dups_rows_raw = (
        db.query(TimeEntry.work_date, func.count(TimeEntry.id))
        .filter(TimeEntry.timesheet_id == timesheet_id, TimeEntry.employee_id == emp.id)
        .group_by(TimeEntry.work_date)
        .having(func.count(TimeEntry.id) > 1)
        .order_by(TimeEntry.work_date.asc())
        .all()
    )
    reviewed_dup_dates = {
        r.work_date
        for r in db.query(DuplicateReview)
        .filter(DuplicateReview.timesheet_id == timesheet_id, DuplicateReview.employee_id == emp.id)
        .all()
    }
    dups_rows = [(d, c) for d, c in dups_rows_raw if d not in reviewed_dup_dates]
    dup_dates = {d for d, c in dups_rows}
    duplicates = [{"date": d, "count": int(c)} for d, c in dups_rows]

    # Long-shift handling: persist a flag for every day with >= 10 total hours
    # that is not flagged yet; "needs" below are flagged-but-unreviewed days.
    reviewed_long_dates = {
        r.work_date
        for r in db.query(LongShiftReview)
        .filter(LongShiftReview.timesheet_id == timesheet_id, LongShiftReview.employee_id == emp.id)
        .all()
    }
    flagged_long_dates = {
        r.work_date
        for r in db.query(LongShiftFlag)
        .filter(LongShiftFlag.timesheet_id == timesheet_id, LongShiftFlag.employee_id == emp.id)
        .all()
    }
    newly_flagged = 0
    for r in grouped.rows:
        if float(r.total_hours or 0.0) >= 10.0 and r.work_date not in flagged_long_dates:
            db.add(LongShiftFlag(timesheet_id=timesheet_id, employee_id=emp.id, work_date=r.work_date))
            flagged_long_dates.add(r.work_date)
            newly_flagged += 1
    if newly_flagged:
        # Only commit when something was actually added.
        db.commit()
    long_shift_needs = [r for r in grouped.rows if (r.work_date in flagged_long_dates and r.work_date not in reviewed_long_dates)]

    # PTO handling: a day with PTO hours but an empty/blank PTO type gets flagged.
    reviewed_pto_dates = {
        r.work_date
        for r in db.query(PtoReviewFlag)
        .filter(PtoReviewFlag.timesheet_id == timesheet_id, PtoReviewFlag.employee_id == emp.id)
        .all()
    }
    flagged_pto_dates = {
        r.work_date
        for r in db.query(PtoNeedFlag)
        .filter(PtoNeedFlag.timesheet_id == timesheet_id, PtoNeedFlag.employee_id == emp.id)
        .all()
    }
    newly_flagged_pto = 0
    for r in grouped.rows:
        if float(r.pto_hours or 0.0) > 0.0 and not (r.pto_type or "").strip() and r.work_date not in flagged_pto_dates:
            db.add(PtoNeedFlag(timesheet_id=timesheet_id, employee_id=emp.id, work_date=r.work_date))
            flagged_pto_dates.add(r.work_date)
            newly_flagged_pto += 1
    if newly_flagged_pto:
        db.commit()
    pto_needs = [r for r in grouped.rows if (r.work_date in flagged_pto_dates and r.work_date not in reviewed_pto_dates)]

    # Holiday flags are maintained by module-level helpers.
    flagged_holiday_dates = _flagged_holiday_dates(db, timesheet_id, emp.id)
    reviewed_holiday_dates = _reviewed_holiday_dates(db, timesheet_id, emp.id)
    holiday_needs = _holiday_needs_rows(db, timesheet_id, emp.id)

    # NEW: payroll note (reimbursement/additional/notes)
    payroll_note = (
        db.query(PayrollNote)
        .filter(PayrollNote.timesheet_id == timesheet_id, PayrollNote.employee_id == emp.id)
        .first()
    )

    return templates.TemplateResponse(
        "review_edit.html",
        {
            "request": request,
            "employee": emp,
            "timesheet_id": timesheet_id,
            "period_name": ts.name or f"{ts.period_start}..{ts.period_end}",
            "grouped": grouped,
            "carry_over_hours": carry,
            "flash": msg,
            "duplicates": duplicates,
            "dup_dates": dup_dates,
            "long_shift_needs": long_shift_needs,
            "flagged_long_dates": flagged_long_dates,
            "reviewed_long_dates": reviewed_long_dates,
            "pto_needs": pto_needs,
            "flagged_pto_dates": flagged_pto_dates,
            "reviewed_pto_dates": reviewed_pto_dates,
            "holiday_needs": holiday_needs,
            "flagged_holiday_dates": flagged_holiday_dates,
            "reviewed_holiday_dates": reviewed_holiday_dates,
            # Admin session flag gates inline editing in the template.
            "can_edit": bool(request.session.get("is_admin")),
            "payroll_note": payroll_note,
        },
    )
# -------- NEW: update payroll note (reimbursement/additional/notes)
@app.post("/review/update-payroll-note")
@login_required
def review_update_payroll_note(
    request: Request,
    timesheet_id: int = Form(...),
    employee_id: int = Form(...),
    reimbursement_amount: Optional[str] = Form(""),
    additional_payroll_amount: Optional[str] = Form(""),
    notes: Optional[str] = Form(""),
    db: Session = Depends(get_session),
):
    """Create or update the PayrollNote (reimbursement / additional pay / free-form
    notes) for one employee in one timesheet period, then redirect back to the
    review-edit page with a flash message.

    Raises 404 if the period or employee does not exist; admin rights are
    enforced by ``require_admin_edit``.
    """
    from urllib.parse import quote_plus

    require_admin_edit(request, db)

    ts = db.query(TimesheetPeriod).get(timesheet_id)
    if not ts:
        raise HTTPException(status_code=404, detail="Time Period not found")
    emp = db.query(Employee).get(employee_id)
    if not emp:
        raise HTTPException(status_code=404, detail="Employee not found")

    # Upsert: at most one PayrollNote per (timesheet, employee).
    pn = (
        db.query(PayrollNote)
        .filter(PayrollNote.timesheet_id == timesheet_id, PayrollNote.employee_id == employee_id)
        .first()
    )
    if not pn:
        pn = PayrollNote(timesheet_id=timesheet_id, employee_id=employee_id)
        db.add(pn)

    pn.reimbursement_amount = _parse_money(reimbursement_amount)
    pn.additional_payroll_amount = _parse_money(additional_payroll_amount)
    # Empty / whitespace-only notes are stored as NULL.
    pn.notes = (notes or "").strip() or None
    db.commit()

    # BUGFIX: URL-encode the flash message properly instead of only replacing
    # spaces with '+'; any other reserved character would previously corrupt
    # the query string. quote_plus produces the identical encoding for the
    # current message ("Payroll+extras+saved.").
    msg = quote_plus("Payroll extras saved.")
    return RedirectResponse(
        url=f"/review/edit?timesheet_id={timesheet_id}&employee_id={employee_id}&msg={msg}",
        status_code=303,
    )
# -------- Print single / bundle (unchanged)
@app.get("/review/print", response_class=HTMLResponse)
@login_required
def review_print(
    request: Request,
    employee_id: int,
    timesheet_id: int,
    db: Session = Depends(get_session),
):
    """Printable timesheet for a single employee in a single period."""
    period = db.query(TimesheetPeriod).get(timesheet_id)
    if period is None:
        raise HTTPException(status_code=404, detail="Time Period not found")

    employee = db.query(Employee).get(employee_id)
    if employee is None:
        raise HTTPException(status_code=404, detail="Employee not found")

    setting = (
        db.query(EmployeePeriodSetting)
        .filter(
            EmployeePeriodSetting.timesheet_id == timesheet_id,
            EmployeePeriodSetting.employee_id == employee.id,
        )
        .first()
    )
    carry_over = float(setting.carry_over_hours if setting else 0.0)

    # Map each day of the period onto its payroll week number.
    assignments = db.query(WeekAssignment).filter(WeekAssignment.timesheet_id == timesheet_id).all()
    week_by_day = {row.day_date: row.week_number for row in assignments}

    entries = (
        db.query(TimeEntry)
        .filter(TimeEntry.timesheet_id == timesheet_id, TimeEntry.employee_id == employee.id)
        .order_by(TimeEntry.work_date.asc())
        .all()
    )
    grouped = group_entries_for_timesheet(
        entries,
        period.period_start,
        period.period_end,
        week_map=week_by_day,
        carry_over_hours=carry_over,
    )

    return templates.TemplateResponse(
        "print_timesheet.html",
        {
            "request": request,
            "hide_nav_links": True,
            "employee": employee,
            "period_name": period.name or f"{period.period_start}..{period.period_end}",
            "grouped": grouped,
            "timesheet_id": timesheet_id,
        },
    )
@app.get("/review/print-all", response_class=HTMLResponse)
@login_required
def review_print_all(
    request: Request,
    timesheet_id: int,
    db: Session = Depends(get_session),
):
    """Printable bundle: one timesheet per submitted employee for the period."""
    period = db.query(TimesheetPeriod).get(timesheet_id)
    if period is None:
        raise HTTPException(status_code=404, detail="Time Period not found")

    submitted = (
        db.query(TimesheetStatus, Employee)
        .join(Employee, Employee.id == TimesheetStatus.employee_id)
        .filter(
            TimesheetStatus.timesheet_id == timesheet_id,
            TimesheetStatus.status == "submitted",
        )
        .order_by(Employee.name.asc())
        .all()
    )

    # Day -> week number lookup is shared by every employee in the bundle.
    assignments = db.query(WeekAssignment).filter(WeekAssignment.timesheet_id == timesheet_id).all()
    week_by_day = {row.day_date: row.week_number for row in assignments}

    bundles = []
    for _status, employee in submitted:
        setting = (
            db.query(EmployeePeriodSetting)
            .filter(
                EmployeePeriodSetting.timesheet_id == timesheet_id,
                EmployeePeriodSetting.employee_id == employee.id,
            )
            .first()
        )
        carry_over = float(setting.carry_over_hours if setting else 0.0)
        entries = (
            db.query(TimeEntry)
            .filter(TimeEntry.timesheet_id == timesheet_id, TimeEntry.employee_id == employee.id)
            .order_by(TimeEntry.work_date.asc())
            .all()
        )
        grouped = group_entries_for_timesheet(
            entries,
            period.period_start,
            period.period_end,
            week_map=week_by_day,
            carry_over_hours=carry_over,
        )
        bundles.append({"employee": employee, "grouped": grouped})

    return templates.TemplateResponse(
        "print_timesheet_bundle.html",
        {
            "request": request,
            "hide_nav_links": True,
            "timesheet_id": timesheet_id,
            "period_name": period.name or f"{period.period_start}..{period.period_end}",
            "bundles": bundles,
        },
    )
TimesheetStatus.status == "submitted") + .order_by(Employee.name.asc()) + .all() + ) + + bundles = [] + sum_regular = Decimal("0") + sum_pto = Decimal("0") + sum_holiday = Decimal("0") + sum_bereavement = Decimal("0") + sum_ot = Decimal("0") + sum_paid = Decimal("0") + + for emp in employees: + entries = ( + db.query(TimeEntry) + .filter(TimeEntry.timesheet_id == timesheet_id, TimeEntry.employee_id == emp.id) + .order_by(TimeEntry.work_date.asc()) + .all() + ) + eps = ( + db.query(EmployeePeriodSetting) + .filter(EmployeePeriodSetting.timesheet_id == timesheet_id, EmployeePeriodSetting.employee_id == emp.id) + .first() + ) + carry = float(eps.carry_over_hours if eps else 0.0) + + grouped = group_entries_for_timesheet(entries, ts.period_start, ts.period_end, week_map=week_map, carry_over_hours=carry) + t = grouped.totals + sum_regular += Decimal(str(t.regular)) + sum_pto += Decimal(str(t.pto)) + sum_holiday += Decimal(str(t.holiday)) + sum_bereavement += Decimal(str(t.bereavement)) + sum_ot += Decimal(str(t.overtime)) + sum_paid += Decimal(str(t.paid_total)) + + bundles.append({"employee": emp, "grouped": grouped}) + + q = lambda d: float(Decimal(d).quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)) + totals = { + "regular": q(sum_regular), + "pto": q(sum_pto), + "holiday": q(sum_holiday), + "bereavement": q(sum_bereavement), + "overtime": q(sum_ot), + "paid_total": q(sum_paid), + } + + return templates.TemplateResponse( + "overview.html", + { + "request": request, + "bundles": bundles, + "period_name": ts.name or f"{ts.period_start}..{ts.period_end}", + "timesheet_id": timesheet_id, + "totals": totals, + }, + ) + +# -------- NEW: Overview export to XLSX -------- +@app.get("/overview/export-xlsx") +@login_required +def overview_export_xlsx( + request: Request, + timesheet_id: int = Query(...), + db: Session = Depends(get_session), +): + ts = db.query(TimesheetPeriod).get(timesheet_id) + if not ts: + raise HTTPException(status_code=404, detail="Time Period not 
found") + + week_rows = db.query(WeekAssignment).filter(WeekAssignment.timesheet_id == timesheet_id).all() + week_map = {wr.day_date: wr.week_number for wr in week_rows} + + employees = ( + db.query(Employee) + .join(TimesheetStatus, TimesheetStatus.employee_id == Employee.id) + .filter(TimesheetStatus.timesheet_id == timesheet_id, TimesheetStatus.status == "submitted") + .order_by(Employee.name.asc()) + .all() + ) + + # Payroll notes for extras + notes_by_emp = { + n.employee_id: n + for n in db.query(PayrollNote).filter(PayrollNote.timesheet_id == timesheet_id).all() + } + + rows = [] + for emp in employees: + entries = ( + db.query(TimeEntry) + .filter(TimeEntry.timesheet_id == timesheet_id, TimeEntry.employee_id == emp.id) + .order_by(TimeEntry.work_date.asc()) + .all() + ) + eps = ( + db.query(EmployeePeriodSetting) + .filter(EmployeePeriodSetting.timesheet_id == timesheet_id, EmployeePeriodSetting.employee_id == emp.id) + .first() + ) + carry = float(eps.carry_over_hours if eps else 0.0) + grouped = group_entries_for_timesheet(entries, ts.period_start, ts.period_end, week_map=week_map, carry_over_hours=carry) + t = grouped.totals + + pn = notes_by_emp.get(emp.id) + rows.append({ + "employee_name": emp.name, + "regular": float(t.regular), + "overtime": float(t.overtime), + "pto": float(t.pto), + "holiday": float(t.holiday), + "paid_total": float(t.paid_total), + "reimbursement": float(pn.reimbursement_amount) if pn and pn.reimbursement_amount is not None else 0.0, + "additional_payroll": float(pn.additional_payroll_amount) if pn and pn.additional_payroll_amount is not None else 0.0, + "notes": pn.notes if pn and pn.notes else "", + }) + + period_name = ts.name or f"{ts.period_start}..{ts.period_end}" + xlsx_bytes = build_overview_xlsx(period_name, rows) + fname = f"overview-{timesheet_id}.xlsx" + return StreamingResponse( + BytesIO(xlsx_bytes), + media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + headers={"Content-Disposition": 
# -------- Overview print (unchanged)
@app.get("/overview/print", response_class=HTMLResponse)
@login_required
def overview_print(
    request: Request,
    timesheet_id: int = Query(...),
    db: Session = Depends(get_session),
):
    """Printable version of the overview page (same data, print template)."""
    ts = db.query(TimesheetPeriod).get(timesheet_id)
    if not ts:
        # Unknown period renders an empty print page rather than a 404.
        return templates.TemplateResponse(
            "print_overview.html",
            {"request": request, "bundles": [], "period_name": "", "timesheet_id": timesheet_id, "totals": None},
        )

    # Day -> payroll week number for this period.
    week_rows = db.query(WeekAssignment).filter(WeekAssignment.timesheet_id == timesheet_id).all()
    week_map = {wr.day_date: wr.week_number for wr in week_rows}

    employees = (
        db.query(Employee)
        .join(TimesheetStatus, TimesheetStatus.employee_id == Employee.id)
        .filter(TimesheetStatus.timesheet_id == timesheet_id, TimesheetStatus.status == "submitted")
        .order_by(Employee.name.asc())
        .all()
    )

    bundles = []
    # Decimal accumulators avoid float drift while summing per-employee totals.
    sum_regular = Decimal("0")
    sum_pto = Decimal("0")
    sum_holiday = Decimal("0")
    sum_bereavement = Decimal("0")
    sum_ot = Decimal("0")
    sum_paid = Decimal("0")

    for emp in employees:
        entries = (
            db.query(TimeEntry)
            .filter(TimeEntry.timesheet_id == timesheet_id, TimeEntry.employee_id == emp.id)
            .order_by(TimeEntry.work_date.asc())
            .all()
        )
        eps = (
            db.query(EmployeePeriodSetting)
            .filter(EmployeePeriodSetting.timesheet_id == timesheet_id, EmployeePeriodSetting.employee_id == emp.id)
            .first()
        )
        carry = float(eps.carry_over_hours if eps else 0.0)
        grouped = group_entries_for_timesheet(entries, ts.period_start, ts.period_end, week_map=week_map, carry_over_hours=carry)

        t = grouped.totals
        sum_regular += Decimal(str(t.regular))
        sum_pto += Decimal(str(t.pto))
        sum_holiday += Decimal(str(t.holiday))
        sum_bereavement += Decimal(str(t.bereavement))
        sum_ot += Decimal(str(t.overtime))
        sum_paid += Decimal(str(t.paid_total))

        bundles.append({"employee": emp, "grouped": grouped})

    # Half-up rounding to cents for display.
    q = lambda d: float(Decimal(d).quantize(Decimal("0.01"), rounding=ROUND_HALF_UP))
    totals = {
        "regular": q(sum_regular),
        "pto": q(sum_pto),
        "holiday": q(sum_holiday),
        "bereavement": q(sum_bereavement),
        "overtime": q(sum_ot),
        "paid_total": q(sum_paid),
    }

    return templates.TemplateResponse(
        "print_overview.html",
        {
            "request": request,
            "hide_nav_links": True,
            "bundles": bundles,
            "period_name": ts.name or f"{ts.period_start}..{ts.period_end}",
            "timesheet_id": timesheet_id,
            "totals": totals,
        },
    )
# =======================
# PTO Tracker (Admin only, per-year)
# =======================
def _years_for_employee(db: Session, employee_id: int) -> List[int]:
    """Collect every calendar year relevant to an employee's PTO tracker.

    Sources: years of submitted PTO usage, PTOAccount years, PTOAdjustment
    years, plus always the current and previous year. Returned sorted.
    """
    this_year = datetime.utcnow().year
    found: Set[int] = {this_year, this_year - 1}

    usage_years = (
        db.query(func.extract("year", TimeEntry.work_date))
        .join(
            TimesheetStatus,
            (TimesheetStatus.timesheet_id == TimeEntry.timesheet_id)
            & (TimesheetStatus.employee_id == TimeEntry.employee_id),
        )
        .filter(TimeEntry.employee_id == employee_id, TimesheetStatus.status == "submitted")
        .distinct()
        .all()
    )
    for (raw_year,) in usage_years:
        if raw_year is not None:
            # extract("year", ...) may come back as a non-int numeric type.
            found.add(int(float(raw_year)))

    for (acct_year,) in db.query(func.distinct(PTOAccount.year)).filter(PTOAccount.employee_id == employee_id).all():
        if acct_year is not None:
            found.add(acct_year)

    for (adj_year,) in db.query(func.distinct(PTOAdjustment.year)).filter(PTOAdjustment.employee_id == employee_id).all():
        if adj_year is not None:
            found.add(adj_year)

    return sorted(found)
@app.get("/pto-tracker", response_class=HTMLResponse)
@login_required
def pto_tracker_page(
    request: Request,
    employee_id: Optional[int] = Query(None),
    year: Optional[int] = Query(None),
    include_inactive: int = Query(0),
    db: Session = Depends(get_session),
):
    """Admin-only PTO ledger page: starting balance + adjustments + usage for one year."""
    if not current_is_admin(db, request.session.get("user_id")):
        raise HTTPException(status_code=403, detail="Admin access required")

    # Employees with NULL is_active are treated as active.
    if include_inactive:
        employees_all = db.query(Employee).order_by(Employee.name.asc()).all()
    else:
        employees_all = (
            db.query(Employee)
            .filter(text("COALESCE(is_active, TRUE) = TRUE"))
            .order_by(Employee.name.asc())
            .all()
        )

    inactive_rows = db.execute(text("SELECT id FROM employees WHERE COALESCE(is_active, TRUE) = FALSE")).fetchall()
    inactive_ids = {r[0] for r in inactive_rows}

    if not employees_all:
        # Empty state: no employees to track.
        return templates.TemplateResponse(
            "pto_tracker.html",
            {
                "request": request,
                "employees": [],
                "selected_employee": None,
                "years": [datetime.utcnow().year],
                "selected_year": datetime.utcnow().year,
                "starting_balance": 0.0,
                "remaining_balance": 0.0,
                "ledger": [],
                "include_inactive": include_inactive,
                "inactive_ids": inactive_ids,
            },
        )

    selected = db.query(Employee).get(employee_id) if employee_id else employees_all[0]

    years = _years_for_employee(db, selected.id)
    sel_year = int(year) if year else (years[-1] if years else datetime.utcnow().year)
    if sel_year not in years:
        years.append(sel_year)
        years.sort()

    y_start = date(sel_year, 1, 1)
    y_end = date(sel_year, 12, 31)

    acct = (
        db.query(PTOAccount)
        .filter(PTOAccount.employee_id == selected.id, PTOAccount.year == sel_year)
        .first()
    )
    # D()/q2() are project Decimal helpers (presumably convert / round to 2 dp
    # -- confirm in utils).
    start_bal = D(acct.starting_balance if acct else 0)

    adjustments = (
        db.query(PTOAdjustment)
        .filter(PTOAdjustment.employee_id == selected.id, PTOAdjustment.year == sel_year)
        .order_by(PTOAdjustment.created_at.asc())
        .all()
    )

    # Submitted PTO usage for the year, minus rows matched by an exclusion
    # (same employee, date, and normalized PTO type).
    usage_rows = (
        db.query(TimeEntry.work_date, TimeEntry.pto_type, func.sum(TimeEntry.pto_hours).label("hours"))
        .join(
            TimesheetStatus,
            (TimesheetStatus.timesheet_id == TimeEntry.timesheet_id) & (TimesheetStatus.employee_id == TimeEntry.employee_id),
        )
        .outerjoin(
            PTOUsageExclusion,
            (PTOUsageExclusion.employee_id == selected.id)
            & (PTOUsageExclusion.work_date == TimeEntry.work_date)
            & (func.coalesce(PTOUsageExclusion.pto_type, "") == func.coalesce(TimeEntry.pto_type, "")),
        )
        .filter(
            TimeEntry.employee_id == selected.id,
            TimeEntry.pto_hours > 0,
            TimesheetStatus.status == "submitted",
            TimeEntry.work_date >= y_start,
            TimeEntry.work_date <= y_end,
            PTOUsageExclusion.id.is_(None),
        )
        .group_by(TimeEntry.work_date, TimeEntry.pto_type)
        .order_by(TimeEntry.work_date.asc())
        .all()
    )

    # Merge adjustments (positive/negative deltas) and usage (always negative)
    # into one event stream.
    events = []
    for a in adjustments:
        events.append({
            "kind": "adjustment",
            "date": a.created_at.date(),
            "desc": (a.note or "Adjustment"),
            "delta": D(a.hours),
            "adj_id": a.id,
        })
    for u_date, u_type, u_hours in usage_rows:
        events.append({
            "kind": "usage",
            "date": u_date,
            "desc": (u_type or "PTO"),
            "delta": -D(u_hours),
            "u_date": u_date.isoformat(),
            "u_type": u_type or "",
        })

    # On the same date, adjustments are applied before usage.
    events.sort(key=lambda e: (e["date"], 0 if e["kind"] == "adjustment" else 1))

    running = q2(start_bal)
    ledger = [{
        "date": None,
        "desc": f"Starting balance ({sel_year})",
        "delta": "",
        "balance": float(running),
        "kind": "start",
    }]
    for ev in events:
        running = q2(running + ev["delta"])
        row = {
            "date": ev["date"],
            "desc": ev["desc"],
            "delta": float(q2(ev["delta"])),
            "balance": float(running),
            "kind": ev["kind"],
        }
        if ev["kind"] == "adjustment":
            row["adj_id"] = ev["adj_id"]
        else:
            row["u_date"] = ev["u_date"]
            row["u_type"] = ev["u_type"]
        ledger.append(row)

    remaining = float(running)

    return templates.TemplateResponse(
        "pto_tracker.html",
        {
            "request": request,
            "employees": employees_all,
            "selected_employee": selected,
            "years": years,
            "selected_year": sel_year,
            "starting_balance": float(q2(start_bal)),
            "remaining_balance": remaining,
            "ledger": ledger,
            "include_inactive": include_inactive,
            "inactive_ids": inactive_ids,
        },
    )

# -------- PTO tracker helpers/print (unchanged)
def _build_pto_ledger(db: Session, emp_id: int, sel_year: int):
    """Same ledger construction as pto_tracker_page, without the per-row
    adj_id/u_date/u_type metadata; returns (starting, remaining, ledger)."""
    y_start = date(sel_year, 1, 1)
    y_end = date(sel_year, 12, 31)

    acct = db.query(PTOAccount).filter(PTOAccount.employee_id == emp_id, PTOAccount.year == sel_year).first()
    start_bal = D(acct.starting_balance if acct else 0)

    adjustments = (
        db.query(PTOAdjustment)
        .filter(PTOAdjustment.employee_id == emp_id, PTOAdjustment.year == sel_year)
        .order_by(PTOAdjustment.created_at.asc())
        .all()
    )

    usage_rows = (
        db.query(TimeEntry.work_date, TimeEntry.pto_type, func.sum(TimeEntry.pto_hours).label("hours"))
        .join(
            TimesheetStatus,
            (TimesheetStatus.timesheet_id == TimeEntry.timesheet_id) & (TimesheetStatus.employee_id == TimeEntry.employee_id),
        )
        .outerjoin(
            PTOUsageExclusion,
            (PTOUsageExclusion.employee_id == emp_id)
            & (PTOUsageExclusion.work_date == TimeEntry.work_date)
            & (func.coalesce(PTOUsageExclusion.pto_type, "") == func.coalesce(TimeEntry.pto_type, "")),
        )
        .filter(
            TimeEntry.employee_id == emp_id,
            TimeEntry.pto_hours > 0,
            TimesheetStatus.status == "submitted",
            TimeEntry.work_date >= y_start,
            TimeEntry.work_date <= y_end,
            PTOUsageExclusion.id.is_(None),
        )
        .group_by(TimeEntry.work_date, TimeEntry.pto_type)
        .order_by(TimeEntry.work_date.asc())
        .all()
    )

    events = []
    for a in adjustments:
        events.append({"kind": "adjustment", "date": a.created_at.date(), "desc": (a.note or "Adjustment"), "delta": D(a.hours)})
    for u_date, u_type, u_hours in usage_rows:
        events.append({"kind": "usage", "date": u_date, "desc": (u_type or "PTO"), "delta": -D(u_hours)})
    # Adjustments before usage on the same date, matching pto_tracker_page.
    events.sort(key=lambda e: (e["date"], 0 if e["kind"] == "adjustment" else 1))

    running = q2(start_bal)
    ledger = [{"date": None, "desc": f"Starting balance ({sel_year})", "delta": "", "balance": float(running), "kind": "start"}]
    for ev in events:
        running = q2(running + ev["delta"])
        ledger.append({
            "date": ev["date"],
            "desc": ev["desc"],
            "delta": float(q2(ev["delta"])),
            "balance": float(running),
            "kind": ev["kind"],
        })
    return float(q2(start_bal)), float(running), ledger
@app.get("/pto-tracker/print", response_class=HTMLResponse)
@login_required
def pto_tracker_print(
    request: Request,
    employee_id: int = Query(...),
    year: Optional[int] = Query(None),
    db: Session = Depends(get_session),
):
    """Printable PTO ledger for one employee and one year (admin only)."""
    if not current_is_admin(db, request.session.get("user_id")):
        raise HTTPException(status_code=403, detail="Admin access required")

    employee = db.query(Employee).get(employee_id)
    if employee is None:
        raise HTTPException(status_code=404, detail="Employee not found")

    year_options = _years_for_employee(db, employee.id)
    if year:
        chosen_year = int(year)
    else:
        chosen_year = year_options[-1] if year_options else datetime.utcnow().year
    if chosen_year not in year_options:
        year_options.append(chosen_year)
        year_options.sort()

    opening, remaining, ledger = _build_pto_ledger(db, employee.id, chosen_year)
    # Current UTC timestamp made available to the template via request.state.
    request.state.now = datetime.utcnow()

    return templates.TemplateResponse(
        "pto_tracker_print.html",
        {
            "request": request,
            "hide_nav_links": True,
            "employee": employee,
            "selected_year": chosen_year,
            "starting_balance": opening,
            "remaining_balance": remaining,
            "ledger": ledger,
        },
    )
"hide_nav_links": True, "selected_year": sel_year, "bundles": bundles}, + ) + +# Mount the Attendance router again (safe) +app.include_router(attendance_router) + +if __name__ == "__main__": + import uvicorn + uvicorn.run("app.main:app", host="0.0.0.0", port=PORT, log_level="info") \ No newline at end of file diff --git a/app/migrate_to_timesheet_instances.py b/app/migrate_to_timesheet_instances.py new file mode 100644 index 0000000..439e578 --- /dev/null +++ b/app/migrate_to_timesheet_instances.py @@ -0,0 +1,76 @@ +import sys +from datetime import date +from sqlalchemy import text, inspect +from .db import engine, SessionLocal +from .models import TimesheetPeriod, TimeEntry, WeekAssignment, EmployeePeriodSetting, TimesheetStatus +from .utils import _semi_monthly_period_for_date + +def column_exists(inspector, table, column): + return any(c["name"] == column for c in inspector.get_columns(table)) + +def run(): + print("[migrate] Starting timesheet instances migration...") + insp = inspect(engine) + with engine.begin() as conn: + # add columns if missing + if not column_exists(insp, "time_entries", "timesheet_id"): + conn.execute(text("ALTER TABLE time_entries ADD COLUMN timesheet_id INTEGER")) + if not column_exists(insp, "week_assignments", "timesheet_id"): + conn.execute(text("ALTER TABLE week_assignments ADD COLUMN timesheet_id INTEGER")) + if not column_exists(insp, "employee_period_settings", "timesheet_id"): + conn.execute(text("ALTER TABLE employee_period_settings ADD COLUMN timesheet_id INTEGER")) + if not column_exists(insp, "timesheet_status", "timesheet_id"): + conn.execute(text("ALTER TABLE timesheet_status ADD COLUMN timesheet_id INTEGER")) + # NEW: add created_at to timesheet_periods so ordering can later use it safely + if not column_exists(insp, "timesheet_periods", "created_at"): + conn.execute(text("ALTER TABLE timesheet_periods ADD COLUMN created_at TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW() NOT NULL")) + + s = SessionLocal() + try: + # derive 
periods from entries + dates = [r[0] for r in s.query(TimeEntry.work_date).order_by(TimeEntry.work_date.asc()).all()] + period_keys = set() + for d in dates: + ps, pe = _semi_monthly_period_for_date(d) + period_keys.add((ps, pe)) + # also from other tables + was = s.query(WeekAssignment.period_start, WeekAssignment.period_end).group_by(WeekAssignment.period_start, WeekAssignment.period_end).all() + for ps, pe in was: + period_keys.add((ps, pe)) + sts = s.query(TimesheetStatus.period_start, TimesheetStatus.period_end).group_by(TimesheetStatus.period_start, TimesheetStatus.period_end).all() + for ps, pe in sts: + period_keys.add((ps, pe)) + epss = s.query(EmployeePeriodSetting.period_start, EmployeePeriodSetting.period_end).group_by(EmployeePeriodSetting.period_start, EmployeePeriodSetting.period_end).all() + for ps, pe in epss: + period_keys.add((ps, pe)) + + created = {} + for ps, pe in sorted(period_keys): + ts = s.query(TimesheetPeriod).filter(TimesheetPeriod.period_start == ps, TimesheetPeriod.period_end == pe).first() + if not ts: + ts = TimesheetPeriod(period_start=ps, period_end=pe, name=f"{ps.isoformat()} .. 
{pe.isoformat()}") + s.add(ts) + s.flush() + created[(ps, pe)] = ts.id + + # assign timesheet_id to all rows by their period + entries = s.query(TimeEntry).filter(TimeEntry.timesheet_id.is_(None)).all() + for e in entries: + ps, pe = _semi_monthly_period_for_date(e.work_date) + e.timesheet_id = created.get((ps, pe)) + was = s.query(WeekAssignment).filter(WeekAssignment.timesheet_id.is_(None)).all() + for w in was: + w.timesheet_id = created.get((w.period_start, w.period_end)) + epss = s.query(EmployeePeriodSetting).filter(EmployeePeriodSetting.timesheet_id.is_(None)).all() + for ep in epss: + ep.timesheet_id = created.get((ep.period_start, ep.period_end)) + sts = s.query(TimesheetStatus).filter(TimesheetStatus.timesheet_id.is_(None)).all() + for st in sts: + st.timesheet_id = created.get((st.period_start, st.period_end)) + s.commit() + print(f"[migrate] Done. Created {len(created)} timesheet_periods and backfilled timesheet_id.") + finally: + s.close() + +if __name__ == "__main__": + run() \ No newline at end of file diff --git a/app/models.py b/app/models.py new file mode 100644 index 0000000..24b8140 --- /dev/null +++ b/app/models.py @@ -0,0 +1,147 @@ +from datetime import datetime +from sqlalchemy import ( + Column, + Integer, + String, + Date, + DateTime, + ForeignKey, + UniqueConstraint, + Numeric, + Boolean, # NEW +) +from sqlalchemy.orm import declarative_base + +Base = declarative_base() + + +class User(Base): + __tablename__ = "users" + + id = Column(Integer, primary_key=True, index=True) + username = Column(String(128), unique=True, nullable=False, index=True) + password_hash = Column(String(256), nullable=False) + + +class Employee(Base): + __tablename__ = "employees" + + id = Column(Integer, primary_key=True, index=True) + name = Column(String(256), nullable=False, index=True) + + # NEW: active status + optional termination date + # Default True ensures existing/new employees are treated as active. 
+ is_active = Column(Boolean, nullable=False, default=True) + termination_date = Column(Date, nullable=True) + + +class TimesheetPeriod(Base): + __tablename__ = "timesheet_periods" + + id = Column(Integer, primary_key=True, index=True) + period_start = Column(Date, nullable=False, index=True) + period_end = Column(Date, nullable=False, index=True) + name = Column(String(256), nullable=True) + created_at = Column(DateTime, nullable=False, default=datetime.utcnow, index=True) + + +class TimeEntry(Base): + __tablename__ = "time_entries" + + id = Column(Integer, primary_key=True, index=True) + employee_id = Column(Integer, ForeignKey("employees.id"), nullable=False, index=True) + timesheet_id = Column(Integer, ForeignKey("timesheet_periods.id"), nullable=False, index=True) + + work_date = Column(Date, nullable=False, index=True) + clock_in = Column(DateTime, nullable=True) + clock_out = Column(DateTime, nullable=True) + + pto_clock_in_backup = Column(DateTime, nullable=True) + pto_clock_out_backup = Column(DateTime, nullable=True) + + total_hours = Column(Numeric(12, 2), default=0, nullable=False) + break_hours = Column(Numeric(12, 2), default=0, nullable=False) + + pto_hours = Column(Numeric(12, 2), default=0, nullable=False) + pto_type = Column(String(64), nullable=True) + + holiday_hours = Column(Numeric(12, 2), default=0, nullable=False) + bereavement_hours = Column(Numeric(12, 2), default=0, nullable=False) + + hours_paid = Column(Numeric(12, 2), default=0, nullable=False) + + +class WeekAssignment(Base): + __tablename__ = "week_assignments" + + id = Column(Integer, primary_key=True, index=True) + timesheet_id = Column(Integer, ForeignKey("timesheet_periods.id"), nullable=False, index=True) + + period_start = Column(Date, nullable=False) + period_end = Column(Date, nullable=False) + + day_date = Column(Date, nullable=False, index=True) + week_number = Column(Integer, nullable=False) + + +class EmployeePeriodSetting(Base): + __tablename__ = 
"employee_period_settings" + + id = Column(Integer, primary_key=True, index=True) + employee_id = Column(Integer, ForeignKey("employees.id"), nullable=False, index=True) + timesheet_id = Column(Integer, ForeignKey("timesheet_periods.id"), nullable=False, index=True) + + period_start = Column(Date, nullable=False) + period_end = Column(Date, nullable=False) + + carry_over_hours = Column(Numeric(12, 2), default=0, nullable=False) + + +class TimesheetStatus(Base): + __tablename__ = "timesheet_status" + + id = Column(Integer, primary_key=True, index=True) + timesheet_id = Column(Integer, ForeignKey("timesheet_periods.id"), nullable=False, index=True) + employee_id = Column(Integer, ForeignKey("employees.id"), nullable=False, index=True) + + period_start = Column(Date, nullable=False) + period_end = Column(Date, nullable=False) + + status = Column(String(32), default="pending", nullable=False) + submitted_at = Column(DateTime, default=datetime.utcnow, nullable=True) + + +class DuplicateReview(Base): + __tablename__ = "duplicate_reviews" + + id = Column(Integer, primary_key=True, index=True) + timesheet_id = Column(Integer, ForeignKey("timesheet_periods.id"), nullable=False, index=True) + employee_id = Column(Integer, ForeignKey("employees.id"), nullable=False, index=True) + work_date = Column(Date, nullable=False, index=True) + + __table_args__ = ( + UniqueConstraint("timesheet_id", "employee_id", "work_date", name="uix_dup_review"), + ) + + +# PTO tracker (per-year starting balance + per-year manual adjustments) +class PTOAccount(Base): + __tablename__ = "pto_accounts" + + id = Column(Integer, primary_key=True, index=True) + employee_id = Column(Integer, ForeignKey("employees.id"), nullable=False, index=True) + year = Column(Integer, nullable=True, index=True) # per-year balance; filled/required by app logic + starting_balance = Column(Numeric(12, 2), default=0, nullable=False) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = 
Column(DateTime, default=datetime.utcnow, nullable=False) + + +class PTOAdjustment(Base): + __tablename__ = "pto_adjustments" + + id = Column(Integer, primary_key=True, index=True) + employee_id = Column(Integer, ForeignKey("employees.id"), nullable=False, index=True) + year = Column(Integer, nullable=True, index=True) # per-year adjustment; filled/required by app logic + hours = Column(Numeric(12, 2), nullable=False) # positive or negative + note = Column(String(255), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) \ No newline at end of file diff --git a/app/payroll_export.py b/app/payroll_export.py new file mode 100644 index 0000000..5650a98 --- /dev/null +++ b/app/payroll_export.py @@ -0,0 +1,96 @@ +from io import BytesIO +from openpyxl import Workbook +from openpyxl.styles import Alignment, Font +from openpyxl.utils import get_column_letter + +def build_overview_xlsx(period_name: str, rows: list): + """ + Build an Excel workbook for the time period overview. + + period_name: e.g. 
"Dec 1..Dec 15, 2025" (or your custom name) + rows: list of dicts per employee: + { + "employee_name": str, + "regular": float, + "overtime": float, + "pto": float, + "holiday": float, + "paid_total": float, + "reimbursement": float, + "additional_payroll": float, + "notes": str, + } + """ + wb = Workbook() + ws = wb.active + ws.title = "Overview" + + # Title row + ws["A1"] = period_name + ws["A1"].font = Font(bold=True, size=14) + + headers = [ + "Employee", + "Regular Hours", + "Overtime Hours", + "PTO Hours", + "Holiday", + "Reimbursement", + "Additional Payroll Changes", + "Total hours for pay period", + "Notes", + ] + # Header row at row 2 (keep space for title row) + for col, text in enumerate(headers, start=1): + cell = ws.cell(row=2, column=col, value=text) + cell.font = Font(bold=True) + + # Freeze the header row (row 2) + ws.freeze_panes = "A3" + + # Data + row_idx = 3 + for r in rows: + ws.cell(row=row_idx, column=1, value=r.get("employee_name", "")) + ws.cell(row=row_idx, column=2, value=r.get("regular", 0.0)) + ws.cell(row=row_idx, column=3, value=r.get("overtime", 0.0)) + ws.cell(row=row_idx, column=4, value=r.get("pto", 0.0)) + ws.cell(row=row_idx, column=5, value=r.get("holiday", 0.0)) + ws.cell(row=row_idx, column=6, value=r.get("reimbursement", 0.0) or 0.0) + ws.cell(row=row_idx, column=7, value=r.get("additional_payroll", 0.0) or 0.0) + ws.cell(row=row_idx, column=8, value=r.get("paid_total", 0.0)) + ws.cell(row=row_idx, column=9, value=r.get("notes", "") or "") + row_idx += 1 + + # Number formats and alignment + num_cols = [2, 3, 4, 5, 8] # hour columns and total hours + currency_cols = [6, 7] # reimbursement, additional payroll + wrap_cols = [9] # notes + + max_row = ws.max_row + for r in range(3, max_row + 1): + for c in num_cols: + cell = ws.cell(row=r, column=c) + cell.number_format = "0.00" + cell.alignment = Alignment(horizontal="right") + for c in currency_cols: + cell = ws.cell(row=r, column=c) + cell.number_format = "$#,##0.00" + 
cell.alignment = Alignment(horizontal="right") + for c in wrap_cols: + cell = ws.cell(row=r, column=c) + cell.alignment = Alignment(wrap_text=True) + + # Autosize columns (basic heuristic) + for col in range(1, len(headers) + 1): + letter = get_column_letter(col) + max_len = len(headers[col - 1]) + for cell in ws[letter]: + val = "" if cell.value is None else str(cell.value) + max_len = max(max_len, len(val)) + ws.column_dimensions[letter].width = min(max_len + 2, 40) + + buf = BytesIO() + wb.save(buf) + buf.seek(0) + return buf.getvalue() \ No newline at end of file diff --git a/app/process_excel.py b/app/process_excel.py new file mode 100644 index 0000000..417c319 --- /dev/null +++ b/app/process_excel.py @@ -0,0 +1,356 @@ +import openpyxl +from datetime import datetime, time, date as date_type, timedelta +from typing import Optional, Dict, Any, List +from sqlalchemy.orm import Session +from decimal import Decimal + +from .models import TimeEntry, Employee +from .utils import D, q2 # use shared Decimal helpers + + +def safe_decimal(value, default=Decimal("0")) -> Decimal: + if value is None or value == "": + return Decimal(default) + if isinstance(value, (int, float)): + # Convert via str to avoid binary float artifacts + return D(value) + if isinstance(value, datetime): + # numeric only + return Decimal(default) + if isinstance(value, str): + try: + return D(value.strip()) + except Exception: + return Decimal(default) + return Decimal(default) + + +def parse_excel_serial_date(serial: float) -> Optional[date_type]: + try: + epoch = datetime(1899, 12, 30) # Excel epoch + return (epoch + timedelta(days=float(serial))).date() + except Exception: + return None + + +def parse_excel_serial_datetime(serial: float) -> Optional[datetime]: + try: + epoch = datetime(1899, 12, 30) + return epoch + timedelta(days=float(serial)) + except Exception: + return None + + +def parse_date(value) -> Optional[date_type]: + if value is None: + return None + if isinstance(value, 
date_type) and not isinstance(value, datetime): + return value + if isinstance(value, datetime): + return value.date() + if isinstance(value, (int, float)): + return parse_excel_serial_date(value) + if isinstance(value, str): + v = value.strip() + for fmt in [ + "%Y-%m-%d", + "%m/%d/%Y", + "%m/%d/%y", + "%d/%m/%Y", + "%d/%m/%y", + "%Y/%m/%d", + "%m-%d-%Y", + "%d-%m-%Y", + "%B %d, %Y", + "%b %d, %Y", + ]: + try: + return datetime.strptime(v, fmt).date() + except ValueError: + continue + return None + + +def parse_time_value(value) -> Optional[time]: + if value is None or value == "": + return None + if isinstance(value, time): + return value + if isinstance(value, datetime): + return value.time() + if isinstance(value, (int, float)): + # Excel serial time: fraction of a day + try: + seconds = int(round(float(value) * 86400)) + base = datetime(1970, 1, 1) + timedelta(seconds=seconds) + return base.time() + except Exception: + return None + if isinstance(value, str): + v = value.strip() + for fmt in ["%I:%M %p", "%H:%M", "%I:%M:%S %p", "%H:%M:%S"]: + try: + return datetime.strptime(v, fmt).time() + except ValueError: + continue + return None + + +def parse_datetime_value(value) -> Optional[datetime]: + """ + Parse a cell that may contain a full datetime (with date), handling: + - Python datetime objects + - Excel serial datetimes (float or int) + - Strings with date and time + Returns None if only time-of-day is present (use parse_time_value then bind to work_date). 
+ """ + if value is None or value == "": + return None + if isinstance(value, datetime): + return value + if isinstance(value, (int, float)): + dt = parse_excel_serial_datetime(value) + return dt + if isinstance(value, str): + v = value.strip() + has_slash_date = "/" in v or "-" in v + if has_slash_date: + for fmt in [ + "%m/%d/%Y %I:%M:%S %p", + "%m/%d/%Y %I:%M %p", + "%m/%d/%y %I:%M %p", + "%Y-%m-%d %H:%M:%S", + "%Y-%m-%d %H:%M", + "%m/%d/%Y %H:%M:%S", + "%m/%d/%Y %H:%M", + ]: + try: + return datetime.strptime(v, fmt) + except ValueError: + continue + return None + + +def norm(cell_val) -> str: + return str(cell_val).strip().lower() if cell_val is not None else "" + + +def pick_index(header: List[str], include: List[str], exclude: List[str] = []) -> Optional[int]: + for i, h in enumerate(header): + if not h: + continue + if all(tok in h for tok in include) and all(tok not in h for tok in exclude): + return i + return None + + +def detect_header_map(row_values: List[Any]) -> Dict[str, int]: + h = [norm(v) for v in row_values] + idx: Dict[str, int] = {} + + # Required + idx_emp = pick_index(h, ["employee", "name"], []) or pick_index(h, ["employee"], []) or pick_index(h, ["name"], []) + idx_date = pick_index(h, ["date"], []) + if idx_emp is None or idx_date is None: + return {} + + idx["employee"] = idx_emp + idx["work_date"] = idx_date + + # Clock in/out + ci = pick_index(h, ["clock", "in", "time"], []) or pick_index(h, ["clock", "in"], []) or pick_index(h, ["time", "in"], []) + co = pick_index(h, ["clock", "out", "time"], []) or pick_index(h, ["clock", "out"], []) or pick_index(h, ["time", "out"], []) + if ci is not None: + idx["clock_in"] = ci + if co is not None: + idx["clock_out"] = co + + # Break + br = ( + pick_index(h, ["breaks", "hours", "taken"], []) + or pick_index(h, ["break", "hours", "taken"], []) + or pick_index(h, ["breaks", "taken"], []) + or pick_index(h, ["unpaid", "hours"], []) + or pick_index(h, ["break"], []) + or pick_index(h, ["lunch"], []) + 
) + if br is not None: + idx["break_hours"] = br + + # Total: prefer "hours worked" + tot = ( + pick_index(h, ["hours", "worked"], ["minus"]) + or pick_index(h, ["worked"], ["minus"]) + ) + if tot is not None: + idx["total_hours"] = tot + else: + alt = pick_index(h, ["hours", "worked", "minus", "break"], []) or pick_index(h, ["worked", "minus", "break"], []) + if alt is not None: + idx["total_minus_break"] = alt + + # PTO hours + ptoh = ( + pick_index(h, ["pto", "hours"], []) + or pick_index(h, ["sick", "hours"], []) + or pick_index(h, ["vacation", "hours"], []) + or pick_index(h, ["vacation"], []) + ) + if ptoh is not None: + idx["pto_hours"] = ptoh + + # PTO Type (optional) + ptot = pick_index(h, ["pto", "type"], []) + if ptot is not None: + idx["pto_type"] = ptot + + # Holiday / Bereavement + hol = pick_index(h, ["holiday"], []) + ber = pick_index(h, ["bereavement"], []) + if hol is not None: + idx["holiday_hours"] = hol + if ber is not None: + idx["bereavement_hours"] = ber + + return idx + + +def import_workbook(path: str, db: Session, timesheet_id: Optional[int] = None) -> Dict[str, Any]: + wb = openpyxl.load_workbook(path, data_only=True) + + inserted_total = 0 + employees_seen = set() + sheets_processed = 0 + + for ws in wb.worksheets: + # Find header row + header_map: Dict[str, int] = {} + header_row_idx = None + for r in range(1, min(ws.max_row, 30) + 1): + row_values = [cell.value for cell in ws[r]] + header_map = detect_header_map(row_values) + if header_map: + header_row_idx = r + break + if not header_map or not header_row_idx: + continue + + sheets_processed += 1 + + # Parse rows + for r in range(header_row_idx + 1, ws.max_row + 1): + vals = [cell.value for cell in ws[r]] + + emp_raw = vals[header_map["employee"]] if len(vals) > header_map["employee"] else None + date_raw = vals[header_map["work_date"]] if len(vals) > header_map["work_date"] else None + if not emp_raw or not date_raw: + continue + + employee_name = str(emp_raw).strip() + work_date = 
parse_date(date_raw) + if not work_date: + continue + + # Optional fields as Decimal + clock_in_dt: Optional[datetime] = None + clock_out_dt: Optional[datetime] = None + break_hours_taken = D(0) + pto_hours = D(0) + pto_type = None + holiday_hours = D(0) + bereavement_hours = D(0) + total_from_sheet: Optional[Decimal] = None + alt_total_minus_break: Optional[Decimal] = None + + # Clock In/Out: prefer full datetime if present in the cell; else bind time to work_date + if "clock_in" in header_map and len(vals) > header_map["clock_in"]: + ci_raw = vals[header_map["clock_in"]] + clock_in_dt = parse_datetime_value(ci_raw) + if not clock_in_dt: + t = parse_time_value(ci_raw) + clock_in_dt = datetime.combine(work_date, t) if t else None + + if "clock_out" in header_map and len(vals) > header_map["clock_out"]: + co_raw = vals[header_map["clock_out"]] + clock_out_dt = parse_datetime_value(co_raw) + if not clock_out_dt: + t = parse_time_value(co_raw) + clock_out_dt = datetime.combine(work_date, t) if t else None + + # Overnight + if clock_in_dt and clock_out_dt and clock_out_dt <= clock_in_dt: + clock_out_dt = clock_out_dt + timedelta(days=1) + + # Breaks Hours Taken + if "break_hours" in header_map and len(vals) > header_map["break_hours"]: + break_hours_taken = safe_decimal(vals[header_map["break_hours"]], D(0)) + + # Total = Hours Worked (preferred) + if "total_hours" in header_map and len(vals) > header_map["total_hours"]: + total_from_sheet = safe_decimal(vals[header_map["total_hours"]], None) + + # Fallback: Hours Worked Minus Break Hours -> infer Hours Worked by adding back break + if "total_minus_break" in header_map and len(vals) > header_map["total_minus_break"]: + alt_total_minus_break = safe_decimal(vals[header_map["total_minus_break"]], None) + + # PTO / Holiday / Bereavement + if "pto_hours" in header_map and len(vals) > header_map["pto_hours"]: + pto_hours = safe_decimal(vals[header_map["pto_hours"]], D(0)) + if "pto_type" in header_map and len(vals) > 
header_map["pto_type"]: + v = vals[header_map["pto_type"]] + pto_type = (str(v).strip() if v is not None and not isinstance(v, (int, float, datetime)) else None) + if "holiday_hours" in header_map and len(vals) > header_map["holiday_hours"]: + holiday_hours = safe_decimal(vals[header_map["holiday_hours"]], D(0)) + if "bereavement_hours" in header_map and len(vals) > header_map["bereavement_hours"]: + bereavement_hours = safe_decimal(vals[header_map["bereavement_hours"]], D(0)) + + # Determine Total Hours (Hours Worked) + if total_from_sheet is None: + if alt_total_minus_break is not None and break_hours_taken is not None: + total_from_sheet = q2(alt_total_minus_break + break_hours_taken) + else: + if clock_in_dt and clock_out_dt: + total_from_sheet = q2(D((clock_out_dt - clock_in_dt).total_seconds()) / D(3600)) + else: + total_from_sheet = D(0) + else: + total_from_sheet = q2(total_from_sheet) + + # Force PTO type review: if PTO hours are present (including Vacation), blank pto_type + if pto_hours > D(0): + pto_type = None + + # Employee + emp = db.query(Employee).filter(Employee.name == employee_name).first() + if not emp: + emp = Employee(name=employee_name) + db.add(emp) + db.flush() + employees_seen.add(emp.id) + + entry = TimeEntry( + employee_id=emp.id, + work_date=work_date, + clock_in=clock_in_dt, + clock_out=clock_out_dt, + break_hours=q2(break_hours_taken), + total_hours=q2(total_from_sheet), + pto_hours=q2(pto_hours), + pto_type=pto_type, + holiday_hours=q2(holiday_hours), + bereavement_hours=q2(bereavement_hours), + timesheet_id=timesheet_id, + ) + + # Hours Paid = (Total - Break) + PTO + Holiday + Bereavement + worked = (D(entry.total_hours or 0) - D(entry.break_hours or 0)) + if worked < D(0): + worked = D(0) + entry.hours_paid = q2(worked + D(entry.pto_hours or 0) + D(entry.holiday_hours or 0) + D(entry.bereavement_hours or 0)) + + db.add(entry) + inserted_total += 1 + + db.commit() + print(f"[import_workbook] Sheets processed: {sheets_processed}, 
rows inserted: {inserted_total}, employees: {len(employees_seen)}") + return {"rows": inserted_total, "employees": len(employees_seen)} \ No newline at end of file diff --git a/app/routes/clock_edit.py b/app/routes/clock_edit.py new file mode 100644 index 0000000..fb9424e --- /dev/null +++ b/app/routes/clock_edit.py @@ -0,0 +1,98 @@ +from datetime import datetime, timedelta +from typing import Optional + +from fastapi import APIRouter, Depends, Form, Request +from fastapi.responses import RedirectResponse +from sqlalchemy.orm import Session + +from ..db import get_db +from ..models import TimeEntry + +router = APIRouter() + +def _parse_dt_local(v: Optional[str]) -> Optional[datetime]: + """ + Parse HTML datetime-local values safely: + - 'YYYY-MM-DDTHH:MM' + - 'YYYY-MM-DDTHH:MM:SS' + Returns None for blank/None. + """ + if not v: + return None + v = v.strip() + if not v: + return None + try: + # Python 3.11+ supports fromisoformat for both shapes + return datetime.fromisoformat(v) + except Exception: + for fmt in ("%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M"): + try: + return datetime.strptime(v, fmt) + except ValueError: + continue + return None + +def _safe_float(v, default=None): + if v is None or v == "": + return default + try: + return float(v) + except Exception: + return default + +@router.post("/timesheet/update-clocks") +async def update_clocks( + request: Request, + entry_id: int = Form(...), + clock_in: Optional[str] = Form(None), + clock_out: Optional[str] = Form(None), + recalc_total: Optional[str] = Form(None), + redirect_to: Optional[str] = Form(None), + db: Session = Depends(get_db), +): + """ + Update only the clock_in/clock_out fields for a time entry. + Optionally recalculate total_hours from clocks when recalc_total=1. + Re-applies hours_paid = max(0, total - break) + PTO + Holiday + Bereavement. 
+ """ + # Optional: enforce admin-only + is_admin = bool(getattr(request, "session", {}).get("is_admin")) if hasattr(request, "session") else True + if not is_admin: + # Fall back to normal redirect with no change + return RedirectResponse(url=redirect_to or "/", status_code=303) + + entry: TimeEntry = db.query(TimeEntry).filter(TimeEntry.id == entry_id).first() + if not entry: + return RedirectResponse(url=redirect_to or "/", status_code=303) + + new_ci = _parse_dt_local(clock_in) + new_co = _parse_dt_local(clock_out) + + # Apply if provided; if one is provided and the other left blank, keep the existing other value + if new_ci is not None: + entry.clock_in = new_ci + if new_co is not None: + entry.clock_out = new_co + + # Handle overnight: if both set and out <= in, assume next day + if entry.clock_in and entry.clock_out and entry.clock_out <= entry.clock_in: + entry.clock_out = entry.clock_out + timedelta(days=1) + + # Optionally recompute total from clocks + if recalc_total == "1" and entry.clock_in and entry.clock_out: + entry.total_hours = round((entry.clock_out - entry.clock_in).total_seconds() / 3600.0, 2) + + # Recompute hours_paid using the canonical rule + total = _safe_float(entry.total_hours, 0.0) or 0.0 + brk = _safe_float(entry.break_hours, 0.0) or 0.0 + pto = _safe_float(entry.pto_hours, 0.0) or 0.0 + hol = _safe_float(entry.holiday_hours, 0.0) or 0.0 + oth = _safe_float(entry.bereavement_hours, 0.0) or 0.0 + worked = max(0.0, total - brk) + entry.hours_paid = round(worked + pto + hol + oth, 2) + + db.add(entry) + db.commit() + + return RedirectResponse(url=redirect_to or "/", status_code=303) \ No newline at end of file diff --git a/app/routes/timesheet_api.py b/app/routes/timesheet_api.py new file mode 100644 index 0000000..a8d8738 --- /dev/null +++ b/app/routes/timesheet_api.py @@ -0,0 +1,32 @@ +from fastapi import APIRouter, Depends, Form, Request, HTTPException +from sqlalchemy.orm import Session +from sqlalchemy import text + +from ..db import 
get_session +from ..models import TimeEntry + +router = APIRouter(prefix="/timesheet", tags=["Timesheet API"]) + +@router.post("/delete-entry") +def delete_entry( + request: Request, + entry_id: int = Form(...), + timesheet_id: int = Form(...), + db: Session = Depends(get_session), +): + # Require edit/admin permission (mirror your other viewers) + if not (request.session.get("is_admin") or request.session.get("can_edit")): + raise HTTPException(status_code=403, detail="Edit access required") + + te = db.query(TimeEntry).get(entry_id) + if not te or int(te.timesheet_id) != int(timesheet_id): + raise HTTPException(status_code=404, detail="Time entry not found for this time period") + + # Remove import-batch linkage first (safe if none exist) + db.execute(text("DELETE FROM import_batch_items WHERE time_entry_id = :eid"), {"eid": entry_id}) + + # Delete the time entry + db.delete(te) + db.commit() + + return {"ok": True} \ No newline at end of file diff --git a/app/routes/viewer.py b/app/routes/viewer.py new file mode 100644 index 0000000..c9f1e89 --- /dev/null +++ b/app/routes/viewer.py @@ -0,0 +1,41 @@ +from fastapi import APIRouter, Depends, Form, Request, HTTPException +from fastapi.responses import RedirectResponse +from sqlalchemy.orm import Session +from sqlalchemy import text + +from ..db import get_session + +router = APIRouter(tags=["Viewer"]) + +@router.post("/viewer/delete-period") +def delete_period( + request: Request, + timesheet_id: int = Form(...), + db: Session = Depends(get_session), +): + if not request.session.get("is_admin"): + raise HTTPException(status_code=403, detail="Admin access required") + + tid = int(timesheet_id) + + # Cascade delete department import artifacts first + db.execute( + text( + "DELETE FROM import_batch_items " + "WHERE batch_id IN (SELECT id FROM import_batches WHERE timesheet_id = :tid)" + ), + {"tid": tid}, + ) + db.execute(text("DELETE FROM import_batches WHERE timesheet_id = :tid"), {"tid": tid}) + + # Delete all time 
entries for the period + db.execute(text("DELETE FROM time_entries WHERE timesheet_id = :tid"), {"tid": tid}) + + # Delete week assignments to fully reset the period + db.execute(text("DELETE FROM week_assignments WHERE timesheet_id = :tid"), {"tid": tid}) + + # If your original route removed the TimesheetPeriod record, keep that behavior: + # db.execute(text("DELETE FROM timesheet_periods WHERE id = :tid"), {"tid": tid}) + + db.commit() + return RedirectResponse(url="/viewer?msg=Time+period+deleted", status_code=303) \ No newline at end of file diff --git a/app/static/styles.css b/app/static/styles.css new file mode 100644 index 0000000..5cd0b84 --- /dev/null +++ b/app/static/styles.css @@ -0,0 +1,162 @@ +/* Modern light theme with subtle shadows and good spacing */ +:root{ + --bg:#f5f7fb; + --text:#0f172a; + --muted:#64748b; + --card:#ffffff; + --line:#e2e8f0; + --brand:#0ea5e9; + --brand-600:#0284c7; + --primary:#2563eb; + --primary-600:#1d4ed8; + --success:#16a34a; + --warn:#f59e0b; + --danger:#ef4444; + --dup:#fff7e6; + + --radius:12px; + --shadow: 0 12px 28px rgba(15,23,42,.08), 0 2px 6px rgba(15,23,42,.06); + + --mono: ui-monospace, SFMono-Regular, Menlo, Consolas, "Liberation Mono", monospace; + --font: Inter, system-ui, -apple-system, Segoe UI, Roboto, "Helvetica Neue", Arial, sans-serif; +} + +*{box-sizing:border-box} +html,body{height:100%} +body.tk{ + margin:0; + background:var(--bg); + color:var(--text); + font-family:var(--font); + line-height:1.5; +} + +/* Header / nav */ +.tk-header{ + position:sticky;top:0;z-index:10; + background:#ffffffc0; backdrop-filter:saturate(180%) blur(12px); + border-bottom:1px solid var(--line); +} +.tk-header-inner{ + max-width:1200px;margin:0 auto;display:flex;align-items:center;justify-content:space-between; + padding:12px 20px; +} +.tk-brand{ + font-weight:800;letter-spacing:.2px;text-decoration:none;color:var(--brand-600); +} +.tk-nav{display:flex;gap:12px} +.tk-nav-link{ + 
color:var(--muted);text-decoration:none;padding:8px 10px;border-radius:8px;transition:.15s; +} +.tk-nav-link:hover{color:var(--text);background:#f1f5f9} +.tk-nav-link.danger{color:var(--danger)} + +/* Main page container */ +.tk-container{ + max-width:100%; + margin:18px auto; + padding:0 20px; + display:flex; + justify-content:center; /* default for most pages */ + align-items:flex-start; +} + +/* Viewer: make the editor span full width and cancel left/right padding so the sidebar sits at the window edge */ +.page-wide{ + flex:1 1 auto; /* occupy full width of the container */ + margin:0 -20px; /* negate tk-container left/right padding */ + width:calc(100% + 40px); /* keep content full width after negative margins */ +} + +/* Layout */ +.editor-grid{ + display:grid; + grid-template-columns:280px minmax(0,1fr); /* fixed sidebar + fluid editor */ + gap:18px; + width:100%; +} +@media (max-width:960px){.editor-grid{grid-template-columns:1fr}} + +.panel{ + background:var(--card); + border:1px solid var(--line); + border-radius:var(--radius); + box-shadow:var(--shadow); + padding:14px; +} +.panel.sidebar{position:sticky;top:84px;height:fit-content} +.panel.main{min-height:480px} +.panel.toolbar{display:flex;align-items:center;gap:10px;flex-wrap:wrap} +.panel-title{font-weight:700;margin-bottom:8px} +.divider{height:1px;background:var(--line);margin:12px 0} + +/* Sidebar */ +.emp-list{list-style:none;margin:0;padding:0;max-height:52vh;overflow:auto} +.emp-list li{margin:0} +.emp-list li a{display:block;padding:8px 10px;border-radius:10px;text-decoration:none;color:var(--text)} +.emp-list li.active a{background:#eaf2ff;border:1px solid #c9dbff} +.emp-list li a:hover{background:#f7fafc} +.emp-list .empty{color:var(--muted);padding:6px 8px} +.actions .btn{margin-bottom:8px} +.w-full{width:100%} + +/* Controls */ +.label{color:var(--muted);margin-right:8px} +.input,.select{ + background:#fff;border:1px solid var(--line);color:var(--text); + padding:8px 
10px;border-radius:10px;outline:none;min-width:90px; + transition:.15s border-color, .15s box-shadow; +} +.input:focus,.select:focus{border-color:#93c5fd;box-shadow:0 0 0 4px rgba(147,197,253,.35)} +.inline{display:inline-flex;gap:10px;align-items:center} + +/* Buttons */ +.btn{ + appearance:none;border:1px solid var(--line);background:#f8fafc;color:var(--text); + padding:10px 14px;border-radius:12px;text-decoration:none;cursor:pointer; + transition:.15s filter, .15s transform; +} +.btn:hover{filter:brightness(1.02)} +.btn.primary{background:linear-gradient(180deg,#60a5fa,#3b82f6);color:white;border-color:#3b82f6} +.btn.primary.sm{padding:8px 12px;border-radius:10px} +.btn.danger{background:linear-gradient(180deg,#fca5a5,#ef4444);color:white;border-color:#ef4444} + +/* Alerts */ +.alert{border-radius:12px;padding:12px} +.alert.success{background:#ecfdf5;border:1px solid #a7f3d0} +.alert.warn{background:#fff7ed;border:1px solid #fed7aa} +.mb-8{margin-bottom:8px} +.mt-8{margin-top:8px} + +/* Totals */ +.panel.totals{ + display:grid;grid-template-columns:repeat(6,1fr);gap:12px; +} +.total{background:#f8fafc;border:1px solid var(--line);border-radius:10px;padding:10px} +.t-label{color:var(--muted);font-size:12px;margin-bottom:4px} +.t-val{font-weight:800} + +/* Table */ +.table-wrap{overflow:auto} +.table{ + width:100%;border-collapse:separate;border-spacing:0; + font-size:14px; +} +.table thead th{ + position:sticky;top:0;background:#f1f5f9;border-bottom:1px solid var(--line); + text-align:left;padding:10px;font-weight:700; +} +.table td{ + border-bottom:1px solid var(--line); + padding:8px 10px;vertical-align:middle; +} +.table .num{text-align:right} +.table .mono{font-family:var(--mono)} +.table.compact td,.table.compact th{padding:8px} +.table tr.total td{font-weight:700;border-top:2px solid var(--line)} +.dup-row{background:var(--dup)} +.table input.input{width:110px} +.table .select{width:130px} + +/* Utility */ +.mr-8{margin-right:8px} \ No newline at end of 
file diff --git a/app/templates/admin_employees.html b/app/templates/admin_employees.html new file mode 100644 index 0000000..76a6f49 --- /dev/null +++ b/app/templates/admin_employees.html @@ -0,0 +1,72 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
Employee Management
+ +
+ + +
+ +
+ + + + + + + + + + + {% for e in employees %} + {% set active = (e.is_active is not none and e.is_active) %} + + + + + + + {% endfor %} + +
NameStatusTermination DateActions
{{ e.name }} + {% if active %} + Active + {% else %} + Inactive + {% endif %} + + {% if e.termination_date %}{{ e.termination_date.isoformat() }}{% endif %} + + {% if active %} +
+ + + + + + +
+ {% else %} +
+ + + + +
+ {% endif %} +
+
+ +
Marking an employee inactive hides them from PTO Tracker and Print All (unless "Show inactive" is checked). Reactivation restores visibility. Termination date is optional.
+
+
+{% endblock %} \ No newline at end of file diff --git a/app/templates/admin_users.html b/app/templates/admin_users.html new file mode 100644 index 0000000..ef9ffd1 --- /dev/null +++ b/app/templates/admin_users.html @@ -0,0 +1,84 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
User Management
+ + +
+ + + + + + + + + + + + + +
+ + +
+ + + + + + + + + + + {% for u in users %} + + + + + + + {% endfor %} + +
Full NameUsernameRoleActions
{{ profiles.get(u.id, '') }}{{ u.username }} + {% if u.id in admin_ids %} + Admin + {% else %} + User + {% endif %} + + +
+ + + + +
+ + {% if u.id not in admin_ids %} +
+ + + +
+ {% endif %} + +
+ + +
+
+
+ + {% if flash %} +
{{ flash }}
+ {% endif %} +
+
+{% endblock %} \ No newline at end of file diff --git a/app/templates/assign_weeks.html b/app/templates/assign_weeks.html new file mode 100644 index 0000000..bf52e3a --- /dev/null +++ b/app/templates/assign_weeks.html @@ -0,0 +1,71 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
Assign Weeks
+

Assign a week number (1-3) for each date in {{ period }}. These settings compute weekly OT.

+ + {% if request.session.get('is_admin') %} +
+ + +
+ + + +
+ +
+ + + + + + + + + {% for d in days %} + + + + + {% endfor %} + +
DateWeek
{{ d.strftime("%A, %B %d, %Y") }} + +
+
+ +
+ + + +
+
+ + +
+ + +
+ + {% else %} +
+ You have view/print-only access. Assigning weeks is restricted to administrators. +
+ + {% endif %} +
+
+{% endblock %} \ No newline at end of file diff --git a/app/templates/attendance.html b/app/templates/attendance.html new file mode 100644 index 0000000..674791d --- /dev/null +++ b/app/templates/attendance.html @@ -0,0 +1,106 @@ +{% extends "layout.html" %} +{% block content %} + + + +
+
+
Attendance Tracker
+ +
+ + + + + + + + + + + + + + Export CSV + +
+ +
+
Worked
+
Off
+
Sick
+
PTO
+
Holiday
+
Other
+
Weekend
+
No data (no submissions in range)
+
+
+ +
+
Period Overview
+
+ {% for row in visual %} +
+
{{ row.employee.name }}
+
+ {% for c in row.cells %} +
+ {% endfor %} +
+
+
+
+
+
Worked days: {{ row.totals.worked_days }} ({{ row.hours.worked|round(2) }} hrs)
+
Off days: {{ row.totals.off_days }} ({{ row.hours.off|round(2) }} hrs)
+
Sick days: {{ row.totals.sick_days }} ({{ row.hours.sick|round(2) }} hrs)
+
PTO days: {{ row.totals.pto_days }} ({{ row.hours.pto|round(2) }} hrs)
+
Holiday days: {{ row.totals.holiday_days }} ({{ row.hours.holiday|round(2) }} hrs)
+
Other days: {{ row.totals.other_days }} ({{ row.hours.other|round(2) }} hrs)
+
+
+
+ {% endfor %} +
+
+
+ + +{% endblock %} \ No newline at end of file diff --git a/app/templates/dept_importer_map.html b/app/templates/dept_importer_map.html new file mode 100644 index 0000000..0323e1f --- /dev/null +++ b/app/templates/dept_importer_map.html @@ -0,0 +1,94 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
Map Columns {% if kind == 'excel' %}& Select Sheet{% endif %}
+ +
+ + + + +
+ {% if kind == 'excel' and sheets_info|length > 1 %} + + + {% else %} + +
Source: {{ sheet_name }}
+ {% endif %} + + + Start over + Back to Viewer +
+ +
+ Choose which columns from the {{ 'sheet' if kind == 'excel' else 'file' }} map to your fields. Defaults are auto-detected. +
+ + {% set selected = (sheets_info | selectattr('sheet_name', 'equalto', sheet_name) | list | first) %} + {% set headers = selected.header_vals %} + {% set auto = selected.auto_map %} + +
+ + + + + + + + + {% for key, label, required in target_fields %} + + + + + {% endfor %} + +
FieldColumn
+ {{ label }} + {% if required %}Required{% endif %} + + +
+
+ +
+ + +
+
+ +
+ +
+
+
+ + +{% endblock %} \ No newline at end of file diff --git a/app/templates/dept_importer_preview.html b/app/templates/dept_importer_preview.html new file mode 100644 index 0000000..ea4ee40 --- /dev/null +++ b/app/templates/dept_importer_preview.html @@ -0,0 +1,69 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
Preview Import
+ +
+ + + + +
+ + + + + + + + + + + {% for row in preview %} + + + + + + + {% endfor %} + +
EmployeeStatusRows
+ + {{ row.employee_name }}{{ row.status }}{{ row.row_count }}
+
+ + +
+ + Start over + Back to Viewer +
+
+ +
+ Existing employees will have new rows appended (duplicates are skipped). New employees will be created automatically. + {% if mode == 'initial' %} + After import, the time period dates will be derived from the imported rows and you'll assign weeks. + {% else %} + Dates not previously assigned to a week in this time period will be skipped. + {% endif %} +
+
+
+ + +{% endblock %} \ No newline at end of file diff --git a/app/templates/dept_importer_upload.html b/app/templates/dept_importer_upload.html new file mode 100644 index 0000000..27dea8a --- /dev/null +++ b/app/templates/dept_importer_upload.html @@ -0,0 +1,33 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
Import Another Department
+
+ + + + + + + + + + Back to Viewer +
+ +
+ Supported files: CSV, XLSX. Expected columns include Employee Name, Date, Clock In, Clock Out, Break Hours, PTO Hours, PTO Type. + We'll normalize whatever headers you have. +
+
+
+{% endblock %} \ No newline at end of file diff --git a/app/templates/employees.html b/app/templates/employees.html new file mode 100644 index 0000000..47040d3 --- /dev/null +++ b/app/templates/employees.html @@ -0,0 +1,25 @@ +{% extends "layout.html" %} +{% block content %} +

Employees

+
+ + +
+ + + + {% for e in employees %} + + + + + {% endfor %} + +
NameActions
{{ e.name }} + Timesheet + Overview +
+{% if not employees %} +

No employees yet. Import an Excel file first.

+{% endif %} +{% endblock %} \ No newline at end of file diff --git a/app/templates/layout.html b/app/templates/layout.html new file mode 100644 index 0000000..7ece67a --- /dev/null +++ b/app/templates/layout.html @@ -0,0 +1,164 @@ + + + + + TimeKeeper + + + + + + +
+
+ TimeKeeper + {% if not hide_nav_links %} + + {% endif %} +
+
+ +
+ {% block content %}{% endblock %} +
+ + + + + + \ No newline at end of file diff --git a/app/templates/login.html b/app/templates/login.html new file mode 100644 index 0000000..edf8c93 --- /dev/null +++ b/app/templates/login.html @@ -0,0 +1,39 @@ +{% extends "layout.html" %} +{% block content %} + + +
+
+
Welcome back
+
Sign in to continue
+ + {% if error %} +
{{ error }}
+ {% endif %} + +
+
+ + +
+
+ + +
+
+ +
+
+ +
+ Tip: I hope you enjoy all of your free time! P.S. Trevor +
+
+
+{% endblock %} \ No newline at end of file diff --git a/app/templates/overview.html b/app/templates/overview.html new file mode 100644 index 0000000..62482c4 --- /dev/null +++ b/app/templates/overview.html @@ -0,0 +1,93 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
+
+
Time Period Overview
+
Period: {{ period_name }}
+
+
+ Back to Review + +
+
+ +
+
+ + + + + + + + + + + + + + {% for b in bundles %} + + + + + + + + + + {% endfor %} + + + + + + + + + + + + +
EmployeeRegularOvertimePTOHolidayOtherPaid Total
{{ b.employee.name }}{{ b.grouped.totals.regular|fmt2 }}{{ b.grouped.totals.overtime|fmt2 }}{{ b.grouped.totals.pto|fmt2 }}{{ b.grouped.totals.holiday|fmt2 }}{{ b.grouped.totals.bereavement|fmt2 }}{{ b.grouped.totals.paid_total|fmt2 }}
Total{{ totals.regular|fmt2 }}{{ totals.overtime|fmt2 }}{{ totals.pto|fmt2 }}{{ totals.holiday|fmt2 }}{{ totals.bereavement|fmt2 }}{{ totals.paid_total|fmt2 }}
+
+
+
+
+ + + + + + + +{% endblock %} \ No newline at end of file diff --git a/app/templates/print_overview.html b/app/templates/print_overview.html new file mode 100644 index 0000000..8103204 --- /dev/null +++ b/app/templates/print_overview.html @@ -0,0 +1,169 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
Time Period Overview
+
+
Period: {{ period_name }}
+
+
+ + + + + + + + + + + + + + + {% for b in bundles %} + + + + + + + + + + {% endfor %} + + + + + + +
EmployeeRegularOvertimePTOHolidayOtherPaid Total
{{ b.employee.name }}{{ b.grouped.totals.regular|fmt2 }}{{ b.grouped.totals.overtime|fmt2 }}{{ b.grouped.totals.pto|fmt2 }}{{ b.grouped.totals.holiday|fmt2 }}{{ b.grouped.totals.bereavement|fmt2 }}{{ b.grouped.totals.paid_total|fmt2 }}
+ +
+
+ + + + +{% endblock %} \ No newline at end of file diff --git a/app/templates/print_timesheet.html b/app/templates/print_timesheet.html new file mode 100644 index 0000000..cae2c22 --- /dev/null +++ b/app/templates/print_timesheet.html @@ -0,0 +1,214 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
Timesheet
+
+
Employee: {{ employee.name }}
+
Period: {{ period_name }}
+
+ +
+ Regular: {{ grouped.totals.regular|fmt2 }} + Overtime: {{ grouped.totals.overtime|fmt2 }} + PTO: {{ grouped.totals.pto|fmt2 }} + Holiday: {{ grouped.totals.holiday|fmt2 }} + Other: {{ grouped.totals.bereavement|fmt2 }} + Paid Total: {{ grouped.totals.paid_total|fmt2 }} +
+
+ + + + + + + + + + + + + + + + + + + {% for r in grouped.rows %} + + + + + + + + + + + + + + {% endfor %} + +
DateClock InClock OutBreakTotalPTOPTO TypeHolidayOtherPaid Total
{{ r.work_date }} + {% if r.holiday_hours and r.holiday_hours > 0 %} + Holiday + {% elif r.pto_type %} + {{ r.pto_type }} + {% else %} + {{ r.clock_in|fmt_excel_dt }} + {% endif %} + + {% if r.holiday_hours and r.holiday_hours > 0 %} + Holiday + {% elif r.pto_type %} + {{ r.pto_type }} + {% else %} + {{ r.clock_out|fmt_excel_dt }} + {% endif %} + {{ r.break_hours|fmt2 }}{{ r.total_hours|fmt2 }}{{ r.pto_hours|fmt2 }}{{ r.pto_type or "" }}{{ r.holiday_hours|fmt2 }}{{ r.bereavement_hours|fmt2 }}{{ r.hours_paid|fmt2 }}
+ + + +
+ + + + +{% endblock %} \ No newline at end of file diff --git a/app/templates/print_timesheet_bundle.html b/app/templates/print_timesheet_bundle.html new file mode 100644 index 0000000..68d0c07 --- /dev/null +++ b/app/templates/print_timesheet_bundle.html @@ -0,0 +1,218 @@ +{% extends "layout.html" %} +{% block content %} +
+ {% for b in bundles %} +
+
+
Timesheet
+
+
Employee: {{ b.employee.name }}
+
Period: {{ period_name }}
+
+ +
+ Regular: {{ b.grouped.totals.regular|fmt2 }} + Overtime: {{ b.grouped.totals.overtime|fmt2 }} + PTO: {{ b.grouped.totals.pto|fmt2 }} + Holiday: {{ b.grouped.totals.holiday|fmt2 }} + Other: {{ b.grouped.totals.bereavement|fmt2 }} + Paid Total: {{ b.grouped.totals.paid_total|fmt2 }} +
+
+ + + + + + + + + + + + + + + + + + + {% for r in b.grouped.rows %} + + + + + + + + + + + + + + {% endfor %} + +
DateClock InClock OutBreakTotalPTOPTO TypeHolidayOtherPaid Total
{{ r.work_date }} + {% if r.holiday_hours and r.holiday_hours > 0 %} + Holiday + {% elif r.pto_type %} + {{ r.pto_type }} + {% else %} + {{ r.clock_in|fmt_excel_dt }} + {% endif %} + + {% if r.holiday_hours and r.holiday_hours > 0 %} + Holiday + {% elif r.pto_type %} + {{ r.pto_type }} + {% else %} + {{ r.clock_out|fmt_excel_dt }} + {% endif %} + {{ r.break_hours|fmt2 }}{{ r.total_hours|fmt2 }}{{ r.pto_hours|fmt2 }}{{ r.pto_type or "" }}{{ r.holiday_hours|fmt2 }}{{ r.bereavement_hours|fmt2 }}{{ r.hours_paid|fmt2 }}
+ + + +
+ {% endfor %} +
+ + + + +{% endblock %} \ No newline at end of file diff --git a/app/templates/pto_tracker.html b/app/templates/pto_tracker.html new file mode 100644 index 0000000..1958837 --- /dev/null +++ b/app/templates/pto_tracker.html @@ -0,0 +1,147 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
PTO Balance Tracker
+ +
+ + + + + + + + + + + {% if selected_employee %} + + Print + + + Print All + + {% endif %} +
+ + {% if selected_employee %} +
+
Balances
+
+
+ + + + + +
+ +
+
Remaining
+
+
{{ remaining_balance|fmt2 }}
+
+
+
+
+ +
+
Add Adjustment
+
+ + + + + + + +
+
Adjustments apply to the selected year.
+
+ +
+
PTO Ledger ({{ selected_year }}, submitted timesheets only)
+
+ + + + + + + + + + + + {% for row in ledger %} + + + + + + + + {% endfor %} + +
DateDescriptionHours (±)Running BalanceActions
{% if row.date %}{{ row.date.strftime("%b %d, %Y") }}{% endif %}{{ row.desc }}{% if row.delta != '' %}{{ row.delta|fmt2 }}{% endif %}{{ row.balance|fmt2 }} + {% if row.kind == 'adjustment' and row.adj_id %} +
+ + + + +
+ {% elif row.kind == 'usage' and row.u_date is defined %} +
+ + + + + +
+ {% else %} + + {% endif %} +
+
+
+ Usage rows reflect PTO on submitted timesheets within the selected year. "Exclude" hides a specific date/type without altering timesheets. Adjustments and starting balances are year-specific. +
+
+ {% endif %} +
+
+ + +{% endblock %} \ No newline at end of file diff --git a/app/templates/pto_tracker_print.html b/app/templates/pto_tracker_print.html new file mode 100644 index 0000000..42f2287 --- /dev/null +++ b/app/templates/pto_tracker_print.html @@ -0,0 +1,87 @@ +{% extends "layout.html" %} +{% block content %} + + + + + + +{% endblock %} \ No newline at end of file diff --git a/app/templates/pto_tracker_print_all.html b/app/templates/pto_tracker_print_all.html new file mode 100644 index 0000000..a9982b1 --- /dev/null +++ b/app/templates/pto_tracker_print_all.html @@ -0,0 +1,89 @@ +{% extends "layout.html" %} +{% block content %} + + + + + + +{% endblock %} \ No newline at end of file diff --git a/app/templates/review.html b/app/templates/review.html new file mode 100644 index 0000000..f163b03 --- /dev/null +++ b/app/templates/review.html @@ -0,0 +1,83 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
Submitted Timesheets
+ {% if flash %}
{{ flash }}
{% endif %} +
+ + + + + {% if active_ts %} + Time Period Overview + + Export to BA + {% endif %} + + +
+ + {% if submitted and submitted|length > 0 %} + + + + + + {% for row in submitted %} + + + + + + {% endfor %} + +
EmployeeSubmitted AtActions
{{ row.employee_name }}{% if row.submitted_at %}{{ row.submitted_at|fmt_dt }}{% endif %} + View/Edit + +
+ {% else %} +
No submitted timesheets for this period.
+ {% endif %} +
+
+ + + + + + + +{% endblock %} \ No newline at end of file diff --git a/app/templates/review_edit.html b/app/templates/review_edit.html new file mode 100644 index 0000000..97543c2 --- /dev/null +++ b/app/templates/review_edit.html @@ -0,0 +1,659 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
Review & Edit
+ {% if flash %}
{{ flash }}
{% endif %} +
+
{{ employee.name }} – {{ period_name }}
+
+ Return to Review Timesheets + +
+
+ +
+ + + + + + + Applied to Week 1 regular cap (40 minus carry over). +
+ + +
+
Payroll Extras
+
+ + + +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ +
+
+
Saved values appear in the Overview Excel export.
+
+ + + {% if holiday_needs is defined and holiday_needs|length > 0 %} +
+
Holiday review required{% if employee %} for {{ employee.name }}{% endif %}: The dates below include Holiday hours.
+
    + {% for h in holiday_needs %} +
  • {{ h.work_date.strftime("%A, %B %d, %Y") }} – Holiday {{ h.holiday_hours|fmt2 }} hr(s)
  • + {% endfor %} +
+ {% if timesheet_id and employee and can_edit %} +
+
+ + + +
+
+ {% endif %} +
Holiday rows are highlighted in the grid until you click Reviewed. Saving rows does not clear highlights.
+
+ {% endif %} + +
+ + + + + + + + + + + + + + + + + + {% set has_holiday_flags = flagged_holiday_dates is defined and reviewed_holiday_dates is defined %} + {% for r in grouped.rows %} + + {% set highlight_holiday = (has_holiday_flags and (r.work_date in flagged_holiday_dates and r.work_date not in reviewed_holiday_dates)) or (not has_holiday_flags and (r.holiday_hours and r.holiday_hours > 0)) %} + + + + + + + + + + + + + + + + + + + + + + + {% endfor %} + +
DATECLOCK INCLOCK OUTBREAKTOTALPTOPTO TYPEHOLIDAYOTHERPAID
+ {{ r.work_date.strftime("%b %d, %Y") }} + {% if highlight_holiday %} + Holiday – Needs review + {% endif %} + +
+ + {% if r.holiday_hours and r.holiday_hours > 0 %} + Holiday + {% elif r.pto_type %} + {{ r.pto_type }} + {% else %} + {{ r.clock_in|fmt_excel_dt if r.clock_in else '-' }} + {% endif %} + + +
+ +
+
+ + {% if r.holiday_hours and r.holiday_hours > 0 %} + Holiday + {% elif r.pto_type %} + {{ r.pto_type }} + {% else %} + {{ r.clock_out|fmt_excel_dt if r.clock_out else '-' }} + {% endif %} + + +
+ +
+ + + + + + + + + + + + + + + + + {% if can_edit %} + + {% else %} + + {% endif %} +
+
+ +
+
Regular
{{ grouped.totals.regular|fmt2 }}
+
Overtime
{{ grouped.totals.overtime|fmt2 }}
+
PTO
{{ grouped.totals.pto|fmt2 }}
+
Holiday
{{ grouped.totals.holiday|fmt2 }}
+
Other
{{ grouped.totals.bereavement|fmt2 }}
+
Paid Total
{{ grouped.totals.paid_total|fmt2 }}
+
+
+
from collections import defaultdict
from calendar import monthrange
from dataclasses import dataclass
from datetime import date, timedelta
from typing import Dict, List, Optional, Tuple

from sqlalchemy.orm import Session
from sqlalchemy import func

from .models import TimeEntry


@dataclass
class DayRow:
    """A single timesheet display row, pre-formatted from one TimeEntry."""
    entry_id: int
    work_date: date
    clock_in: str            # formatted "HH:MM AM/PM", or "" when absent
    clock_out: str           # formatted "HH:MM AM/PM", or "" when absent
    break_hours: float
    total_hours: float
    pto_hours: float
    pto_type: str | None
    holiday_hours: float
    bereavement_hours: float
    hours_paid: float


def parse_period_selector(selector: Optional[str]) -> Dict:
    """Parse a free-form period selector into a descriptor dict.

    Recognized shapes:
      * "YYYY-MM-DD..YYYY-MM-DD" -> {"type": "range", ...}
      * "YYYY-MM"                -> {"type": "month", ...}
      * "YYYY"                   -> {"type": "year", ...}
      * "YYYY-MM-DD"             -> {"type": "single", ...}

    An empty or malformed selector (including non-numeric year/month parts
    or bad range dates, which previously raised ValueError) falls back to
    the current-pay-period descriptor instead of raising.
    """
    fallback = {"type": "current_pay_period", "label": "Current Pay Period"}
    if not selector:
        return fallback
    try:
        if ".." in selector:
            s, e = selector.split("..", 1)
            return {
                "type": "range",
                "start": date.fromisoformat(s),
                "end": date.fromisoformat(e),
                "label": f"{s}..{e}",
            }
        if len(selector) == 7:
            y, m = selector.split("-")
            return {"type": "month", "year": int(y), "month": int(m), "label": selector}
        if len(selector) == 4:
            return {"type": "year", "year": int(selector), "label": selector}
        return {"type": "single", "date": date.fromisoformat(selector), "label": selector}
    except Exception:
        # Any parse failure degrades gracefully to the default period.
        return fallback


def _start_of_week(d: date, start_weekday: int) -> date:
    """Most recent date on or before *d* whose weekday() equals start_weekday."""
    return d - timedelta(days=(d.weekday() - start_weekday) % 7)


def _end_of_month(d: date) -> date:
    """Last calendar day of *d*'s month (handles December and leap years)."""
    return date(d.year, d.month, monthrange(d.year, d.month)[1])


def compute_period_bounds(selector: Dict, pay_period_type: str, start_weekday: int) -> Tuple[date, date]:
    """Resolve a parsed selector to inclusive (start, end) dates.

    Explicit range/month/year selectors map directly. Every other selector
    type (including "single") resolves to the *current* pay period, whose
    shape depends on pay_period_type: WEEKLY, SEMI_MONTHLY (1-15 / 16-EOM),
    or the BIWEEKLY default anchored to the 2020-01-06 epoch.
    """
    today = date.today()
    sel_type = selector["type"]
    if sel_type == "range":
        return selector["start"], selector["end"]
    if sel_type == "month":
        start = date(selector["year"], selector["month"], 1)
        return start, _end_of_month(start)
    if sel_type == "year":
        y = selector["year"]
        return date(y, 1, 1), date(y, 12, 31)

    start_week = _start_of_week(today, start_weekday)
    kind = pay_period_type.upper()
    if kind == "WEEKLY":
        return start_week, start_week + timedelta(days=6)
    if kind == "SEMI_MONTHLY":
        if today.day <= 15:
            return date(today.year, today.month, 1), date(today.year, today.month, 15)
        return date(today.year, today.month, 16), _end_of_month(today)
    # BIWEEKLY default: snap the current week back one week when an odd
    # number of weeks has elapsed since the epoch, so the period boundary
    # is stable no matter which day "today" lands on.
    start = start_week
    epoch = date(2020, 1, 6)
    if ((start - epoch).days // 7) % 2 != 0:
        start -= timedelta(days=7)
    return start, start + timedelta(days=13)


def default_week_ranges(start: date, end: date, start_weekday: int) -> List[Tuple[date, date]]:
    """
    Produce contiguous week ranges inside [start, end] using start_weekday.
    This often yields 2-3 weeks for semi-monthly periods.
    """
    ranges: List[Tuple[date, date]] = []
    cursor = start
    while cursor <= end:
        # A week ends 6 days after the configured start weekday, clamped to
        # the period end, so the first and last ranges may be short.
        end_of_week = _start_of_week(cursor, start_weekday) + timedelta(days=6)
        week_end = min(end, end_of_week)
        ranges.append((cursor, week_end))
        cursor = week_end + timedelta(days=1)
    return ranges


def group_entries_for_timesheet(
    entries: List[TimeEntry],
    start: date,
    end: date,
    pay_period_type: str,
    start_weekday: int,
    week_ranges: Optional[List[Tuple[date, date]]] = None,
    carry_over_hours: float = 0.0,
):
    """Shape raw TimeEntry rows into display rows plus weekly/period totals.

    Overtime is computed per week: hours beyond 40 are OT, with
    carry_over_hours counting against week 1's 40-hour cap only. Entries
    dated outside every week range are attributed to the last week.
    """
    rows: List[DayRow] = []
    for e in entries:
        rows.append(
            DayRow(
                entry_id=e.id,
                work_date=e.work_date,
                clock_in=e.clock_in.strftime("%I:%M %p") if e.clock_in else "",
                clock_out=e.clock_out.strftime("%I:%M %p") if e.clock_out else "",
                break_hours=round(e.break_hours or 0.0, 2),
                total_hours=round(e.total_hours or 0.0, 2),
                pto_hours=round(e.pto_hours or 0.0, 2),
                pto_type=e.pto_type or "",
                holiday_hours=round(e.holiday_hours or 0.0, 2),
                bereavement_hours=round(e.bereavement_hours or 0.0, 2),
                hours_paid=round(e.hours_paid or (e.total_hours or 0.0), 2),
            )
        )
    rows.sort(key=lambda r: (r.work_date, r.clock_in or ""))

    # Week ranges: caller-provided, else derived from the period bounds.
    week_ranges = week_ranges or default_week_ranges(start, end, start_weekday)

    def week_idx(d: date) -> int:
        # 1-based index of the week containing d; out-of-range dates are
        # attributed to the final week rather than dropped.
        for idx, (ws, we) in enumerate(week_ranges, start=1):
            if ws <= d <= we:
                return idx
        return len(week_ranges)

    weekly_hours = defaultdict(float)
    for r in rows:
        weekly_hours[week_idx(r.work_date)] += r.total_hours

    weekly_summary = []
    for i, (ws, we) in enumerate(week_ranges, start=1):
        base_hours = round(weekly_hours[i], 2)
        # Carry-over only reduces week 1's regular-hours cap.
        carry = carry_over_hours if i == 1 else 0.0
        ot = max(0.0, base_hours + carry - 40.0)
        reg = max(0.0, base_hours - ot)
        weekly_summary.append({
            "label": f"Week {i}",
            "start": ws,
            "end": we,
            "all": round(base_hours, 2),
            "reg": round(reg, 2),
            "ot": round(ot, 2),
        })

    totals = {
        "regular": round(sum(w["reg"] for w in weekly_summary), 2),
        "pto": round(sum(r.pto_hours for r in rows), 2),
        "holiday": round(sum(r.holiday_hours for r in rows), 2),
        "bereavement": round(sum(r.bereavement_hours for r in rows), 2),
        "overtime": round(sum(w["ot"] for w in weekly_summary), 2),
        "paid_total": round(sum(r.hours_paid for r in rows), 2),
    }

    return {"rows": rows, "weekly_summary": weekly_summary, "totals": totals, "week_ranges": week_ranges}


def compute_yearly_stats(
    db: Session,
    employee_id: int,
    scope: str = "year",
    year: Optional[int] = None,
    month: Optional[int] = None,
    workdays_per_month: int = 20,
):
    """Aggregate per-month hour totals for one employee.

    scope="year" filters to *year*; scope="month" filters to *year*/*month*;
    any other scope returns every month on record. average_daily divides the
    month's total by *workdays_per_month* (default 20 — the previously
    hard-coded assumption, now a backward-compatible parameter).

    NOTE(review): func.date_part is PostgreSQL-specific; confirm the
    deployment never targets another backend.
    """
    q = db.query(
        func.date_part("year", TimeEntry.work_date).label("y"),
        func.date_part("month", TimeEntry.work_date).label("m"),
        func.sum(TimeEntry.total_hours).label("total"),
        func.sum(TimeEntry.pto_hours).label("pto"),
        func.sum(TimeEntry.holiday_hours).label("holiday"),
        func.sum(TimeEntry.bereavement_hours).label("bereavement"),
        func.sum(TimeEntry.hours_paid).label("paid"),
    ).filter(TimeEntry.employee_id == employee_id)

    if scope == "month" and year and month:
        q = q.filter(func.date_part("year", TimeEntry.work_date) == year)
        q = q.filter(func.date_part("month", TimeEntry.work_date) == month)
    elif scope == "year" and year:
        q = q.filter(func.date_part("year", TimeEntry.work_date) == year)

    q = q.group_by("y", "m").order_by("y", "m")

    divisor = float(max(workdays_per_month, 1))  # guard against a zero divisor
    data = []
    for y, m, total, pto, holiday, bereavement, paid in q.all():
        total_f = float(total or 0.0)
        data.append({
            "year": int(y),
            "month": int(m),
            "total_hours": total_f,
            "average_daily": round(total_f / divisor, 2),
            "pto": float(pto or 0.0),
            "holiday": float(holiday or 0.0),
            "bereavement": float(bereavement or 0.0),
            "paid": float(paid or 0.0),
        })
    return {"rows": data}


def available_years_for_employee(db: Session, employee_id: int) -> List[int]:
    """Inclusive list of calendar years covered by the employee's entries."""
    row = (
        db.query(func.min(TimeEntry.work_date), func.max(TimeEntry.work_date))
        .filter(TimeEntry.employee_id == employee_id)
        .first()
    )
    if not row or not row[0]:
        return []
    start, end = row
    return list(range(start.year, end.year + 1))
+
+
Import time period workbook
+ {% if error %}
{{ error }}
{% endif %} + + {% if request.session.get('is_admin') %} +
+
+ + +
+
+ + +
+
+ + + Cancel +
+
+ {% else %} +
+ You have view/print-only access. Importing new time periods is restricted to administrators. +
+ + {% endif %} +
+
+{% endblock %} \ No newline at end of file diff --git a/app/templates/upload_select.html b/app/templates/upload_select.html new file mode 100644 index 0000000..76c8d3e --- /dev/null +++ b/app/templates/upload_select.html @@ -0,0 +1,30 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
Select Worksheet to Import
+
File: {{ filename }}
+ {% if error %}
{{ error }}
{% endif %} + +
+
+ + +
+ +
+ + +
+ +
+ After import, CI/CO will be date-bound and Total will be calculated in software from CI/CO; Break will sync from "Break Hours Taken" or break start/end. +
+
+
+
+{% endblock %} \ No newline at end of file diff --git a/app/templates/viewer.html b/app/templates/viewer.html new file mode 100644 index 0000000..8cb0bac --- /dev/null +++ b/app/templates/viewer.html @@ -0,0 +1,1146 @@ +{% extends "layout.html" %} +{% block content %} +{% if all_done %} +
+
+
Time Period Complete
+
{{ flash }}
+ +
+ Review Timesheets + {% if can_edit %} + Import New Time Period + {% endif %} +
+
+
+{% else %} +
+
+ + + +
+ {% if flash %}
{{ flash }}
{% endif %} + + +
+
+ {% if selected_employee %} + + {% endif %} + + + +
+ + {% if active_ts and can_edit %} +
+ + +
+ {% endif %} + + {% if active_ts and selected_employee and can_edit %} +
+ + + +
+ {% endif %} + + {% if can_edit %} + Import New Time Period + {% endif %} +
+ + {% if active_ts and selected_employee %} + +
+
Employee Settings
+
+ + + + + + Applied to Week 1 regular cap (40 minus carry over). +
+
+ {% endif %} + + {% if employees and selected_employee and grouped %} + {% if duplicates and duplicates|length > 0 %} +
+
Duplicate dates detected{% if selected_employee %} for {{ selected_employee.name }}{% endif %}:
+
    + {% for d in duplicates %} +
  • {{ d.date.strftime("%A, %B %d, %Y") }} - {{ d.count }} rows
  • + {% endfor %} +
+ + {% if selected_employee and active_ts and can_edit %} +
+
+ + + +
+
+ {% endif %} + +
Duplicate rows are highlighted in the grid until you click Reviewed.
+
+ {% endif %} + + {% if pto_needs and pto_needs|length > 0 %} +
+
PTO review required{% if selected_employee %} for {{ selected_employee.name }}{% endif %}: Missing PTO type on the dates below.
+
    + {% for r in pto_needs %} +
  • {{ r.work_date.strftime("%A, %B %d, %Y") }} – PTO {{ r.pto_hours|fmt2 }} hr(s)
  • + {% endfor %} +
+ {% if selected_employee and active_ts and can_edit %} +
+
+ + + +
+
+ {% endif %} +
Rows needing PTO type stay highlighted until you click Reviewed. Saving rows does not clear highlights.
+
+ {% endif %} + + {% if holiday_needs is defined and holiday_needs|length > 0 %} +
+
Holiday review required{% if selected_employee %} for {{ selected_employee.name }}{% endif %}: Dates below include Holiday hours.
+
    + {% for r in holiday_needs %} +
  • {{ r.work_date.strftime("%A, %B %d, %Y") }} – Holiday {{ r.holiday_hours|fmt2 }} hr(s)
  • + {% endfor %} +
+ {% if selected_employee and active_ts and can_edit %} +
+
+ + + +
+
+ {% endif %} +
Holiday rows are highlighted until you click Reviewed. Saving rows does not clear highlights.
+
+ {% endif %} + + {% if long_shift_needs and long_shift_needs|length > 0 %} +
+
Long shifts detected{% if selected_employee %} for {{ selected_employee.name }}{% endif %} (over 10 hours):
+
    + {% for r in long_shift_needs %} +
  • {{ r.work_date.strftime("%A, %B %d, %Y") }} – {{ r.total_hours|fmt2 }} hours
  • + {% endfor %} +
+ {% if selected_employee and active_ts and can_edit %} +
+
+ + + +
+
+ {% endif %} +
Long-shift rows are highlighted in the grid until you click Reviewed.
+
+ {% endif %} + + +
+
Regular
{{ grouped.totals.regular|fmt2 }}
+
PTO
{{ grouped.totals.pto|fmt2 }}
+
Holiday
{{ grouped.totals.holiday|fmt2 }}
+
Other
{{ grouped.totals.bereavement|fmt2 }}
+
Overtime
{{ grouped.totals.overtime|fmt2 }}
+
Paid Total
{{ grouped.totals.paid_total|fmt2 }}
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + {% set has_holiday_flags = flagged_holiday_dates is defined and reviewed_holiday_dates is defined %} + {% for r in grouped.rows %} + + {% set highlight_long = (r.work_date in flagged_long_dates and r.work_date not in reviewed_long_dates) %} + {% set highlight_pto = (r.work_date in flagged_pto_dates and r.work_date not in reviewed_pto_dates) %} + {% set highlight_holiday = (has_holiday_flags and (r.work_date in flagged_holiday_dates and r.work_date not in reviewed_holiday_dates)) or (not has_holiday_flags and (r.holiday_hours and r.holiday_hours > 0)) %} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + {% endfor %} + +
DATECLOCK INCLOCK OUTBREAKTOTALPTOPTO TYPEHOLIDAYOTHERPAID
+ {{ r.work_date.strftime("%A, %B %d, %Y") }} + {% if highlight_holiday %} + Holiday – Needs review + {% endif %} + +
+ + {% if r.holiday_hours and r.holiday_hours > 0 %} + Holiday + {% elif r.pto_type %} + {{ r.pto_type }} + {% else %} + {{ r.clock_in|fmt_excel_dt if r.clock_in else '-' }} + {% endif %} + + +
+ +
+
+ + {% if r.holiday_hours and r.holiday_hours > 0 %} + Holiday + {% elif r.pto_type %} + {{ r.pto_type }} + {% else %} + {{ r.clock_out|fmt_excel_dt if r.clock_out else '-' }} + {% endif %} + + +
+ +
+ + + + + + + + + + + + + + + +
+ + {% if can_edit %} + + + {% else %} + + {% endif %} +
+
+
+
+ + +
+
+ + + + + + + + + + + {% for w in grouped.weekly_summary %} + + + + + + + {% endfor %} + + + + + + + +
RegOTWeek Total
{{ w.label }}{{ w.reg|fmt2 }}{{ w.ot|fmt2 }}{{ w.all|fmt2 }}
Total{{ (grouped.weekly_summary|sum(attribute='reg'))|fmt2 }}{{ (grouped.weekly_summary|sum(attribute='ot'))|fmt2 }}{{ (grouped.weekly_summary|sum(attribute='all'))|fmt2 }}
+
+
+ {% else %} +
+
No timesheet instances found.
+ {% if can_edit %} + + {% endif %} +
+ {% endif %} +
+
+
"""Timesheet utilities: pay-period helpers, viewer row shaping, and duplicate merging.

All hour arithmetic uses ``Decimal`` with half-up rounding to two places so
displayed and persisted values stay consistent (the models use Numeric columns).
"""

from dataclasses import dataclass
from datetime import date, datetime, time
from calendar import monthrange
from decimal import Decimal, ROUND_HALF_UP, getcontext
from typing import Dict, List, Optional, Tuple

from sqlalchemy import func
from sqlalchemy.orm import Session

from .models import TimeEntry, TimesheetPeriod

# Process-wide Decimal context for consistent rounding.
# NOTE(review): 28 is already CPython's default precision, so this is a
# harmless no-op kept for explicitness.
getcontext().prec = 28


def D(x) -> Decimal:
    """Coerce *x* (number, numeric string, or None) to ``Decimal``.

    Falsy inputs (None, 0, "", 0.0) become ``Decimal("0")``. Conversion goes
    through ``str`` so binary-float artifacts are not carried into Decimal.
    """
    return Decimal(str(x or 0))


def q2(x: Decimal) -> Decimal:
    """Quantize *x* to two decimal places using ROUND_HALF_UP."""
    return x.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)


# ---------------------------
# Period helpers and listing
# ---------------------------
def _semi_monthly_period_for_date(d: date) -> Tuple[date, date]:
    """Return (start, end) of the semi-monthly pay period containing *d*.

    Periods are the 1st-15th and the 16th-last-day of each month.
    """
    if d.day <= 15:
        return date(d.year, d.month, 1), date(d.year, d.month, 15)
    last_day = monthrange(d.year, d.month)[1]
    return date(d.year, d.month, 16), date(d.year, d.month, last_day)


def enumerate_timesheets_global(db: Session) -> List[Tuple[int, date, date, str]]:
    """List all timesheet periods as (id, start, end, display_name) tuples.

    Ordered by start, end, then id; a period without a name gets a
    "start..end" ISO-date label.
    """
    periods: List[TimesheetPeriod] = (
        db.query(TimesheetPeriod)
        .order_by(
            TimesheetPeriod.period_start.asc(),
            TimesheetPeriod.period_end.asc(),
            TimesheetPeriod.id.asc(),
        )
        .all()
    )
    result: List[Tuple[int, date, date, str]] = []
    for ts in periods:
        label = ts.name or f"{ts.period_start.isoformat()}..{ts.period_end.isoformat()}"
        result.append((ts.id, ts.period_start, ts.period_end, label))
    return result


# ---------------------------
# Viewer/print data shaping
# ---------------------------
@dataclass
class RowOut:
    """One rendered timesheet row, derived from a single TimeEntry."""

    entry_id: int
    work_date: date
    clock_in: Optional[datetime]
    clock_out: Optional[datetime]
    break_hours: Decimal
    total_hours: Decimal
    pto_hours: Decimal
    pto_type: Optional[str]
    holiday_hours: Decimal
    bereavement_hours: Decimal
    # worked (total - break, clamped at 0) + PTO + holiday + bereavement
    hours_paid: Decimal
    # PTO hours present but pto_type blank -> flag for admin review
    needs_pto_review: bool = False
    # shift longer than 10 hours -> flag for admin review
    needs_long_shift_review: bool = False


@dataclass
class Totals:
    """Period-level hour totals for one employee's timesheet."""

    regular: Decimal
    pto: Decimal
    holiday: Decimal
    bereavement: Decimal
    overtime: Decimal
    paid_total: Decimal


@dataclass
class WeekSummary:
    """Per-week worked-hour breakdown: all = reg + ot."""

    label: str
    all: Decimal  # field named "all" intentionally (template-facing); shadows no local
    reg: Decimal
    ot: Decimal


@dataclass
class Grouped:
    """Everything the timesheet viewer/print template needs."""

    rows: List[RowOut]
    totals: Totals
    weekly_summary: List[WeekSummary]


def _to_datetime(d: date, t) -> Optional[datetime]:
    """Best-effort conversion of a clock value *t* into a datetime on day *d*.

    Accepts datetime (returned as-is), time (combined with *d*), or strings in
    several common formats; the parsed time-of-day is pinned to *d*'s date.
    Returns None when nothing matches.
    """
    if t is None:
        return None
    if isinstance(t, datetime):
        return t
    if isinstance(t, time):
        return datetime.combine(d, t)
    s = str(t).strip()
    # Try full timestamps first, then bare times (12h and 24h variants).
    for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S", "%I:%M:%S %p", "%H:%M:%S", "%H:%M"):
        try:
            parsed = datetime.strptime(s, fmt)
            return parsed.replace(year=d.year, month=d.month, day=d.day)
        except Exception:
            continue
    return None


def group_entries_for_timesheet(
    entries: List[TimeEntry],
    period_start: date,
    period_end: date,
    week_map: Optional[Dict[date, int]] = None,
    carry_over_hours: float = 0.0,
) -> Grouped:
    """Shape raw TimeEntry rows into viewer rows, weekly summaries, and totals.

    Args:
        entries: the employee's entries for the period.
        period_start / period_end: period bounds (currently unused here;
            kept for interface stability — callers pass them).
        week_map: maps work_date -> 1-based week index within the period.
            Dates missing from the map are excluded from weekly/OT math.
            NOTE(review): totals are derived from the weekly summary, so
            unmapped dates also drop out of ``totals`` — confirm callers
            always supply a complete map.
        carry_over_hours: hours already counted against week 1's 40-hour
            regular cap (presumably worked in the prior period's partial
            week — TODO confirm against caller).

    Overtime model: per week, regular is capped at 40 hours (week 1's cap is
    reduced by the carry-over); anything above the cap is overtime.
    """
    rows: List[RowOut] = []
    week_totals: Dict[int, Decimal] = {}
    sum_pto = D(0)
    sum_holiday = D(0)
    sum_bereavement = D(0)

    # Stable sort by date, then by clock-in (entries without one sort first).
    def _sort_key(x: TimeEntry):
        return (x.work_date, _to_datetime(x.work_date, x.clock_in) or datetime.min)

    for e in sorted(entries, key=_sort_key):
        total = D(e.total_hours)
        brk = D(e.break_hours)
        pto = D(e.pto_hours)
        hol = D(e.holiday_hours)
        ber = D(e.bereavement_hours)

        worked = total - brk
        if worked < D(0):
            worked = D(0)  # never let a bad break value produce negative worked time

        hours_paid_row = q2(worked + pto + hol + ber)

        rows.append(
            RowOut(
                entry_id=e.id,
                work_date=e.work_date,
                clock_in=e.clock_in,
                clock_out=e.clock_out,
                break_hours=q2(brk),
                total_hours=q2(total),
                pto_hours=q2(pto),
                pto_type=(e.pto_type or None),
                holiday_hours=q2(hol),
                bereavement_hours=q2(ber),
                hours_paid=hours_paid_row,
                needs_pto_review=(pto > D(0) and not (e.pto_type or "").strip()),
                needs_long_shift_review=(total > D(10)),
            )
        )

        sum_pto += pto
        sum_holiday += hol
        sum_bereavement += ber

        wk = (week_map or {}).get(e.work_date)
        if wk is not None:
            week_totals[wk] = week_totals.get(wk, D(0)) + worked

    # Weekly summary: split each week's worked hours into regular vs overtime.
    weekly_summary: List[WeekSummary] = []
    carry = D(carry_over_hours)
    for wk in sorted(week_totals):
        worked_w = week_totals[wk]
        reg_cap = D(40) - (carry if wk == 1 else D(0))
        if reg_cap < D(0):
            reg_cap = D(0)
        reg_w = worked_w if worked_w <= reg_cap else reg_cap
        ot_w = worked_w - reg_w
        weekly_summary.append(
            WeekSummary(label=f"Week {wk}", all=q2(worked_w), reg=q2(reg_w), ot=q2(ot_w))
        )

    # Totals are rolled up from the (already-rounded) weekly figures.
    worked_total = q2(sum((w.all for w in weekly_summary), D(0)))
    regular_total = q2(sum((w.reg for w in weekly_summary), D(0)))
    overtime_total = q2(sum((w.ot for w in weekly_summary), D(0)))
    paid_total = q2(worked_total + sum_pto + sum_holiday + sum_bereavement)

    totals = Totals(
        regular=regular_total,
        pto=q2(sum_pto),
        holiday=q2(sum_holiday),
        bereavement=q2(sum_bereavement),
        overtime=overtime_total,
        paid_total=paid_total,
    )

    return Grouped(rows=rows, totals=totals, weekly_summary=weekly_summary)


# ---------------------------
# Duplicate merging
# ---------------------------
def _sum_gaps(intervals: List[Tuple[datetime, datetime]]) -> Decimal:
    """Total gap time, in hours (2dp), between non-overlapping parts of *intervals*.

    Intervals are sorted by start; only time strictly between the running
    latest end and the next start counts as a gap (overlaps contribute 0).
    """
    if not intervals:
        return D(0)
    intervals = sorted(intervals, key=lambda x: x[0])
    gaps_hours = D(0)
    current_end = intervals[0][1]
    for i in range(1, len(intervals)):
        start_i, end_i = intervals[i]
        if start_i > current_end:
            gaps_hours += D((start_i - current_end).total_seconds()) / D(3600)
        if end_i > current_end:
            current_end = end_i
    return q2(gaps_hours)


def merge_duplicates_for_timesheet(db: Session, employee_id: int, timesheet_id: int) -> int:
    """Collapse multiple entries per (employee, timesheet, date) into one.

    For each date with duplicates: the kept entry (earliest clock-in) gets the
    overall span (earliest in -> latest out) as total_hours, inter-interval
    gaps as break_hours, and summed PTO/holiday/bereavement; the other rows
    are deleted. Commits once at the end if anything merged.

    Returns:
        Number of dates that were merged.
    """
    dup_dates = [
        r[0]
        for r in (
            db.query(TimeEntry.work_date)
            .filter(TimeEntry.timesheet_id == timesheet_id, TimeEntry.employee_id == employee_id)
            .group_by(TimeEntry.work_date)
            .having(func.count(TimeEntry.id) > 1)
            .all()
        )
    ]

    merged_count = 0

    for d in dup_dates:
        entries: List[TimeEntry] = (
            db.query(TimeEntry)
            .filter(
                TimeEntry.timesheet_id == timesheet_id,
                TimeEntry.employee_id == employee_id,
                TimeEntry.work_date == d,
            )
            .order_by(TimeEntry.clock_in.asc())
            .all()
        )
        if len(entries) < 2:
            continue

        # Collect only well-formed (in < out) intervals for span/gap math.
        intervals: List[Tuple[datetime, datetime]] = []
        for e in entries:
            ci = _to_datetime(d, e.clock_in)
            co = _to_datetime(d, e.clock_out)
            if ci and co and co > ci:
                intervals.append((ci, co))

        earliest_in: Optional[datetime] = min((s for s, _ in intervals), default=None)
        latest_out: Optional[datetime] = max((e for _, e in intervals), default=None)

        span_hours = D(0)
        if earliest_in and latest_out and latest_out > earliest_in:
            span_hours = q2(D((latest_out - earliest_in).total_seconds()) / D(3600))

        break_hours = _sum_gaps(intervals)
        pto_hours = q2(sum(D(e.pto_hours) for e in entries))
        holiday_hours = q2(sum(D(e.holiday_hours) for e in entries))
        bereavement_hours = q2(sum(D(e.bereavement_hours) for e in entries))
        # First non-empty PTO type wins.
        pto_type = next((e.pto_type for e in entries if e.pto_type), None)

        worked_hours = span_hours - break_hours
        if worked_hours < D(0):
            worked_hours = D(0)
        hours_paid = q2(worked_hours + pto_hours + holiday_hours + bereavement_hours)

        def keeper_key(e: TimeEntry):
            ci = _to_datetime(d, e.clock_in)
            return ci or datetime.combine(d, time(0, 0))

        keeper = min(entries, key=keeper_key)

        # Persist Decimal directly (models use Numeric now).
        keeper.total_hours = span_hours  # span includes in-shift gaps; breaks tracked separately
        keeper.break_hours = break_hours
        keeper.pto_hours = pto_hours
        keeper.pto_type = pto_type
        keeper.holiday_hours = holiday_hours
        keeper.bereavement_hours = bereavement_hours
        keeper.hours_paid = hours_paid

        if earliest_in:
            keeper.clock_in = earliest_in
        if latest_out:
            keeper.clock_out = latest_out

        for e in entries:
            if e.id != keeper.id:
                db.delete(e)

        merged_count += 1

    if merged_count:
        db.commit()

    return merged_count