commit 9794362f3965e9a8f070191e1ed0af853e10af9d Author: maddin Date: Sun Mar 22 12:55:55 2026 +0000 chore: initialize public repository diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..b9c4cb8 --- /dev/null +++ b/.env.example @@ -0,0 +1,21 @@ +APP_ENV=development +PORT=8000 +DB_URL=sqlite:///./data/stundentracker.db +SESSION_SECRET=change-this-in-production +COOKIE_SECURE=false +COOKIE_SAMESITE=lax +LOGIN_RATE_LIMIT_ATTEMPTS=5 +LOGIN_RATE_LIMIT_WINDOW_MINUTES=15 +DATA_ENCRYPTION_KEY= +PASSWORD_RESET_TOKEN_TTL_MINUTES=60 +MFA_CODE_TTL_MINUTES=10 +MFA_PENDING_TTL_MINUTES=10 +SMTP_TIMEOUT_SECONDS=15 +REGISTRATION_NOTIFY_EMAIL=admin@example.com +APP_NAME=Stundenfuchs +APP_TITLE=Stundenfuchs +APP_VERSION=1.5.12 +EMAIL_VERIFICATION_REQUIRED=true +EMAIL_VERIFICATION_TOKEN_TTL_MINUTES=1440 +BOOTSTRAP_ADMIN_EMAIL= +FORWARDED_ALLOW_IPS=127.0.0.1,::1 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..0f1f402 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,40 @@ +name: CI + +on: + push: + pull_request: + +jobs: + checks: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'npm' + + - name: Install Python deps + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt djlint pytest + + - name: Install Node deps + run: npm ci + + - name: Run policy checks + run: make policy + + - name: Run linters + run: make lint + + - name: Run tests + run: make test diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..1358158 --- /dev/null +++ b/.gitignore @@ -0,0 +1,10 @@ +__pycache__/ +*.pyc +.pytest_cache/ +.venv/ +.env +*.db +*.sqlite +*.sqlite3 +node_modules/ +data/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 
0000000..4d33d22 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,23 @@ +repos: + - repo: local + hooks: + - id: policy-checks + name: Policy Checks + entry: python3 tools/policy_checks.py + language: system + pass_filenames: false + - id: djlint + name: djlint (jinja/html) + entry: python3 -m djlint app/templates --check + language: system + pass_filenames: false + - id: stylelint + name: stylelint (css) + entry: npm run lint:css + language: system + pass_filenames: false + - id: pytest-fast + name: pytest + entry: python3 -m pytest -q + language: system + pass_filenames: false diff --git a/.stylelintrc.json b/.stylelintrc.json new file mode 100644 index 0000000..5e2ba04 --- /dev/null +++ b/.stylelintrc.json @@ -0,0 +1,34 @@ +{ + "extends": ["stylelint-config-standard"], + "rules": { + "color-no-hex": true, + "selector-class-pattern": null, + "media-feature-range-notation": null, + "color-hex-length": null, + "custom-property-empty-line-before": null, + "declaration-property-unit-disallowed-list": { + "margin": ["px"], + "margin-top": ["px"], + "margin-right": ["px"], + "margin-bottom": ["px"], + "margin-left": ["px"], + "padding": ["px"], + "padding-top": ["px"], + "padding-right": ["px"], + "padding-bottom": ["px"], + "padding-left": ["px"], + "gap": ["px"], + "row-gap": ["px"], + "column-gap": ["px"] + } + }, + "overrides": [ + { + "files": ["app/static/css/tokens.css"], + "rules": { + "color-no-hex": null, + "declaration-property-unit-disallowed-list": null + } + } + ] +} diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..5362187 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,24 @@ +FROM python:3.12-slim + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 + +WORKDIR /app + +RUN apt-get update \ + && apt-get install -y --no-install-recommends ca-certificates \ + && rm -rf /var/lib/apt/lists/* + +COPY requirements.txt /app/requirements.txt +RUN pip install --no-cache-dir -r /app/requirements.txt + +COPY app /app/app +COPY img /app/img 
+COPY deploy /app/deploy +COPY .env.example /app/.env.example + +RUN mkdir -p /app/data + +EXPOSE 8000 + +CMD ["sh", "-c", "uvicorn app.main:app --host 0.0.0.0 --port ${PORT:-8000} --proxy-headers --forwarded-allow-ips='*'"] diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..44bffa8 --- /dev/null +++ b/Makefile @@ -0,0 +1,30 @@ +PUBLIC_DIR ?= ../stundentracker-public + +.PHONY: policy lint test ci public-export public-audit version-suggest version-auto version-tag-live + +policy: + python3 tools/policy_checks.py + +lint: + python3 -m djlint app/templates --check + npm run lint:css + +test: + python3 -m pytest -q + +ci: policy lint test + +public-export: + python3 tools/public_repo.py export $(PUBLIC_DIR) --force --git-init + +public-audit: + python3 tools/public_repo.py audit $(PUBLIC_DIR) + +version-suggest: + python3 tools/versioning.py suggest + +version-auto: + python3 tools/versioning.py apply-auto + +version-tag-live: + python3 tools/versioning.py tag-live diff --git a/README.md b/README.md new file mode 100644 index 0000000..010bc3f --- /dev/null +++ b/README.md @@ -0,0 +1,61 @@ +# Stundenfuchs + +Stundenfuchs ist eine FastAPI-Anwendung zur Erfassung von Arbeitszeiten mit Benutzerkonten, Wochen- und Monatsansicht, Exportfunktionen und einer serverseitig abgesicherten Web-UI. 
+ +## Features + +- Registrierung und Login +- Benutzerbezogene Datentrennung +- Wochen- und Monatsansicht +- Excel-, PDF- und Backup-Export +- Backup-Import +- Urlaub, Feiertage und Krankheitstage +- Mehrfacheingaben für Zeiträume +- E-Mail-Verifikation, Passwort-Reset und MFA +- Admin-Bereich für Benutzer- und Systemeinstellungen + +## Stack + +- FastAPI +- SQLAlchemy +- SQLite +- Jinja2 Templates +- Vanilla JavaScript +- CSS mit zentralen Design-Tokens + +## Lokal starten + +```bash +python3 -m venv .venv +source .venv/bin/activate +pip install -r requirements.txt +cp .env.example .env +python -m app.main +``` + +Danach ist die App unter `http://localhost:8000` erreichbar. + +## Docker + +```bash +cp .env.example .env +docker compose up -d --build +``` + +## Qualitätssicherung + +```bash +make policy +make lint +make test +make ci +``` + +## Dokumentation + +- Entwicklung: `docs/DEVELOPMENT.md` +- Sicherheit: `docs/SECURITY.md` + +## Hinweise zur Veröffentlichung + +Dieses Public-Repository enthält bewusst keine produktionsspezifische Infrastruktur, keine internen Betriebsanweisungen und keine Stage-/Live-Dokumentation. Für eine öffentliche Veröffentlichung sollte zusätzlich eine passende `LICENSE` ergänzt werden. 
diff --git a/VERSION b/VERSION new file mode 100644 index 0000000..41336a1 --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +1.5.12 diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/auth.py b/app/auth.py new file mode 100644 index 0000000..1183e43 --- /dev/null +++ b/app/auth.py @@ -0,0 +1,63 @@ +from datetime import datetime, timedelta, timezone +import secrets + +from passlib.context import CryptContext +from sqlalchemy import func, select +from sqlalchemy.orm import Session + +from app.models import LoginAttempt, User + + +pwd_context = CryptContext(schemes=["argon2"], deprecated="auto") + + +def hash_password(password: str) -> str: + return pwd_context.hash(password) + + +def verify_password(password: str, password_hash: str) -> bool: + return pwd_context.verify(password, password_hash) + + +def new_csrf_token() -> str: + return secrets.token_urlsafe(32) + + +def register_failed_attempt(db: Session, email: str, ip_address: str) -> None: + db.add(LoginAttempt(email=email.lower(), ip_address=ip_address, success=False)) + db.commit() + + +def register_successful_attempt(db: Session, email: str, ip_address: str) -> None: + db.add(LoginAttempt(email=email.lower(), ip_address=ip_address, success=True)) + db.commit() + + +def is_login_blocked( + db: Session, + email: str, + ip_address: str, + max_attempts: int, + window_minutes: int, +) -> tuple[bool, int]: + cutoff = datetime.now(timezone.utc) - timedelta(minutes=window_minutes) + + stmt = ( + select(func.count(LoginAttempt.id)) + .where( + LoginAttempt.success.is_(False), + LoginAttempt.created_at >= cutoff, + (LoginAttempt.email == email.lower()) | (LoginAttempt.ip_address == ip_address), + ) + ) + count = db.execute(stmt).scalar_one() + + if count >= max_attempts: + return True, window_minutes + + return False, 0 + + +def find_user_by_email(db: Session, email: str) -> User | None: + stmt = select(User).where(User.email == email.lower()) + return 
db.execute(stmt).scalar_one_or_none() diff --git a/app/config.py b/app/config.py new file mode 100644 index 0000000..7b42cd0 --- /dev/null +++ b/app/config.py @@ -0,0 +1,59 @@ +from functools import lru_cache +from pathlib import Path + +from pydantic import Field +from pydantic_settings import BaseSettings, SettingsConfigDict + + +ROOT_DIR = Path(__file__).resolve().parents[1] +VERSION_FILE = ROOT_DIR / "VERSION" + + +def load_default_app_version() -> str: + try: + value = VERSION_FILE.read_text(encoding="utf-8").strip() + except FileNotFoundError: + return "1.0.0" + return value or "1.0.0" + + +class Settings(BaseSettings): + model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", extra="ignore") + + app_env: str = Field(default="development", alias="APP_ENV") + port: int = Field(default=8000, alias="PORT") + db_url: str = Field(default="sqlite:///./data/stundentracker.db", alias="DB_URL") + session_secret: str = Field(default="change-this-in-production", alias="SESSION_SECRET") + cookie_secure: bool = Field(default=False, alias="COOKIE_SECURE") + cookie_samesite: str = Field(default="lax", alias="COOKIE_SAMESITE") + login_rate_limit_attempts: int = Field(default=5, alias="LOGIN_RATE_LIMIT_ATTEMPTS") + login_rate_limit_window_minutes: int = Field(default=15, alias="LOGIN_RATE_LIMIT_WINDOW_MINUTES") + data_encryption_key: str | None = Field(default=None, alias="DATA_ENCRYPTION_KEY") + password_reset_token_ttl_minutes: int = Field(default=60, alias="PASSWORD_RESET_TOKEN_TTL_MINUTES") + mfa_code_ttl_minutes: int = Field(default=10, alias="MFA_CODE_TTL_MINUTES") + mfa_pending_ttl_minutes: int = Field(default=10, alias="MFA_PENDING_TTL_MINUTES") + smtp_timeout_seconds: int = Field(default=15, alias="SMTP_TIMEOUT_SECONDS") + registration_notify_email: str = Field(default="admin@example.com", alias="REGISTRATION_NOTIFY_EMAIL") + app_name: str = Field(default="Stundenfuchs", alias="APP_NAME") + app_title: str | None = Field(default=None, 
alias="APP_TITLE") + app_version: str = Field(default=load_default_app_version(), alias="APP_VERSION") + email_verification_required: bool = Field(default=True, alias="EMAIL_VERIFICATION_REQUIRED") + email_verification_token_ttl_minutes: int = Field(default=60 * 24, alias="EMAIL_VERIFICATION_TOKEN_TTL_MINUTES") + bootstrap_admin_email: str | None = Field(default=None, alias="BOOTSTRAP_ADMIN_EMAIL") + forwarded_allow_ips: str = Field(default="127.0.0.1,::1", alias="FORWARDED_ALLOW_IPS") + + @property + def is_production(self) -> bool: + return self.app_env.lower() == "production" + + @property + def resolved_app_title(self) -> str: + value = (self.app_title or "").strip() + if value: + return value + return self.app_name + + +@lru_cache(maxsize=1) +def get_settings() -> Settings: + return Settings() diff --git a/app/database.py b/app/database.py new file mode 100644 index 0000000..98256f2 --- /dev/null +++ b/app/database.py @@ -0,0 +1,39 @@ +from collections.abc import Generator + +from sqlalchemy import create_engine +from sqlalchemy.engine import Engine +from sqlalchemy.orm import Session, declarative_base, sessionmaker + + +Base = declarative_base() + +_ENGINE: Engine | None = None +_SessionLocal: sessionmaker[Session] | None = None + + +def init_engine(db_url: str) -> Engine: + global _ENGINE, _SessionLocal + + connect_args = {} + if db_url.startswith("sqlite"): + connect_args["check_same_thread"] = False + + _ENGINE = create_engine(db_url, future=True, connect_args=connect_args) + _SessionLocal = sessionmaker(bind=_ENGINE, autoflush=False, autocommit=False, future=True) + return _ENGINE + + +def get_engine() -> Engine: + if _ENGINE is None: + raise RuntimeError("Database engine is not initialized") + return _ENGINE + + +def get_db() -> Generator[Session, None, None]: + if _SessionLocal is None: + raise RuntimeError("SessionLocal is not initialized") + db = _SessionLocal() + try: + yield db + finally: + db.close() diff --git a/app/jobs/__init__.py 
b/app/jobs/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/app/jobs/__init__.py @@ -0,0 +1 @@ + diff --git a/app/jobs/autofill_today.py b/app/jobs/autofill_today.py new file mode 100644 index 0000000..0e73187 --- /dev/null +++ b/app/jobs/autofill_today.py @@ -0,0 +1,28 @@ +from sqlalchemy.orm import Session + +from app.database import Base, get_engine, init_engine +from app.config import get_settings +from app.services.migrations import run_startup_migrations +from app.services.targets import ensure_all_users_have_default_target_rules +from app.services.auto_entries import sync_auto_entries_for_all_users + + +def main() -> None: + settings = get_settings() + init_engine(settings.db_url) + engine = get_engine() + Base.metadata.create_all(bind=engine) + run_startup_migrations(engine) + with Session(engine) as db: + ensure_all_users_have_default_target_rules(db) + result = sync_auto_entries_for_all_users(db=db) + db.commit() + print( + "auto_entry_sync users={users} created={created} deleted_future={deleted_future}".format( + **result + ) + ) + + +if __name__ == "__main__": + main() diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..1f4c659 --- /dev/null +++ b/app/main.py @@ -0,0 +1,6322 @@ +from datetime import date, datetime, timedelta, timezone +import json +import logging +import re +import secrets +from urllib.parse import urlencode, urlparse + +from fastapi import Depends, FastAPI, File, Form, HTTPException, Query, Request, UploadFile, status +from fastapi.responses import HTMLResponse, JSONResponse, RedirectResponse, Response +from fastapi.staticfiles import StaticFiles +from fastapi.templating import Jinja2Templates +from sqlalchemy import case, select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session +from starlette.middleware.sessions import SessionMiddleware + +from app.auth import ( + find_user_by_email, + hash_password, + is_login_blocked, + new_csrf_token, + 
register_failed_attempt, + register_successful_attempt, + verify_password, +) +from app.config import Settings, get_settings +from app.database import Base, get_db, get_engine, init_engine +from app.models import ( + EmailServerConfig, + ImportPreview, + OvertimeAdjustment, + PasswordResetToken, + SiteContent, + SpecialDayStatus, + SupportTicket, + TimeEntry, + User, + VacationPeriod, + WeeklyTargetRule, +) +from app.schemas import LoginRequest, MFAChallengeRequest, RegisterRequest, TimeEntryCreate, TimeEntryUpdate +from app.services.calculations import ( + aggregate_week, + automatic_break_minutes, + compute_net_minutes, + iso_week_bounds, + minutes_to_hhmm, + parse_time_to_minutes, +) +from app.services.exporters import build_export_rows, create_backup_export, create_excel_export, create_pdf_export +from app.services.importers import ( + BackupImportError, + IMPORT_MODE_REPLACE, + build_import_preview, + cleanup_import_previews, + create_import_preview_record, + execute_backup_import, + get_import_preview_record, + load_backup_payload_from_bytes, + parse_preview_payload, +) +from app.services.legal_content import ( + SITE_CONTENT_IMPRESSUM, + SITE_CONTENT_PRIVACY, + default_site_content_markdown, + normalize_markdown_input, + render_safe_markdown, + ticket_category_label, + ticket_category_options, + ticket_status_label, +) +from app.services.migrations import run_startup_migrations +from app.services.overtime import ( + compute_cumulative_overtime_minutes, + compute_cumulative_overtime_until_date, + compute_effective_span_totals, + compute_effective_week_totals, +) +from app.services.targets import ( + apply_weekly_target_change, + ensure_all_users_have_default_target_rules, + ensure_user_has_default_target_rule, + monday_of, + target_for_week, + target_map_for_weeks, + list_rules_for_user, +) +from app.services.vacations import collapse_dates_to_ranges, expand_vacation_dates, list_vacations_for_user +from app.services.workdays import parse_working_days_csv, 
serialize_working_days +from app.services.emailing import MailServerSettings, send_email +from app.services.auto_entries import ( + ENTRY_MODE_AUTO_UNTIL_TODAY, + ENTRY_MODE_MANUAL, + autofill_entries_for_range, + clear_auto_entry_skip_for_date, + clear_overtime_adjustment_for_date, + clear_special_status_for_date, + count_as_worktime_dates_for_user, + delete_future_auto_entries, + effective_non_working_dates_for_user, + get_user_working_days, + list_overtime_adjustments_for_user, + list_special_statuses_for_user, + mark_auto_entry_skip_for_date, + overtime_adjustment_map, + overtime_adjustment_minutes_map, + special_status_dates, + special_status_map, + sync_auto_entries_for_all_users, +) +from app.services.public_holidays import ( + GERMAN_STATE_OPTIONS, + list_public_holiday_dates, + normalize_german_state_code, +) +from app.services.security import ( + build_fernet, + build_totp_uri, + decrypt_secret, + encrypt_secret, + generate_numeric_code, + generate_reset_token, + generate_totp_secret, + hash_token, + normalize_otp_code, + utc_now, + verify_totp_code, +) + +logger = logging.getLogger("stundentracker.auth") + +WEEKDAY_NAMES_DE = ["Montag", "Dienstag", "Mittwoch", "Donnerstag", "Freitag", "Samstag", "Sonntag"] +WEEKDAY_SHORT_DE = ["Mo", "Di", "Mi", "Do", "Fr", "Sa", "So"] +SPECIAL_DAY_STATUS_HOLIDAY = "holiday" +SPECIAL_DAY_STATUS_SICK = "sick" +OVERTIME_ADJUSTMENT_LABEL = "Stundenausgleich" +DAY_STATUS_QUERY_VACATION = "vacation" +SPECIAL_DAY_STATUS_LABELS = { + SPECIAL_DAY_STATUS_HOLIDAY: "Feiertag", + SPECIAL_DAY_STATUS_SICK: "Krankheitstag", +} +SUPPORT_TICKET_STATUS_OPEN = "open" +SUPPORT_TICKET_STATUS_CLOSED = "closed" +SUPPORT_TICKET_RATE_LIMIT_WINDOW = timedelta(minutes=30) +SUPPORT_TICKET_RATE_LIMIT_MAX_PER_IP = 3 +SUPPORT_TICKET_RATE_LIMIT_MAX_PER_EMAIL = 5 +SUPPORT_TICKET_MIN_FORM_SECONDS = 3 +DAY_STATUS_QUERY_LABELS = { + DAY_STATUS_QUERY_VACATION: "Urlaub", + SPECIAL_DAY_STATUS_HOLIDAY: "Feiertag", + SPECIAL_DAY_STATUS_SICK: "Krankheit", +} 
+AUTO_HOLIDAY_NOTE_PREFIX = "AUTO_FEIERTAG:" +MFA_METHOD_NONE = "none" +MFA_METHOD_TOTP = "totp" +MFA_METHOD_EMAIL = "email" +MFA_METHOD_LABELS = { + MFA_METHOD_NONE: "Keine 2FA", + MFA_METHOD_TOTP: "Authenticator-App (TOTP)", + MFA_METHOD_EMAIL: "E-Mail Code", +} + + +def create_app(settings_override: Settings | None = None) -> FastAPI: + settings = settings_override or get_settings() + asset_version = datetime.utcnow().strftime("%Y%m%d%H%M%S") + encryption_secret_source = settings.data_encryption_key or settings.session_secret + fernet = build_fernet(encryption_secret_source) + bootstrap_admin_email = (settings.bootstrap_admin_email or "").strip().lower() + + def is_bootstrap_admin_identity(email: str) -> bool: + if not bootstrap_admin_email: + return False + return email.lower().strip() == bootstrap_admin_email + + def ensure_bootstrap_admin(db: Session) -> None: + if not bootstrap_admin_email: + return + candidate = find_user_by_email(db, bootstrap_admin_email) + if candidate and (candidate.role != "admin" or not candidate.is_active): + candidate.role = "admin" + candidate.is_active = True + db.add(candidate) + db.commit() + logger.info("bootstrap_admin_assigned email=%s", candidate.email) + + init_engine(settings.db_url) + Base.metadata.create_all(bind=get_engine()) + run_startup_migrations(get_engine()) + with Session(get_engine()) as startup_db: + ensure_all_users_have_default_target_rules(startup_db) + ensure_bootstrap_admin(startup_db) + + app = FastAPI(title=settings.app_name) + app.mount("/static", StaticFiles(directory="app/static"), name="static") + app.mount("/img", StaticFiles(directory="img"), name="img") + + app.add_middleware( + SessionMiddleware, + secret_key=settings.session_secret, + https_only=settings.cookie_secure, + same_site=settings.cookie_samesite, + max_age=60 * 60 * 24 * 7, + ) + + @app.middleware("http") + async def add_security_headers(request: Request, call_next): + response = await call_next(request) + 
response.headers.setdefault("X-Content-Type-Options", "nosniff") + response.headers.setdefault("X-Frame-Options", "DENY") + response.headers.setdefault("Referrer-Policy", "strict-origin-when-cross-origin") + response.headers.setdefault( + "Content-Security-Policy", + "default-src 'self'; style-src 'self'; form-action 'self'; frame-ancestors 'none'; base-uri 'self'", + ) + if response.headers.get("content-type", "").startswith("text/html"): + response.headers.setdefault("Cache-Control", "no-store") + return response + + templates = Jinja2Templates(directory="app/templates") + + def weekday_name_de(day: date, *, short: bool = False) -> str: + names = WEEKDAY_SHORT_DE if short else WEEKDAY_NAMES_DE + return names[day.weekday()] + + def build_header_cumulative_minutes(*, db: Session, user: User) -> int: + today = date.today() + working_days = get_user_working_days(user) + rules = list_rules_for_user(db, user.id) + + entries_stmt = ( + select(TimeEntry) + .where(TimeEntry.user_id == user.id, TimeEntry.date <= today) + .order_by(TimeEntry.date.asc()) + ) + entries = db.execute(entries_stmt).scalars().all() + + vacations_stmt = ( + select(VacationPeriod) + .where(VacationPeriod.user_id == user.id, VacationPeriod.start_date <= today) + .order_by(VacationPeriod.start_date.asc()) + ) + vacations = db.execute(vacations_stmt).scalars().all() + special_statuses = list_special_statuses_for_user(db, user.id, date(1970, 1, 1), today) + vacation_dates = expand_vacation_dates(vacations, date(1970, 1, 1), today, relevant_weekdays=working_days) + non_working_dates = effective_non_working_dates_for_user(user=user, special_statuses=special_statuses) + count_as_worktime_dates = count_as_worktime_dates_for_user( + user=user, + vacation_dates=vacation_dates, + special_statuses=special_statuses, + ) + overtime_adjustments = list_overtime_adjustments_for_user(db, user.id, date(1970, 1, 1), today) + + return compute_cumulative_overtime_until_date( + entries=entries, + rules=rules, + 
weekly_target_fallback=user.weekly_target_minutes, + vacation_periods=vacations, + non_working_dates=non_working_dates, + count_as_worktime_dates=count_as_worktime_dates, + overtime_adjustment_minutes_by_date=overtime_adjustment_minutes_map(overtime_adjustments), + as_of_date=today, + overtime_start_date=user.overtime_start_date, + overtime_expiry_days=user.overtime_expiry_days, + expire_negative_overtime=user.expire_negative_overtime, + relevant_weekdays=working_days, + ) + + def build_header_vacation_days(*, db: Session, user: User) -> dict[str, int]: + if not user.vacation_show_in_header: + return { + "year": date.today().year, + "total": 0, + "used": 0, + "remaining": 0, + } + current_year = date.today().year + year_start = date(current_year, 1, 1) + year_end = date(current_year, 12, 31) + working_days = get_user_working_days(user) + + vacations = list_vacations_for_user(db, user.id, year_start, year_end) + vacation_dates = expand_vacation_dates(vacations, year_start, year_end, relevant_weekdays=working_days) + used_days = len([day for day in vacation_dates if day.weekday() in working_days]) + total_days = max(0, user.vacation_days_total or 0) + remaining_days = max(0, total_days - used_days) + + return { + "year": current_year, + "total": total_days, + "used": used_days, + "remaining": remaining_days, + } + + def build_header_workhours_counter_minutes(*, db: Session, user: User) -> int | None: + if not user.workhours_counter_show_in_header: + return None + if not user.workhours_counter_enabled: + return None + if user.workhours_counter_start_date is None or user.workhours_counter_end_date is None: + return None + if user.workhours_counter_end_date < user.workhours_counter_start_date: + return None + return compute_workhours_counter_minutes( + db=db, + user=user, + from_date=user.workhours_counter_start_date, + to_date=user.workhours_counter_end_date, + ) + + def build_main_nav_urls(request: Request, user: User) -> tuple[str, str]: + selected_date: date | None 
= None + date_value = request.query_params.get("date") + if date_value: + try: + selected_date = parse_date_query(date_value) + except HTTPException: + selected_date = None + + if selected_date is None: + month_value = request.query_params.get("month") + if month_value: + try: + selected_date = datetime.strptime(month_value, "%Y-%m").date() + except ValueError: + selected_date = None + + if selected_date is None: + selected_date = date.today() + + week_url = f"/dashboard?{urlencode({'date': date.today().isoformat()})}" + month_view_mode = request.query_params.get("view") or user.preferred_month_view_mode or "flat" + month_url = f"/month?{urlencode({'month': selected_date.strftime('%Y-%m'), 'view': month_view_mode})}" + return week_url, month_url + + def build_context( + request: Request, + *, + user: User | None = None, + db: Session | None = None, + **extra: object, + ) -> dict: + if user: + main_nav_week_url, main_nav_month_url = build_main_nav_urls(request, user) + extra.setdefault("main_nav_week_url", main_nav_week_url) + extra.setdefault("main_nav_month_url", main_nav_month_url) + needs_cumulative = "header_cumulative_minutes" not in extra + needs_vacation = ( + "header_vacation_days_total" not in extra + or "header_vacation_days_used" not in extra + or "header_vacation_days_remaining" not in extra + or "header_vacation_year" not in extra + ) + needs_workhours_counter = ( + "header_workhours_counter_minutes" not in extra + or "header_workhours_counter_visible" not in extra + ) + + if needs_cumulative or needs_vacation or needs_workhours_counter: + if db is None: + with Session(get_engine()) as context_db: + if needs_cumulative: + extra["header_cumulative_minutes"] = build_header_cumulative_minutes(db=context_db, user=user) + if needs_vacation: + vacation_data = build_header_vacation_days(db=context_db, user=user) + extra["header_vacation_days_total"] = vacation_data["total"] + extra["header_vacation_days_used"] = vacation_data["used"] + 
extra["header_vacation_days_remaining"] = vacation_data["remaining"] + extra["header_vacation_year"] = vacation_data["year"] + extra["header_vacation_visible"] = user.vacation_show_in_header + if needs_workhours_counter: + extra["header_workhours_counter_minutes"] = build_header_workhours_counter_minutes( + db=context_db, user=user + ) + extra["header_workhours_counter_visible"] = user.workhours_counter_show_in_header + else: + if needs_cumulative: + extra["header_cumulative_minutes"] = build_header_cumulative_minutes(db=db, user=user) + if needs_vacation: + vacation_data = build_header_vacation_days(db=db, user=user) + extra["header_vacation_days_total"] = vacation_data["total"] + extra["header_vacation_days_used"] = vacation_data["used"] + extra["header_vacation_days_remaining"] = vacation_data["remaining"] + extra["header_vacation_year"] = vacation_data["year"] + extra["header_vacation_visible"] = user.vacation_show_in_header + if needs_workhours_counter: + extra["header_workhours_counter_minutes"] = build_header_workhours_counter_minutes(db=db, user=user) + extra["header_workhours_counter_visible"] = user.workhours_counter_show_in_header + extra.setdefault("header_vacation_visible", user.vacation_show_in_header) + + context = { + "request": request, + "user": user, + "csrf_token": ensure_csrf_token(request), + "minutes_to_hhmm": minutes_to_hhmm, + "weekday_name_de": weekday_name_de, + "asset_version": asset_version, + "app_name": settings.app_name, + "app_env": settings.app_env, + "app_title": settings.resolved_app_title, + "app_version": settings.app_version, + "today_date": date.today(), + } + context.update(extra) + return context + + def get_client_ip(request: Request) -> str: + if request.client and request.client.host: + return request.client.host + + return "unknown" + + def ensure_csrf_token(request: Request) -> str: + token = request.session.get("csrf_token") + if not token: + token = new_csrf_token() + request.session["csrf_token"] = token + return 
def verify_csrf(request: Request, token: str | None) -> None:
    """Validate a submitted CSRF token against the session; raise 403 on mismatch."""
    expected = request.session.get("csrf_token")
    # compare_digest avoids timing side-channels on the token comparison.
    if not expected or not token or not secrets.compare_digest(expected, token):
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="CSRF validation failed")


def login_user(request: Request, user: User) -> str:
    """Start a fresh authenticated session for *user* and return its new CSRF token."""
    # Clearing first prevents session fixation: nothing from the anonymous
    # session survives into the authenticated one.
    request.session.clear()
    request.session["user_id"] = user.id
    csrf_token = new_csrf_token()
    request.session["csrf_token"] = csrf_token
    return csrf_token


def get_current_user(request: Request, db: Session) -> User | None:
    """Return the active user bound to the session, or None when not logged in."""
    user_id = request.session.get("user_id")
    if not user_id:
        return None
    stmt = select(User).where(User.id == user_id, User.is_active.is_(True))
    return db.execute(stmt).scalar_one_or_none()


def require_user(request: Request, db: Session) -> User:
    """Return the current user or raise 401 when the session is not authenticated."""
    user = get_current_user(request, db)
    if not user:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Not authenticated")
    return user


def require_admin(request: Request, db: Session) -> User:
    """Return the current user, additionally requiring the admin role (403 otherwise)."""
    user = require_user(request, db)
    if user.role != "admin":
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Adminrechte erforderlich")
    return user


def user_public_payload(user: User, csrf_token: str) -> dict:
    """Serialize the user's client-visible settings plus the session CSRF token."""
    return {
        "id": user.id,
        "email": user.email,
        "email_verified": user.email_verified,
        "weekly_target_minutes": user.weekly_target_minutes,
        "role": user.role,
        "working_days": sorted(get_user_working_days(user)),
        "count_vacation_as_worktime": user.count_vacation_as_worktime,
        "count_holiday_as_worktime": user.count_holiday_as_worktime,
        "count_sick_as_worktime": user.count_sick_as_worktime,
        "automatic_break_rules_enabled": user.automatic_break_rules_enabled,
        "default_break_minutes": user.default_break_minutes,
        "preferred_home_view": user.preferred_home_view,
        "preferred_month_view_mode": user.preferred_month_view_mode,
        "entry_mode": user.entry_mode,
        "overtime_start_date": user.overtime_start_date.isoformat() if user.overtime_start_date else None,
        "overtime_expiry_days": user.overtime_expiry_days,
        "expire_negative_overtime": user.expire_negative_overtime,
        "vacation_days_total": user.vacation_days_total,
        "vacation_show_in_header": user.vacation_show_in_header,
        "workhours_counter_enabled": user.workhours_counter_enabled,
        "workhours_counter_show_in_header": user.workhours_counter_show_in_header,
        "workhours_counter_start_date": (
            user.workhours_counter_start_date.isoformat() if user.workhours_counter_start_date else None
        ),
        "workhours_counter_end_date": (
            user.workhours_counter_end_date.isoformat() if user.workhours_counter_end_date else None
        ),
        "workhours_counter_manual_offset_minutes": user.workhours_counter_manual_offset_minutes,
        "workhours_counter_target_minutes": user.workhours_counter_target_minutes,
        "workhours_counter_target_email_enabled": user.workhours_counter_target_email_enabled,
        "federal_state": user.federal_state,
        "mfa_method": user.mfa_method,
        "csrf_token": csrf_token,
    }


def ensure_utc_datetime(value: datetime) -> datetime:
    """Coerce *value* to an aware UTC datetime (naive values are assumed UTC)."""
    if value.tzinfo is None:
        return value.replace(tzinfo=timezone.utc)
    return value.astimezone(timezone.utc)


def get_email_config(db: Session) -> EmailServerConfig | None:
    """Return the first (oldest) stored SMTP configuration row, if any."""
    stmt = select(EmailServerConfig).order_by(EmailServerConfig.id.asc())
    return db.execute(stmt).scalars().first()


def resolve_mail_settings(db: Session) -> MailServerSettings | None:
    """Build concrete mail settings from the stored config; None when unusable."""
    config = get_email_config(db)
    if not config:
        return None
    # Host and sender address are mandatory for sending mail at all.
    if not config.smtp_host.strip() or not config.from_email.strip():
        return None
    smtp_password = decrypt_secret(fernet, config.smtp_password_encrypted)
    return MailServerSettings(
        smtp_host=config.smtp_host.strip(),
        smtp_port=config.smtp_port,
        smtp_username=(config.smtp_username.strip() if config.smtp_username else None),
        smtp_password=smtp_password,
        from_email=config.from_email.strip(),
        from_name=config.from_name.strip() or settings.app_name,
        use_starttls=config.use_starttls,
        use_ssl=config.use_ssl,
        verify_tls=config.verify_tls,
        timeout_seconds=settings.smtp_timeout_seconds,
    )


def selected_admin_notification_recipients(db: Session) -> list[str]:
    """Return deduplicated admin notification addresses.

    Prefers the admins explicitly selected in the email config; falls back to
    the REGISTRATION_NOTIFY_EMAIL setting (comma-separated) when none match.
    """
    config = get_email_config(db)
    selected_admin_ids = parse_admin_id_csv(
        config.registration_admin_notify_admin_ids_csv if config else None
    )
    recipients: list[str] = []
    if selected_admin_ids:
        selected_admin_stmt = (
            select(User.email)
            .where(
                User.role == "admin",
                User.is_active.is_(True),
                User.id.in_(selected_admin_ids),
            )
            .order_by(User.created_at.asc())
        )
        recipients = [email.strip().lower() for email in db.execute(selected_admin_stmt).scalars().all() if email]

    if not recipients:
        recipients = [item.strip().lower() for item in settings.registration_notify_email.split(",") if item.strip()]

    # dict.fromkeys deduplicates while preserving order.
    return list(dict.fromkeys(recipients))


def get_site_content_record(db: Session, key: str) -> SiteContent | None:
    """Fetch the editable site-content row for *key* (e.g. imprint/privacy)."""
    stmt = select(SiteContent).where(SiteContent.key == key)
    return db.execute(stmt).scalar_one_or_none()


def get_site_content_markdown(db: Session, key: str) -> str:
    """Return stored markdown for *key*, falling back to the built-in default."""
    record = get_site_content_record(db, key)
    if record and record.markdown_text.strip():
        return record.markdown_text
    return default_site_content_markdown(key)


def upsert_site_content(db: Session, *, key: str, markdown_text: str, updated_by_user_id: str) -> None:
    """Create or update the site-content row for *key* with normalized markdown."""
    record = get_site_content_record(db, key)
    normalized = normalize_markdown_input(markdown_text)
    if record is None:
        record = SiteContent(key=key)
        db.add(record)
    record.markdown_text = normalized
    record.updated_by_user_id = updated_by_user_id


def support_ticket_ip_hash(request: Request) -> str:
    """Return a keyed hash of the client IP for privacy-preserving rate limiting."""
    return hash_token(f"support-ticket:{settings.session_secret}:{get_client_ip(request)}")


def issue_contact_form_started_at(request: Request) -> str:
    """Record 'now' in the session as the contact-form start time and return it."""
    started_at = utc_now().isoformat()
    request.session["contact_form_started_at"] = started_at
    return started_at


def contact_form_started_at(request: Request) -> str:
    """Return the stored contact-form start time, issuing one if absent."""
    started_at = request.session.get("contact_form_started_at")
    if not started_at:
        started_at = issue_contact_form_started_at(request)
    return started_at


def support_ticket_rate_limited(*, db: Session, ip_hash: str, email: str) -> bool:
    """True when either the IP hash or the email exceeded its ticket quota in the window."""
    cutoff = utc_now() - SUPPORT_TICKET_RATE_LIMIT_WINDOW
    by_ip = db.execute(
        select(SupportTicket.id).where(
            SupportTicket.source_ip_hash == ip_hash,
            SupportTicket.created_at >= cutoff,
        )
    ).scalars().all()
    by_email = db.execute(
        select(SupportTicket.id).where(
            SupportTicket.email == email.lower().strip(),
            SupportTicket.created_at >= cutoff,
        )
    ).scalars().all()
    return len(by_ip) >= SUPPORT_TICKET_RATE_LIMIT_MAX_PER_IP or len(by_email) >= SUPPORT_TICKET_RATE_LIMIT_MAX_PER_EMAIL


def send_support_ticket_notification(*, db: Session, ticket: SupportTicket) -> None:
    """Best-effort email notification to admins about a new support ticket."""
    mail_settings = resolve_mail_settings(db)
    recipients = selected_admin_notification_recipients(db)
    if not mail_settings or not recipients:
        return

    category_label = ticket_category_label(ticket.category)
    message_body = (
        "Es wurde ein neues Support-Ticket erstellt.\n\n"
        f"Kategorie: {category_label}\n"
        f"Status: {ticket_status_label(ticket.status)}\n"
        f"Name: {ticket.name or '-'}\n"
        f"E-Mail: {ticket.email}\n"
        f"Betreff: {ticket.subject}\n"
        f"Zeitpunkt (UTC): {ensure_utc_datetime(ticket.created_at).isoformat()}\n\n"
        f"Nachricht:\n{ticket.message}\n"
    )

    for recipient in recipients:
        try:
            send_email(
                settings=mail_settings,
                to_email=recipient,
                subject=f"Neues Ticket bei {settings.app_name}",
                text_body=message_body,
            )
        except Exception:
            # One failed recipient must not abort the remaining notifications.
            logger.exception("support_ticket_notification_failed ticket=%s recipient=%s", ticket.id, recipient)


def send_registration_email_if_enabled(*, db: Session, user: User) -> None:
    """Send the welcome mail to a newly registered user when enabled and configured."""
    config = get_email_config(db)
    if not config or not config.registration_mails_enabled:
        return
    mail_settings = resolve_mail_settings(db)
    if not mail_settings:
        return
    try:
        send_email(
            settings=mail_settings,
            to_email=user.email,
            subject=f"Willkommen bei {settings.app_name}",
            text_body=(
                "Dein Konto wurde erfolgreich erstellt.\n\n"
                f"Du kannst dich jetzt bei {settings.app_name} anmelden und deine Zeiten verwalten."
            ),
        )
    except Exception:
        logger.exception("registration_email_failed email=%s", user.email)


def send_registration_admin_notification(*, db: Session, user: User, source: str) -> None:
    """Notify configured admins about a new registration (best-effort per recipient)."""
    config = get_email_config(db)
    if config and not config.registration_admin_notify_enabled:
        return
    recipients = selected_admin_notification_recipients(db)
    mail_settings = resolve_mail_settings(db)
    if not mail_settings or not recipients:
        return

    for recipient in dict.fromkeys(recipients):
        try:
            send_email(
                settings=mail_settings,
                to_email=recipient,
                subject=f"Neue Registrierung bei {settings.app_name}",
                text_body=(
                    "Es wurde ein neues Konto registriert.\n\n"
                    f"E-Mail: {user.email}\n"
                    f"Rolle: {user.role}\n"
                    f"E-Mail bestätigt: {'ja' if user.email_verified else 'nein'}\n"
                    f"Quelle: {source}\n"
                    f"Zeitpunkt (UTC): {utc_now().isoformat()}\n"
                ),
            )
        except Exception:
            logger.exception(
                "registration_admin_notification_failed notify=%s email=%s source=%s",
                recipient,
                user.email,
                source,
            )


def is_email_verification_enabled(db: Session) -> bool:
    """Email verification is active only when required AND mail is configured."""
    if not settings.email_verification_required:
        return False
    return resolve_mail_settings(db) is not None


def issue_email_verification_token(user: User) -> str:
    """Generate a verification token, store only its hash + expiry, return the raw token."""
    raw_token = generate_reset_token()
    user.email_verification_token_hash = hash_token(raw_token)
    user.email_verification_expires_at = utc_now() + timedelta(minutes=settings.email_verification_token_ttl_minutes)
    user.email_verification_sent_at = utc_now()
    return raw_token


def send_email_verification_link(*, request: Request, db: Session, user: User, force: bool = False) -> tuple[bool, str]:
    """Send a verification link; returns (ok, reason).

    Reasons: already_verified, mail_not_configured, rate_limited, send_failed, sent.
    Unless *force* is set, re-sends are throttled to one per 30 seconds.
    """
    if user.email_verified:
        return True, "already_verified"

    mail_settings = resolve_mail_settings(db)
    if not mail_settings:
        return False, "mail_not_configured"

    if not force and user.email_verification_sent_at is not None:
        last_sent_at = ensure_utc_datetime(user.email_verification_sent_at)
        if (utc_now() - last_sent_at) < timedelta(seconds=30):
            return False, "rate_limited"

    raw_token = issue_email_verification_token(user)
    db.add(user)
    # Commit before sending so the stored hash matches the link even if SMTP hangs.
    db.commit()

    verify_url = f"{str(request.base_url).rstrip('/')}/verify-email?token={raw_token}"
    try:
        send_email(
            settings=mail_settings,
            to_email=user.email,
            subject="Bitte E-Mail-Adresse bestätigen",
            text_body=(
                f"Bitte bestätige deine E-Mail-Adresse für {settings.app_name}.\n\n"
                f"Link: {verify_url}\n\n"
                f"Der Link ist {settings.email_verification_token_ttl_minutes} Minuten gültig."
            ),
        )
    except Exception:
        logger.exception("email_verification_send_failed email=%s", user.email)
        return False, "send_failed"
    return True, "sent"


def get_user_by_email_verification_token(db: Session, raw_token: str) -> User | None:
    """Resolve an unexpired, unused verification token to its active user."""
    token_hash_value = hash_token(raw_token)
    stmt = select(User).where(User.email_verification_token_hash == token_hash_value, User.is_active.is_(True))
    user = db.execute(stmt).scalar_one_or_none()
    if not user:
        return None
    if user.email_verified:
        return None
    if not user.email_verification_expires_at:
        return None
    if utc_now() > ensure_utc_datetime(user.email_verification_expires_at):
        return None
    return user


def begin_pending_mfa_login(request: Request, user: User) -> str:
    """Open a pending-MFA session state (password accepted, code outstanding)."""
    request.session.clear()
    csrf_token = new_csrf_token()
    request.session["csrf_token"] = csrf_token
    request.session["mfa_pending_user_id"] = user.id
    request.session["mfa_pending_method"] = user.mfa_method
    request.session["mfa_pending_created_at"] = utc_now().isoformat()
    request.session["mfa_pending_attempts"] = 0
    return csrf_token


def clear_pending_mfa_login(request: Request) -> None:
    """Drop all pending-MFA keys from the session."""
    request.session.pop("mfa_pending_user_id", None)
    request.session.pop("mfa_pending_method", None)
    request.session.pop("mfa_pending_created_at", None)
    request.session.pop("mfa_pending_attempts", None)


def get_pending_mfa_user(request: Request, db: Session) -> tuple[User | None, str | None]:
    """Return (user, method) for a valid pending MFA login, clearing stale state."""
    pending_user_id = request.session.get("mfa_pending_user_id")
    pending_method = request.session.get("mfa_pending_method")
    pending_created_at = request.session.get("mfa_pending_created_at")
    if not pending_user_id or not pending_method or not pending_created_at:
        return None, None
    try:
        created_at = datetime.fromisoformat(str(pending_created_at))
    except ValueError:
        clear_pending_mfa_login(request)
        return None, None
    if created_at.tzinfo is None:
        created_at = created_at.replace(tzinfo=timezone.utc)
    expires_at = created_at + timedelta(minutes=settings.mfa_pending_ttl_minutes)
    if utc_now() > expires_at:
        clear_pending_mfa_login(request)
        return None, None
    stmt = select(User).where(User.id == pending_user_id, User.is_active.is_(True))
    user = db.execute(stmt).scalar_one_or_none()
    if not user:
        clear_pending_mfa_login(request)
        return None, None
    return user, pending_method


def send_email_mfa_code(*, db: Session, user: User) -> bool:
    """Generate, persist (hashed) and email a 6-digit MFA code; True on success."""
    mail_settings = resolve_mail_settings(db)
    if not mail_settings:
        return False
    code = generate_numeric_code(6)
    user.mfa_email_code_hash = hash_password(code)
    user.mfa_email_code_expires_at = utc_now() + timedelta(minutes=settings.mfa_code_ttl_minutes)
    user.mfa_email_code_sent_at = utc_now()
    db.add(user)
    db.commit()
    try:
        send_email(
            settings=mail_settings,
            to_email=user.email,
            subject="Dein Anmeldecode",
            text_body=(
                "Dein 6-stelliger Sicherheitscode lautet: "
                f"{code}\n\nDer Code ist {settings.mfa_code_ttl_minutes} Minuten gueltig."
            ),
        )
    except Exception:
        logger.exception("mfa_email_send_failed email=%s", user.email)
        # Roll the stored code back so an unsent code can never be accepted.
        user.mfa_email_code_hash = None
        user.mfa_email_code_expires_at = None
        db.add(user)
        db.commit()
        return False
    return True


def start_mfa_challenge(request: Request, db: Session, user: User) -> tuple[bool, str]:
    """Begin MFA for a user; returns (login_complete, error_message)."""
    if user.mfa_method == MFA_METHOD_NONE:
        return True, ""
    begin_pending_mfa_login(request, user)
    if user.mfa_method == MFA_METHOD_EMAIL:
        if not send_email_mfa_code(db=db, user=user):
            clear_pending_mfa_login(request)
            return False, "E-Mail-Code konnte nicht versendet werden. Bitte Admin kontaktieren."
    return False, ""


def verify_pending_mfa_code(request: Request, db: Session, code: str) -> tuple[User | None, str | None]:
    """Check a submitted MFA code; returns (user, None) on success or (None, error)."""
    user, method = get_pending_mfa_user(request, db)
    if not user or not method:
        return None, "MFA-Session ist abgelaufen. Bitte erneut anmelden."

    # Count the attempt before verifying so failures cannot be retried forever.
    attempts = int(request.session.get("mfa_pending_attempts", 0)) + 1
    request.session["mfa_pending_attempts"] = attempts
    if attempts > 8:
        clear_pending_mfa_login(request)
        return None, "Zu viele Fehlversuche. Bitte erneut anmelden."

    normalized_code = normalize_otp_code(code)
    if method == MFA_METHOD_TOTP:
        secret = decrypt_secret(fernet, user.mfa_totp_secret_encrypted)
        if not secret or not verify_totp_code(secret=secret, code=normalized_code):
            return None, "Ungueltiger Authenticator-Code."
    elif method == MFA_METHOD_EMAIL:
        if not user.mfa_email_code_hash or not user.mfa_email_code_expires_at:
            return None, "Kein gueltiger E-Mail-Code vorhanden."
        expires_at = ensure_utc_datetime(user.mfa_email_code_expires_at)
        if utc_now() > expires_at:
            return None, "E-Mail-Code ist abgelaufen. Bitte neuen Code anfordern."
        if not verify_password(normalized_code, user.mfa_email_code_hash):
            return None, "Ungueltiger E-Mail-Code."
        # Single-use: clear the accepted code immediately.
        user.mfa_email_code_hash = None
        user.mfa_email_code_expires_at = None
        db.add(user)
        db.commit()
    else:
        return None, "Unbekannte MFA-Methode."

    clear_pending_mfa_login(request)
    return user, None


def count_admin_users(db: Session) -> int:
    """Number of active admin accounts."""
    stmt = select(User).where(User.role == "admin", User.is_active.is_(True))
    return len(db.execute(stmt).scalars().all())


def parse_admin_id_csv(value: str | None) -> list[str]:
    """Parse a comma-separated admin-id string into a deduplicated, ordered list."""
    if not value:
        return []
    parsed: list[str] = []
    seen: set[str] = set()
    for raw_item in value.split(","):
        item = raw_item.strip()
        if not item or item in seen:
            continue
        parsed.append(item)
        seen.add(item)
    return parsed


def parse_date_query(value: str | None, *, default: date | None = None) -> date:
    """Parse a YYYY-MM-DD query value; use *default* when absent, 400 when invalid."""
    if value is None:
        if default is None:
            raise HTTPException(status_code=400, detail="Date is required")
        return default
    try:
        return datetime.strptime(value, "%Y-%m-%d").date()
    except ValueError as exc:
        raise HTTPException(status_code=400, detail="Date must be YYYY-MM-DD") from exc


def parse_date_fallback_today(value: str) -> date:
    """Like parse_date_query but silently falls back to today on invalid input."""
    try:
        return parse_date_query(value)
    except HTTPException:
        return date.today()


def safe_relative_url(request: Request, value: str | None) -> str | None:
    """Sanitize a redirect target: allow relative paths and same-host absolute URLs only."""
    if not value:
        return None
    if value.startswith("/"):
        return value
    parsed = urlparse(value)
    if parsed.scheme in {"http", "https"} and parsed.netloc == request.url.netloc:
        relative = parsed.path or "/"
        if parsed.query:
            relative = f"{relative}?{parsed.query}"
        return relative
    # Anything else (foreign host, odd scheme) is rejected to prevent open redirects.
    return None


def resolve_return_to(request: Request, *, fallback: str) -> str:
    """Pick a safe post-action redirect: ?return_to, then Referer, then *fallback*."""
    return (
        safe_relative_url(request, request.query_params.get("return_to"))
        or safe_relative_url(request, request.headers.get("referer"))
        or fallback
    )


def parse_day_status_mode(value: str) -> str:
    """Validate a day-status query value against the known labels (400 otherwise)."""
    if value not in DAY_STATUS_QUERY_LABELS:
        raise HTTPException(status_code=400, detail="Ungueltiger Tagesmodus")
    return value


def current_day_status_key(*, is_vacation: bool, special_status: str | None) -> str | None:
    """Map vacation/special flags to the day-status query key; vacation wins."""
    if is_vacation:
        return DAY_STATUS_QUERY_VACATION
    if special_status in SPECIAL_DAY_STATUS_LABELS:
        return special_status
    return None


def serialize_entry(entry: TimeEntry) -> dict:
    """Serialize a time entry with derived gross and net minute totals."""
    gross_minutes = entry.end_minutes - entry.start_minutes
    net_minutes = compute_net_minutes(entry.start_minutes, entry.end_minutes, entry.break_minutes)
    return {
        "id": entry.id,
        "date": entry.date.isoformat(),
        "start_time": minutes_to_hhmm(entry.start_minutes),
        "end_time": minutes_to_hhmm(entry.end_minutes),
        "break_minutes": entry.break_minutes,
        "break_mode": entry.break_rule_mode,
        "gross_minutes": gross_minutes,
        "net_minutes": net_minutes,
        "notes": entry.notes,
    }


def normalize_break_mode(value: str | None, default: str = "manual") -> str:
    """Clamp a break-mode value to {'manual', 'auto'}, falling back to *default*."""
    if value in {"manual", "auto"}:
        return value
    return default


def auto_break_rules_enabled(user: User) -> bool:
    """Whether automatic statutory break rules are enabled for this user."""
    return bool(user.automatic_break_rules_enabled)


def default_break_minutes_for_user(user: User) -> int:
    """The user's configured default break, clamped to be non-negative."""
    return max(0, int(user.default_break_minutes or 0))


def resolve_break_settings(
    *,
    user: User,
    start_minutes: int,
    end_minutes: int,
    submitted_break_minutes: int | None,
    submitted_break_mode: str | None,
    existing_break_mode: str | None = None,
    existing_break_minutes: int | None = None,
    start_or_end_changed: bool = False,
) -> tuple[int, str]:
    """Decide (break_minutes, break_mode) for an entry.

    Precedence: explicit auto mode > explicitly submitted minutes > re-derived
    auto break when start/end moved on a previously-auto entry > existing
    values > the user's default break.
    """
    default_mode = "auto" if auto_break_rules_enabled(user) else "manual"
    break_mode = normalize_break_mode(submitted_break_mode, default=default_mode)

    if auto_break_rules_enabled(user):
        if break_mode == "auto":
            return automatic_break_minutes(start_minutes, end_minutes), "auto"
        if submitted_break_minutes is not None:
            return submitted_break_minutes, "manual"
        if existing_break_mode == "auto" and start_or_end_changed:
            return automatic_break_minutes(start_minutes, end_minutes), "auto"

    if submitted_break_minutes is not None:
        return submitted_break_minutes, "manual"
    if existing_break_minutes is not None:
        return existing_break_minutes, normalize_break_mode(existing_break_mode, default="manual")
    return default_break_minutes_for_user(user), "manual"


def user_home_url(user: User) -> str:
    """URL of the user's preferred landing view (month view or dashboard)."""
    if user.preferred_home_view == "month":
        return f"/month?{urlencode({'view': user.preferred_month_view_mode or 'flat'})}"
    return "/dashboard"


def parse_weekly_target_scope(scope: str) -> str:
    """Validate the scope selector for weekly-target updates (400 otherwise)."""
    valid_scopes = {"current_week", "all_weeks", "from_current_week"}
    if scope not in valid_scopes:
        raise HTTPException(status_code=400, detail="ungueltiger Scope")
    return scope


def parse_bulk_mode(mode: str) -> str:
    """Validate the bulk-entry mode selector (400 otherwise)."""
    valid_modes = {"only_missing", "upsert"}
    if mode not in valid_modes:
        raise HTTPException(status_code=400, detail="ungueltiger Bulk-Modus")
    return mode


def parse_weekday_values(values: list[str]) -> list[int]:
    """Parse weekday form values to a sorted, deduplicated list of ints 0..6."""
    normalized: list[int] = []
    for value in values:
        try:
            day = int(value)
        except ValueError as exc:
            raise HTTPException(status_code=400, detail="ungueltiger Wochentag") from exc
        if day < 0 or day > 6:
            raise HTTPException(status_code=400, detail="ungueltiger Wochentag")
        if day not in normalized:
            normalized.append(day)
    return sorted(normalized)


def get_user_working_days(user: User) -> set[int]:
    """The user's working weekdays parsed from the stored CSV column."""
    return parse_working_days_csv(user.working_days_csv)


def parse_signed_duration_to_minutes(value: str) -> int:
    """Parse '+HH:MM' / '-HH:MM' (sign optional, defaults positive) into signed minutes.

    Raises ValueError for malformed input or a zero duration.
    """
    normalized = value.strip()
    if not normalized:
        raise ValueError("Bitte Stunden im Format +HH:MM oder -HH:MM eingeben")
    match = re.fullmatch(r"([+-])?\s*(\d{1,3}):([0-5]\d)", normalized)
    if not match:
        raise ValueError("Bitte Stunden im Format +HH:MM oder -HH:MM eingeben")
    sign_part, hours_part, minutes_part = match.groups()
    total = int(hours_part) * 60 + int(minutes_part)
    if total <= 0:
        raise ValueError("Der Stundenausgleich muss groesser als 0 sein")
    return -total if sign_part == "-" else total


def full_day_adjustment_minutes(*, db: Session, user: User, selected_date: date, positive: bool) -> int:
    """Signed overtime adjustment equal to one nominal working day on *selected_date*.

    Derived from the effective weekly target of that week divided by the number
    of configured working days. Raises ValueError when that yields <= 0.
    """
    working_days = get_user_working_days(user)
    workdays_per_week = max(1, len(working_days))
    rules = list_rules_for_user(db, user.id)
    weekly_target_minutes = target_for_week(rules, monday_of(selected_date), user.weekly_target_minutes)
    per_day_minutes = int(round(weekly_target_minutes / workdays_per_week))
    if per_day_minutes <= 0:
        raise ValueError("Fuer diesen Tag kann kein Tages-Stundenausgleich berechnet werden")
    return per_day_minutes if positive else -per_day_minutes


def full_day_work_minutes_or_none(*, db: Session, user: User, selected_date: date) -> int | None:
    """Positive full-day minutes for *selected_date*, or None when not computable."""
    try:
        return full_day_adjustment_minutes(db=db, user=user, selected_date=selected_date, positive=True)
    except ValueError:
        return None


def is_auto_holiday_status(status: SpecialDayStatus) -> bool:
    """Whether this holiday status was auto-created by the holiday sync (by note prefix)."""
    return (
        status.status == SPECIAL_DAY_STATUS_HOLIDAY
        and bool(status.notes)
        and str(status.notes).startswith(AUTO_HOLIDAY_NOTE_PREFIX)
    )


def sync_auto_holidays_for_user(
    *,
    db: Session,
    user: User,
    from_date: date,
    to_date: date,
) -> dict[str, int]:
    """Reconcile auto-created public-holiday statuses for one user in a date range.

    Creates missing holiday statuses for the user's federal state, removes
    auto-created ones that no longer apply, and never touches manual statuses.
    Returns {'created': n, 'removed': m}.
    """
    if to_date < from_date:
        return {"created": 0, "removed": 0}

    existing_statuses = list_special_statuses_for_user(db, user.id, from_date, to_date)
    existing_status_by_date = {status.date: status for status in existing_statuses}
    auto_existing_by_date = {
        status.date: status for status in existing_statuses if is_auto_holiday_status(status)
    }

    # Without a federal state there is no holiday source: drop all auto entries.
    if not user.federal_state:
        removed = 0
        for status in auto_existing_by_date.values():
            db.delete(status)
            removed += 1
        return {"created": 0, "removed": removed}

    holiday_dates = list_public_holiday_dates(
        federal_state=user.federal_state,
        from_date=from_date,
        to_date=to_date,
    )

    entries_stmt = (
        select(TimeEntry)
        .where(TimeEntry.user_id == user.id, TimeEntry.date >= from_date, TimeEntry.date <= to_date)
        .order_by(TimeEntry.date.asc())
    )
    entries = db.execute(entries_stmt).scalars().all()
    worked_dates = {
        entry.date
        for entry in entries
        if compute_net_minutes(entry.start_minutes, entry.end_minutes, entry.break_minutes) > 0
    }
    # Days with actual worked time never get an auto holiday status.
    target_dates = {day for day in holiday_dates if day not in worked_dates}

    created = 0
    removed = 0

    for day, auto_status in auto_existing_by_date.items():
        if day not in target_dates:
            db.delete(auto_status)
            removed += 1

    for day in sorted(target_dates):
        existing = existing_status_by_date.get(day)
        if existing:
            if is_auto_holiday_status(existing):
                continue
            # A manual status stays untouched (e.g. sickness).
            continue
        db.add(
            SpecialDayStatus(
                user_id=user.id,
                date=day,
                status=SPECIAL_DAY_STATUS_HOLIDAY,
                notes=f"{AUTO_HOLIDAY_NOTE_PREFIX}{user.federal_state}",
            )
        )
        created += 1

    return {"created": created, "removed": removed}


def sync_auto_holidays_for_all_users(*, db: Session, from_date: date, to_date: date) -> None:
    """Run the holiday sync for every active user with a federal state (best-effort)."""
    stmt = (
        select(User)
        .where(
            User.is_active.is_(True),
            User.federal_state.is_not(None),
            User.federal_state != "",
        )
        .order_by(User.created_at.asc())
    )
    users = db.execute(stmt).scalars().all()
    for user in users:
        try:
            sync_auto_holidays_for_user(db=db, user=user, from_date=from_date, to_date=to_date)
        except Exception:
            # One failing user must not block startup sync for the rest.
            logger.exception("startup_holiday_sync_failed email=%s", user.email)


def day_status_for_user(*, db: Session, user: User, selected_date: date) -> tuple[bool, str | None]:
    """Return (is_vacation, special_status) for one user on one date."""
    working_days = get_user_working_days(user)
    vacations = list_vacations_for_user(db, user.id, selected_date, selected_date)
    vacation_dates = expand_vacation_dates(
        vacations,
        selected_date,
        selected_date,
        relevant_weekdays=working_days,
    )
    special = list_special_statuses_for_user(db, user.id, selected_date, selected_date)
    special_map = special_status_map(special)
    return selected_date in vacation_dates, special_map.get(selected_date)


def get_workhours_counter_manual_offset_minutes(user: User) -> int:
    """The user's manual counter offset, clamped to be non-negative."""
    value = user.workhours_counter_manual_offset_minutes or 0
    return max(0, int(value))


def compute_workhours_counter_minutes(
    *,
    db: Session,
    user: User,
    from_date: date,
    to_date: date,
) -> int:
    """Sum counted work minutes in [from_date, to_date] plus the manual offset.

    Per working day: credited days (vacation/holiday/sick configured to count)
    contribute a pro-rated daily target; blocked days contribute nothing; other
    days contribute their entry's net minutes.
    """
    if to_date < from_date:
        return 0

    working_days = get_user_working_days(user)
    entries_stmt = (
        select(TimeEntry)
        .where(TimeEntry.user_id == user.id, TimeEntry.date >= from_date, TimeEntry.date <= to_date)
        .order_by(TimeEntry.date.asc())
    )
    entries = db.execute(entries_stmt).scalars().all()
    entry_map = {entry.date: entry for entry in entries}
    vacations = list_vacations_for_user(db, user.id, from_date, to_date)
    vacation_dates = expand_vacation_dates(vacations, from_date, to_date, relevant_weekdays=working_days)
    special_days = list_special_statuses_for_user(db, user.id, from_date, to_date)
    blocked_dates = effective_non_working_dates_for_user(user=user, special_statuses=special_days)
    count_as_worktime_dates = count_as_worktime_dates_for_user(
        user=user,
        vacation_dates=vacation_dates,
        special_statuses=special_days,
    )
    rules = list_rules_for_user(db, user.id)
    workdays_per_week = max(1, len(working_days))

    total = 0
    cursor = from_date
    while cursor <= to_date:
        if cursor.weekday() not in working_days:
            cursor += timedelta(days=1)
            continue
        if cursor in count_as_worktime_dates:
            week_target = target_for_week(rules, monday_of(cursor), user.weekly_target_minutes)
            total += int(round(week_target / workdays_per_week))
            cursor += timedelta(days=1)
            continue
        if cursor in vacation_dates or cursor in blocked_dates:
            cursor += timedelta(days=1)
            continue
        entry = entry_map.get(cursor)
        if entry:
            total += compute_net_minutes(entry.start_minutes, entry.end_minutes, entry.break_minutes)
        cursor += timedelta(days=1)
    return total + get_workhours_counter_manual_offset_minutes(user)


def compute_workhours_counter_forecast(
    *,
    db: Session,
    user: User,
    from_date: date,
    to_date: date,
) -> dict[str, int]:
    """Return logged minutes so far and a projection for the full period.

    logged_minutes covers [from_date, min(today, to_date)] like
    compute_workhours_counter_minutes; projected_minutes adds the pro-rated
    plan for every remaining countable working day after today.
    """
    if to_date < from_date:
        return {"logged_minutes": 0, "projected_minutes": 0}

    working_days = get_user_working_days(user)
    if not working_days:
        return {"logged_minutes": 0, "projected_minutes": 0}

    entries_stmt = (
        select(TimeEntry)
        .where(TimeEntry.user_id == user.id, TimeEntry.date >= from_date, TimeEntry.date <= to_date)
        .order_by(TimeEntry.date.asc())
    )
    entries = db.execute(entries_stmt).scalars().all()
    entry_map = {entry.date: entry for entry in entries}
    vacations = list_vacations_for_user(db, user.id, from_date, to_date)
    vacation_dates = expand_vacation_dates(vacations, from_date, to_date, relevant_weekdays=working_days)
    special_days = list_special_statuses_for_user(db, user.id, from_date, to_date)
    blocked_dates = effective_non_working_dates_for_user(user=user, special_statuses=special_days)
    count_as_worktime_dates = count_as_worktime_dates_for_user(
        user=user,
        vacation_dates=vacation_dates,
        special_statuses=special_days,
    )
    rules = list_rules_for_user(db, user.id)
    workdays_per_week = max(1, len(working_days))

    today = date.today()
    logged_to = min(today, to_date)
    logged_minutes = 0
    cursor = from_date
    while cursor <= logged_to:
        if cursor.weekday() not in working_days:
            cursor += timedelta(days=1)
            continue
        if cursor in count_as_worktime_dates:
            week_target = target_for_week(rules, monday_of(cursor), user.weekly_target_minutes)
            logged_minutes += int(round(week_target / workdays_per_week))
            cursor += timedelta(days=1)
            continue
        if cursor in vacation_dates or cursor in blocked_dates:
            cursor += timedelta(days=1)
            continue
        entry = entry_map.get(cursor)
        if entry:
            logged_minutes += compute_net_minutes(entry.start_minutes, entry.end_minutes, entry.break_minutes)
        cursor += timedelta(days=1)

    # Accumulate the remaining plan as float and round once at the end to avoid
    # compounding per-day rounding error.
    remaining_planned = 0.0
    remaining_start = max(from_date, today + timedelta(days=1))
    cursor = remaining_start
    while cursor <= to_date:
        if cursor.weekday() in working_days and (
            cursor in count_as_worktime_dates or (cursor not in vacation_dates and cursor not in blocked_dates)
        ):
            week_target = target_for_week(rules, monday_of(cursor), user.weekly_target_minutes)
            remaining_planned += week_target / workdays_per_week
        cursor += timedelta(days=1)

    manual_offset = get_workhours_counter_manual_offset_minutes(user)
    logged_minutes += manual_offset
    projected_minutes = logged_minutes + int(round(remaining_planned))
    return {
        "logged_minutes": logged_minutes,
        "projected_minutes": projected_minutes,
    }


def build_workhours_target_warning(
    *,
    db: Session,
    user: User,
) -> dict[str, object] | None:
    """Build the target-at-risk warning payload, or None when not applicable."""
    if not user.workhours_counter_enabled:
        return None
    if user.workhours_counter_start_date is None or user.workhours_counter_end_date is None:
        return None
    if user.workhours_counter_end_date < user.workhours_counter_start_date:
        return None
    target_minutes = user.workhours_counter_target_minutes
    if target_minutes is None or target_minutes <= 0:
        return None

    forecast = compute_workhours_counter_forecast(
        db=db,
        user=user,
        from_date=user.workhours_counter_start_date,
        to_date=user.workhours_counter_end_date,
    )
    logged_minutes = forecast["logged_minutes"]
    projected_minutes = forecast["projected_minutes"]
    # Only warn while the period is still running and the projection falls short.
    at_risk = date.today() <= user.workhours_counter_end_date and projected_minutes < target_minutes
    missing_minutes = max(0, target_minutes - projected_minutes)

    return {
        "start_date": user.workhours_counter_start_date,
        "end_date": user.workhours_counter_end_date,
        "logged_minutes": logged_minutes,
        "projected_minutes": projected_minutes,
        "target_minutes": target_minutes,
        "missing_minutes": missing_minutes,
        "at_risk": at_risk,
    }


def maybe_send_workhours_target_warning_email(
    *,
    db: Session,
    user: User,
    warning: dict[str, object] | None,
) -> None:
    """Email the at-risk warning at most once per day per identical warning key."""
    if warning is None or warning.get("at_risk") is not True:
        return
    if not user.workhours_counter_target_email_enabled:
        return

    mail_settings = resolve_mail_settings(db)
    if not mail_settings:
        return

    start_date = warning["start_date"]
    end_date = warning["end_date"]
    target_minutes = warning["target_minutes"]
    logged_minutes = warning["logged_minutes"]
    projected_minutes = warning["projected_minutes"]
    missing_minutes = warning["missing_minutes"]
    # Defensive narrowing: the payload is dict[str, object].
    if not isinstance(start_date, date) or not isinstance(end_date, date):
        return
    if not isinstance(target_minutes, int):
        return
    if not isinstance(logged_minutes, int) or not isinstance(projected_minutes, int) or not isinstance(missing_minutes, int):
        return

    warning_key = f"{start_date.isoformat()}|{end_date.isoformat()}|{target_minutes}"
    today = date.today()
    if (
        user.workhours_counter_warning_last_sent_on == today
        and user.workhours_counter_warning_last_sent_key == warning_key
    ):
        return

    try:
        send_email(
            settings=mail_settings,
            to_email=user.email,
            subject="Warnung: Arbeitsstundenziel gefährdet",
            text_body=(
                "Dein gesetztes Arbeitsstundenziel im Counter-Zeitraum ist mit den aktuellen "
                "Einstellungen voraussichtlich nicht erreichbar.\n\n"
                f"Zeitraum: {start_date.strftime('%d.%m.%Y')} - "
                f"{end_date.strftime('%d.%m.%Y')}\n"
                f"Ziel: {minutes_to_hhmm(target_minutes)}\n"
                f"Bisher erfasst: {minutes_to_hhmm(logged_minutes)}\n"
                f"Prognose bis Periodenende: {minutes_to_hhmm(projected_minutes)}\n"
                f"Voraussichtliche Lücke: {minutes_to_hhmm(missing_minutes)}\n\n"
                "Bitte passe bei Bedarf Arbeitszeiten oder Einstellungen an."
            ),
        )
    except Exception:
        logger.exception("workhours_target_warning_mail_failed email=%s", user.email)
        return

    # Record the send only after success so a failed send can retry later.
    user.workhours_counter_warning_last_sent_on = today
    user.workhours_counter_warning_last_sent_key = warning_key
    db.add(user)
    db.commit()


def range_is_full_vacation(
    start_date: date,
    end_date: date,
    *,
    vacation_dates: set[date],
    relevant_weekdays: set[int],
) -> bool:
    """True when every relevant weekday in the range is already a vacation day."""
    relevant_dates = [
        start_date + timedelta(days=offset)
        for offset in range((end_date - start_date).days + 1)
        if (start_date + timedelta(days=offset)).weekday() in relevant_weekdays
    ]
    if not relevant_dates:
        return False
    return all(day in vacation_dates for day in relevant_dates)


def add_vacation_range(
    *,
    db: Session,
    user_id: str,
    start_date: date,
    end_date: date,
    include_weekends: bool = True,
    notes: str | None = None,
) -> None:
    """Persist a single contiguous vacation period for *user_id*."""
    period = VacationPeriod(
        user_id=user_id,
        start_date=start_date,
        end_date=end_date,
        include_weekends=include_weekends,
        notes=notes,
    )
    db.add(period)


def add_vacation_for_weekdays(
    *,
    db: Session,
    user_id: str,
    start_date: date,
    end_date: date,
    relevant_weekdays: set[int],
    notes: str | None = None,
) -> None:
    """Add vacation covering only the relevant weekdays in the range.

    Consecutive qualifying days are merged into contiguous VacationPeriod rows
    so the storage stays compact.
    """
    days_to_add = [
        start_date + timedelta(days=offset)
        for offset in range((end_date - start_date).days + 1)
        if (start_date + timedelta(days=offset)).weekday() in relevant_weekdays
    ]
    if not days_to_add:
        return

    block_start = days_to_add[0]
    previous_day = days_to_add[0]

    for current_day in days_to_add[1:]:
        if current_day == previous_day + timedelta(days=1):
            previous_day = current_day
            continue

        # Gap found: flush the block collected so far.
        add_vacation_range(
            db=db,
            user_id=user_id,
            start_date=block_start,
            end_date=previous_day,
            include_weekends=True,
            notes=notes,
        )
        block_start = current_day
        previous_day = current_day

    # Flush the final block.
    add_vacation_range(
        db=db,
        user_id=user_id,
        start_date=block_start,
        end_date=previous_day,
        include_weekends=True,
        notes=notes,
    )
+ + def build_effective_vacation_ranges( + *, + periods: list[VacationPeriod], + relevant_weekdays: set[int], + ) -> list[dict]: + if not periods: + return [] + + from_date = min(period.start_date for period in periods) + to_date = max(period.end_date for period in periods) + effective_dates = expand_vacation_dates( + periods, + from_date, + to_date, + relevant_weekdays=relevant_weekdays, + ) + ranges = collapse_dates_to_ranges(effective_dates) + return [{"start_date": start, "end_date": end} for start, end in ranges] + + def remove_vacation_range( + *, + db: Session, + user_id: str, + start_date: date, + end_date: date, + ) -> None: + overlapping_stmt = ( + select(VacationPeriod) + .where( + VacationPeriod.user_id == user_id, + VacationPeriod.end_date >= start_date, + VacationPeriod.start_date <= end_date, + ) + .order_by(VacationPeriod.start_date.asc()) + ) + overlapping_periods = db.execute(overlapping_stmt).scalars().all() + + for period in overlapping_periods: + period_start = period.start_date + period_end = period.end_date + + if start_date <= period_start and end_date >= period_end: + db.delete(period) + continue + + if start_date > period_start and end_date < period_end: + left_end = start_date - timedelta(days=1) + right_start = end_date + timedelta(days=1) + period.end_date = left_end + right_period = VacationPeriod( + user_id=user_id, + start_date=right_start, + end_date=period_end, + include_weekends=period.include_weekends, + notes=period.notes, + ) + db.add(right_period) + continue + + if start_date <= period_start <= end_date < period_end: + period.start_date = end_date + timedelta(days=1) + continue + + if period_start < start_date <= period_end <= end_date: + period.end_date = start_date - timedelta(days=1) + continue + + weekday_options = [ + {"value": 0, "label": "Montag"}, + {"value": 1, "label": "Dienstag"}, + {"value": 2, "label": "Mittwoch"}, + {"value": 3, "label": "Donnerstag"}, + {"value": 4, "label": "Freitag"}, + {"value": 5, "label": 
def render_bulk_form(
    request: Request,
    *,
    db: Session,
    user: User,
    from_date_value: str,
    to_date_value: str,
    weekdays_selected: list[int],
    bulk_mode: str,
    start_time: str,
    end_time: str,
    break_minutes: int,
    break_mode: str,
    notes: str,
    error: str | None = None,
    success_message: str | None = None,
    status_code: int = status.HTTP_200_OK,
):
    """Render the bulk-entry page, echoing the submitted form values back into the template."""
    return templates.TemplateResponse(
        "pages/bulk_entry.html",
        build_context(
            request,
            user=user,
            db=db,
            from_date=from_date_value,
            to_date=to_date_value,
            weekdays_selected=weekdays_selected,
            bulk_mode=bulk_mode,
            start_time=start_time,
            end_time=end_time,
            break_minutes=break_minutes,
            break_mode=break_mode,
            notes=notes,
            weekday_options=weekday_options,
            error=error,
            success_message=success_message,
        ),
        status_code=status_code,
    )


def render_settings_form(
    request: Request,
    *,
    db: Session,
    user: User,
    active_tab: str = "settings",
    import_preview: dict | None = None,
    import_mode_selected: str = "merge",
    success_message: str | None = None,
    error: str | None = None,
    status_code: int = status.HTTP_200_OK,
):
    """Assemble and render the settings page (user settings tab plus, for admins, the admin tab).

    Gathers vacations, weekly-target rules, overtime adjustments, workhours
    counter state, MFA setup state, and — for admins — managed users, email
    server configuration, site content and support tickets.
    """
    vacations_stmt = (
        select(VacationPeriod)
        .where(VacationPeriod.user_id == user.id)
        .order_by(VacationPeriod.start_date.asc(), VacationPeriod.end_date.asc())
    )
    vacations = db.execute(vacations_stmt).scalars().all()
    today = date.today()
    rules = list_rules_for_user(db, user.id)
    # The weekly target shown in settings is the one effective for the current week.
    settings_weekly_target_minutes = target_for_week(rules, monday_of(today), user.weekly_target_minutes)
    working_days = get_user_working_days(user)
    working_days_selected = sorted(working_days)
    vacation_ranges = build_effective_vacation_ranges(periods=vacations, relevant_weekdays=working_days)
    # Sentinel bounds 1970..2100 effectively mean "all adjustments ever recorded".
    overtime_adjustments = list_overtime_adjustments_for_user(db, user.id, date(1970, 1, 1), date(2100, 12, 31))
    overtime_adjustment_total_positive = sum(max(0, item.minutes) for item in overtime_adjustments)
    overtime_adjustment_total_negative = sum(min(0, item.minutes) for item in overtime_adjustments)
    # Full-day adjustments are recognised by a German marker text in the notes.
    overtime_adjustment_full_day_count = sum(
        1 for item in overtime_adjustments if item.notes and "ganzer Tag" in item.notes
    )
    workhours_counter_minutes: int | None = None
    workhours_counter_warning: dict[str, object] | None = None
    # The counter is only computed when enabled and its date range is valid.
    if (
        user.workhours_counter_enabled
        and user.workhours_counter_start_date is not None
        and user.workhours_counter_end_date is not None
        and user.workhours_counter_end_date >= user.workhours_counter_start_date
    ):
        workhours_counter_minutes = compute_workhours_counter_minutes(
            db=db,
            user=user,
            from_date=user.workhours_counter_start_date,
            to_date=user.workhours_counter_end_date,
        )
        workhours_counter_warning = build_workhours_target_warning(db=db, user=user)

    # A pending MFA setup secret (stored in the session) yields a provisioning URI.
    mfa_setup_secret = request.session.get("mfa_setup_secret")
    mfa_setup_uri = None
    if mfa_setup_secret:
        mfa_setup_uri = build_totp_uri(secret=mfa_setup_secret, account_name=user.email)

    is_admin = user.role == "admin"
    # Unknown tabs fall back to "settings"; non-admins may never see "admin".
    if active_tab not in {"settings", "admin"}:
        active_tab = "settings"
    if not is_admin:
        active_tab = "settings"
    managed_users: list[User] = []
    admin_recipients: list[User] = []
    email_server_config = get_email_config(db)
    if is_admin:
        users_stmt = select(User).order_by(User.created_at.asc())
        managed_users = db.execute(users_stmt).scalars().all()
        admin_recipients_stmt = (
            select(User)
            .where(User.role == "admin", User.is_active.is_(True))
            .order_by(User.created_at.asc())
        )
        admin_recipients = db.execute(admin_recipients_stmt).scalars().all()
    site_content_markdown = {
        SITE_CONTENT_IMPRESSUM: get_site_content_markdown(db, SITE_CONTENT_IMPRESSUM),
        SITE_CONTENT_PRIVACY: get_site_content_markdown(db, SITE_CONTENT_PRIVACY),
    }
    support_tickets: list[SupportTicket] = []
    if is_admin:
        # Open tickets first (case() maps open -> 0, others -> 1), newest first within each group.
        support_tickets = db.execute(
            select(SupportTicket).order_by(
                case((SupportTicket.status == SUPPORT_TICKET_STATUS_OPEN, 0), else_=1),
                SupportTicket.created_at.desc(),
            )
        ).scalars().all()

    selected_notify_admin_ids = parse_admin_id_csv(
        email_server_config.registration_admin_notify_admin_ids_csv if email_server_config else None
    )
    fallback_notify_email = settings.registration_notify_email.strip()

    # Flat view model for the email-server form; defaults apply when no config row exists.
    email_server_view = {
        "smtp_host": email_server_config.smtp_host if email_server_config else "",
        "smtp_port": email_server_config.smtp_port if email_server_config else 587,
        "smtp_username": email_server_config.smtp_username if email_server_config else "",
        "from_email": email_server_config.from_email if email_server_config else "",
        "from_name": email_server_config.from_name if email_server_config else settings.app_name,
        "use_starttls": email_server_config.use_starttls if email_server_config else True,
        "use_ssl": email_server_config.use_ssl if email_server_config else False,
        "verify_tls": email_server_config.verify_tls if email_server_config else True,
        "registration_mails_enabled": email_server_config.registration_mails_enabled if email_server_config else True,
        "password_reset_mails_enabled": email_server_config.password_reset_mails_enabled if email_server_config else True,
        "registration_admin_notify_enabled": (
            email_server_config.registration_admin_notify_enabled if email_server_config else True
        ),
        "registration_admin_notify_admin_ids": selected_notify_admin_ids,
        "registration_notify_fallback_email": fallback_notify_email,
        # Only expose whether a password is stored, never the password itself.
        "has_password": bool(email_server_config and email_server_config.smtp_password_encrypted),
    }

    return templates.TemplateResponse(
        "pages/settings.html",
        build_context(
            request,
            user=user,
            db=db,
            vacations=vacations,
            vacation_ranges=vacation_ranges,
            overtime_adjustments=overtime_adjustments,
            overtime_adjustment_total_positive=overtime_adjustment_total_positive,
            overtime_adjustment_total_negative=overtime_adjustment_total_negative,
            overtime_adjustment_full_day_count=overtime_adjustment_full_day_count,
            vacation_start=today.isoformat(),
            vacation_end=today.isoformat(),
            settings_weekly_target_minutes=settings_weekly_target_minutes,
            working_days_selected=working_days_selected,
            weekday_options=weekday_options,
            workhours_counter_minutes=workhours_counter_minutes,
            workhours_counter_warning=workhours_counter_warning,
            federal_state_options=GERMAN_STATE_OPTIONS,
            mfa_method_labels=MFA_METHOD_LABELS,
            mfa_setup_secret=mfa_setup_secret,
            mfa_setup_uri=mfa_setup_uri,
            is_admin=is_admin,
            active_settings_tab=active_tab,
            managed_users=managed_users,
            admin_recipients=admin_recipients,
            admin_user_count=count_admin_users(db),
            email_server=email_server_view,
            site_content_markdown=site_content_markdown,
            support_tickets=support_tickets,
            ticket_status_label=ticket_status_label,
            ticket_category_label=ticket_category_label,
            mail_settings_available=resolve_mail_settings(db) is not None,
            import_preview=import_preview,
            import_mode_selected=import_mode_selected,
            success_message=success_message,
            error=error,
        ),
        status_code=status_code,
    )


def user_export_date_bounds(db: Session, user: User) -> tuple[date, date]:
    """Return the (earliest, latest) date over all of a user's exportable records.

    Considers time entries, vacation period bounds, special day statuses,
    overtime adjustments, weekly-target rule effective dates, and the user's
    own counter/overtime dates. Falls back to (today, today) when the user
    has no dated records at all.
    """
    dates: list[date] = []
    dates.extend(
        db.execute(select(TimeEntry.date).where(TimeEntry.user_id == user.id).order_by(TimeEntry.date.asc()))
        .scalars()
        .all()
    )
    dates.extend(
        db.execute(
            select(VacationPeriod.start_date).where(VacationPeriod.user_id == user.id).order_by(VacationPeriod.start_date.asc())
        )
        .scalars()
        .all()
    )
    dates.extend(
        db.execute(
            select(VacationPeriod.end_date).where(VacationPeriod.user_id == user.id).order_by(VacationPeriod.end_date.asc())
        )
        .scalars()
        .all()
    )
    dates.extend(
        db.execute(
            select(SpecialDayStatus.date).where(SpecialDayStatus.user_id == user.id).order_by(SpecialDayStatus.date.asc())
        )
        .scalars()
        .all()
    )
    dates.extend(
        db.execute(
            select(OvertimeAdjustment.date)
            .where(OvertimeAdjustment.user_id == user.id)
            .order_by(OvertimeAdjustment.date.asc())
        )
        .scalars()
        .all()
    )
    dates.extend(
        db.execute(
            select(WeeklyTargetRule.effective_from)
            .where(WeeklyTargetRule.user_id == user.id)
            .order_by(WeeklyTargetRule.effective_from.asc())
        )
        .scalars()
        .all()
    )
    # Per-user scalar dates also widen the bounds when present.
    for maybe_date in (
        user.overtime_start_date,
        user.workhours_counter_start_date,
        user.workhours_counter_end_date,
    ):
        if maybe_date is not None:
            dates.append(maybe_date)
    if not dates:
        today = date.today()
        return today, today
    return min(dates), max(dates)
def build_export_payload_for_range(
    *,
    db: Session,
    user: User,
    from_date: date,
    to_date: date,
) -> tuple[list[dict], list[dict], dict]:
    """Build export data for [from_date, to_date]: per-day rows, per-week summaries, and totals.

    Week figures are computed over the full Monday..Sunday weeks touching the
    range, so weekly Ist/Soll/Delta values are consistent even when the export
    range starts or ends mid-week.
    """
    # Make sure auto-generated entries exist for the range before exporting.
    auto_created = autofill_entries_for_range(db=db, user=user, range_start=from_date, range_end=to_date)
    if auto_created:
        db.commit()

    entries_stmt = (
        select(TimeEntry)
        .where(TimeEntry.user_id == user.id, TimeEntry.date >= from_date, TimeEntry.date <= to_date)
        .order_by(TimeEntry.date.asc())
    )
    entries = db.execute(entries_stmt).scalars().all()
    entries_by_date = {entry.date: entry for entry in entries}

    days: list[date] = []
    cursor = from_date
    while cursor <= to_date:
        days.append(cursor)
        cursor += timedelta(days=1)

    week_starts = sorted({monday_of(day) for day in days})
    if not week_starts:
        week_starts = [monday_of(from_date)]

    # Widen to full weeks so week totals include days outside the export range.
    full_range_start = min(week_starts)
    full_range_end = max(week_starts) + timedelta(days=6)
    full_entries_stmt = (
        select(TimeEntry)
        .where(TimeEntry.user_id == user.id, TimeEntry.date >= full_range_start, TimeEntry.date <= full_range_end)
        .order_by(TimeEntry.date.asc())
    )
    full_entries = db.execute(full_entries_stmt).scalars().all()

    rules = list_rules_for_user(db, user.id)
    working_days = get_user_working_days(user)
    base_week_target_map = target_map_for_weeks(rules, week_starts, user.weekly_target_minutes)
    export_vacations = list_vacations_for_user(db, user.id, full_range_start, full_range_end)
    export_vacation_dates = expand_vacation_dates(
        export_vacations,
        full_range_start,
        full_range_end,
        relevant_weekdays=working_days,
    )
    export_special_statuses = list_special_statuses_for_user(db, user.id, full_range_start, full_range_end)
    export_special_dates = effective_non_working_dates_for_user(user=user, special_statuses=export_special_statuses)
    export_count_as_worktime_dates = count_as_worktime_dates_for_user(
        user=user,
        vacation_dates=export_vacation_dates,
        special_statuses=export_special_statuses,
    )
    export_special_by_date = special_status_map(export_special_statuses)
    export_overtime_adjustments = list_overtime_adjustments_for_user(db, user.id, full_range_start, full_range_end)
    export_overtime_adjustment_minutes = overtime_adjustment_minutes_map(export_overtime_adjustments)
    # Per-week actual (Ist), target (Soll) and difference (Delta) minutes.
    week_ist_map: dict[date, int] = {}
    week_target_map: dict[date, int] = {}
    week_delta_map: dict[date, int] = {}
    for week_start in week_starts:
        totals = compute_effective_week_totals(
            entries=full_entries,
            week_start=week_start,
            weekly_target_minutes=base_week_target_map[week_start],
            vacation_dates=export_vacation_dates,
            non_working_dates=export_special_dates,
            count_as_worktime_dates=export_count_as_worktime_dates,
            overtime_adjustment_minutes_by_date=export_overtime_adjustment_minutes,
            overtime_start_date=user.overtime_start_date,
            relevant_weekdays=working_days,
        )
        week_ist_map[week_start] = totals["weekly_ist"]
        week_target_map[week_start] = totals["weekly_soll"]
        week_delta_map[week_start] = totals["weekly_delta"]
    rows = build_export_rows(
        days,
        entries_by_date,
        week_target_map,
        week_ist_map,
        week_delta_map,
        export_special_by_date,
        export_overtime_adjustment_minutes,
    )

    week_summaries = []
    total_ist = 0
    total_delta = 0
    for week_start in week_starts:
        ist = week_ist_map[week_start]
        soll = week_target_map[week_start]
        delta = week_delta_map[week_start]
        total_ist += ist
        total_delta += delta
        week_summaries.append(
            {
                "week_start": week_start,
                "week_end": week_start + timedelta(days=6),
                "ist_minutes": ist,
                "soll_minutes": soll,
                "delta_minutes": delta,
            }
        )

    totals = {
        "from_date": from_date,
        "to_date": to_date,
        "ist_minutes": total_ist,
        "delta_minutes": total_delta,
    }
    return rows, week_summaries, totals


def build_user_backup_payload(*, db: Session, user: User) -> dict:
    """Serialize a user's settings and all dated records into a JSON-ready backup dict (version 2)."""
    rules = list_rules_for_user(db, user.id)
    time_entries = (
        db.execute(select(TimeEntry).where(TimeEntry.user_id == user.id).order_by(TimeEntry.date.asc())).scalars().all()
    )
    vacation_periods = (
        db.execute(select(VacationPeriod).where(VacationPeriod.user_id == user.id).order_by(VacationPeriod.start_date.asc()))
        .scalars()
        .all()
    )
    special_statuses = (
        db.execute(select(SpecialDayStatus).where(SpecialDayStatus.user_id == user.id).order_by(SpecialDayStatus.date.asc()))
        .scalars()
        .all()
    )
    overtime_adjustments = (
        db.execute(
            select(OvertimeAdjustment).where(OvertimeAdjustment.user_id == user.id).order_by(OvertimeAdjustment.date.asc())
        )
        .scalars()
        .all()
    )
    # Dates are serialized as ISO strings; optional dates become None.
    return {
        "backup_version": 2,
        "app_name": settings.app_name,
        "app_version": settings.app_version,
        "exported_at": utc_now().isoformat(),
        "settings": {
            "weekly_target_minutes": user.weekly_target_minutes,
            "preferred_home_view": user.preferred_home_view,
            "preferred_month_view_mode": user.preferred_month_view_mode,
            "entry_mode": user.entry_mode,
            "working_days": sorted(get_user_working_days(user)),
            "count_vacation_as_worktime": user.count_vacation_as_worktime,
            "count_holiday_as_worktime": user.count_holiday_as_worktime,
            "count_sick_as_worktime": user.count_sick_as_worktime,
            "automatic_break_rules_enabled": user.automatic_break_rules_enabled,
            "default_break_minutes": user.default_break_minutes,
            "overtime_start_date": user.overtime_start_date.isoformat() if user.overtime_start_date else None,
            "overtime_expiry_days": user.overtime_expiry_days,
            "expire_negative_overtime": user.expire_negative_overtime,
            "vacation_days_total": user.vacation_days_total,
            "vacation_show_in_header": user.vacation_show_in_header,
            "workhours_counter_enabled": user.workhours_counter_enabled,
            "workhours_counter_show_in_header": user.workhours_counter_show_in_header,
            "workhours_counter_start_date": (
                user.workhours_counter_start_date.isoformat() if user.workhours_counter_start_date else None
            ),
            "workhours_counter_end_date": (
                user.workhours_counter_end_date.isoformat() if user.workhours_counter_end_date else None
            ),
            "workhours_counter_manual_offset_minutes": user.workhours_counter_manual_offset_minutes,
            "workhours_counter_target_minutes": user.workhours_counter_target_minutes,
            "workhours_counter_target_email_enabled": user.workhours_counter_target_email_enabled,
            "federal_state": user.federal_state,
        },
        "weekly_target_rules": [
            {
                "effective_from": rule.effective_from.isoformat(),
                "weekly_target_minutes": rule.weekly_target_minutes,
            }
            for rule in rules
        ],
        "time_entries": [
            {
                "date": entry.date.isoformat(),
                "start_minutes": entry.start_minutes,
                "end_minutes": entry.end_minutes,
                "break_minutes": entry.break_minutes,
                "break_rule_mode": entry.break_rule_mode,
                "notes": entry.notes,
            }
            for entry in time_entries
        ],
        "vacation_periods": [
            {
                "start_date": period.start_date.isoformat(),
                "end_date": period.end_date.isoformat(),
                "include_weekends": period.include_weekends,
                "notes": period.notes,
            }
            for period in vacation_periods
        ],
        "special_day_statuses": [
            {
                "date": status.date.isoformat(),
                "status": status.status,
                "notes": status.notes,
            }
            for status in special_statuses
        ],
        "overtime_adjustments": [
            {
                "date": adjustment.date.isoformat(),
                "minutes": adjustment.minutes,
                "notes": adjustment.notes,
            }
            for adjustment in overtime_adjustments
        ],
    }
user.overtime_start_date else None, + "overtime_expiry_days": user.overtime_expiry_days, + "expire_negative_overtime": user.expire_negative_overtime, + "vacation_days_total": user.vacation_days_total, + "vacation_show_in_header": user.vacation_show_in_header, + "workhours_counter_enabled": user.workhours_counter_enabled, + "workhours_counter_show_in_header": user.workhours_counter_show_in_header, + "workhours_counter_start_date": ( + user.workhours_counter_start_date.isoformat() if user.workhours_counter_start_date else None + ), + "workhours_counter_end_date": ( + user.workhours_counter_end_date.isoformat() if user.workhours_counter_end_date else None + ), + "workhours_counter_manual_offset_minutes": user.workhours_counter_manual_offset_minutes, + "workhours_counter_target_minutes": user.workhours_counter_target_minutes, + "workhours_counter_target_email_enabled": user.workhours_counter_target_email_enabled, + "federal_state": user.federal_state, + }, + "weekly_target_rules": [ + { + "effective_from": rule.effective_from.isoformat(), + "weekly_target_minutes": rule.weekly_target_minutes, + } + for rule in rules + ], + "time_entries": [ + { + "date": entry.date.isoformat(), + "start_minutes": entry.start_minutes, + "end_minutes": entry.end_minutes, + "break_minutes": entry.break_minutes, + "break_rule_mode": entry.break_rule_mode, + "notes": entry.notes, + } + for entry in time_entries + ], + "vacation_periods": [ + { + "start_date": period.start_date.isoformat(), + "end_date": period.end_date.isoformat(), + "include_weekends": period.include_weekends, + "notes": period.notes, + } + for period in vacation_periods + ], + "special_day_statuses": [ + { + "date": status.date.isoformat(), + "status": status.status, + "notes": status.notes, + } + for status in special_statuses + ], + "overtime_adjustments": [ + { + "date": adjustment.date.isoformat(), + "minutes": adjustment.minutes, + "notes": adjustment.notes, + } + for adjustment in overtime_adjustments + ], + } + + 
def import_preview_view_data(*, db: Session, user: User, preview: ImportPreview, payload: dict) -> dict:
    """Build the template context for a backup-import preview, tagged with its id."""
    summary = build_import_preview(db=db, user=user, payload=payload, mode=preview.mode)
    summary["id"] = preview.id
    return summary


@app.on_event("startup")
async def startup_auto_holiday_sync() -> None:
    """On app startup, refresh auto-generated holidays and auto entries for all users.

    Sync window: roughly one year back (366 days before Jan 1) to two years
    ahead (730 days past Dec 31) of the current year.
    """
    with Session(get_engine()) as db:
        sync_auto_holidays_for_all_users(
            db=db,
            from_date=date.today().replace(month=1, day=1) - timedelta(days=366),
            to_date=date.today().replace(month=12, day=31) + timedelta(days=730),
        )
        sync_auto_entries_for_all_users(db=db)
        db.commit()


@app.get("/health")
async def health() -> dict:
    """Liveness probe."""
    return {"status": "ok"}


@app.get("/manifest.webmanifest")
async def web_manifest() -> JSONResponse:
    """Serve the PWA manifest with environment-specific, cache-busted icons.

    Non-production environments get the distinct "stage" icon set so testers
    can tell installs apart. Responses are marked no-store because the icon
    cache-busting query string changes with asset_version.
    """
    with open("app/static/manifest.webmanifest", "r", encoding="utf-8") as manifest_file:
        manifest = json.load(manifest_file)

    icon_suffix = f"?v={asset_version}"
    if settings.app_env != "production":
        manifest["icons"] = [
            {
                "src": f"/static/icons/pwa-stage-192.png{icon_suffix}",
                "sizes": "192x192",
                "type": "image/png",
            },
            {
                "src": f"/static/icons/pwa-stage-512.png{icon_suffix}",
                "sizes": "512x512",
                "type": "image/png",
            },
        ]
    else:
        manifest["icons"] = [
            {
                "src": f"/static/icons/pwa-192.png{icon_suffix}",
                "sizes": "192x192",
                "type": "image/png",
            },
            {
                "src": f"/static/icons/pwa-512.png{icon_suffix}",
                "sizes": "512x512",
                "type": "image/png",
            },
        ]

    response = JSONResponse(manifest, media_type="application/manifest+json")
    response.headers["Cache-Control"] = "no-store"
    return response


def render_legal_page(request: Request, *, db: Session, key: str, title: str, subtitle: str | None = None) -> HTMLResponse:
    """Render a CMS-backed legal page: fetch markdown by key, sanitize, and render."""
    markdown_text = get_site_content_markdown(db, key)
    html_content = render_safe_markdown(markdown_text)
    user = get_current_user(request, db)
    return templates.TemplateResponse(
        "pages/legal_page.html",
        build_context(
            request,
            user=user,
            db=db,
            title=title,
            subtitle=subtitle,
            content_html=html_content,
        ),
    )


@app.get("/impressum", response_class=HTMLResponse)
async def impressum_page(request: Request, db: Session = Depends(get_db)):
    """Imprint page (German legal requirement), content managed via site content."""
    return render_legal_page(
        request,
        db=db,
        key=SITE_CONTENT_IMPRESSUM,
        title="Impressum",
        subtitle="Angaben zum Anbieter und Kontakt.",
    )


@app.get("/datenschutz", response_class=HTMLResponse)
async def privacy_page(request: Request, db: Session = Depends(get_db)):
    """Privacy policy page, content managed via site content."""
    return render_legal_page(
        request,
        db=db,
        key=SITE_CONTENT_PRIVACY,
        title="Datenschutz",
        subtitle="Informationen zur Verarbeitung personenbezogener Daten in Stundenfuchs.",
    )


@app.get("/kontakt", response_class=HTMLResponse)
async def contact_form(request: Request, db: Session = Depends(get_db)):
    """Contact form; pre-fills email for logged-in users and stamps an anti-bot timestamp."""
    user = get_current_user(request, db)
    return templates.TemplateResponse(
        "pages/contact.html",
        build_context(
            request,
            user=user,
            db=db,
            title="Kontakt",
            category_options=ticket_category_options(),
            # Server-issued timestamp used by contact_submit to reject
            # too-fast (bot) submissions.
            contact_started_at=contact_form_started_at(request),
            contact_name="",
            contact_email=user.email if user else "",
            contact_subject="",
            contact_message="",
            contact_category="problem",
            success_message="Deine Nachricht wurde gesendet."
            if request.query_params.get("msg") == "sent"
            else None,
        ),
    )
@app.post("/kontakt", response_class=HTMLResponse)
async def contact_submit(
    request: Request,
    category: str = Form(default="problem"),
    name: str = Form(default=""),
    email: str = Form(...),
    subject: str = Form(...),
    message: str = Form(...),
    website: str = Form(default=""),
    started_at: str = Form(default=""),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Validate and store a contact/support request as a SupportTicket.

    Anti-spam layers, in order: CSRF token, honeypot field ("website"),
    field validation, session-bound form timestamp (rejects replays and
    submissions faster than SUPPORT_TICKET_MIN_FORM_SECONDS), and an
    IP-hash/email rate limit.
    """
    verify_csrf(request, csrf_token)
    user = get_current_user(request, db)

    normalized_name = name.strip()
    normalized_email = email.strip().lower()
    normalized_subject = subject.strip()
    normalized_message = message.strip()
    started_at_expected = request.session.get("contact_form_started_at")

    def render_contact_error(error_message: str, *, status_code: int = status.HTTP_400_BAD_REQUEST) -> HTMLResponse:
        # Re-issue a fresh form timestamp so the user can retry immediately.
        issue_contact_form_started_at(request)
        return templates.TemplateResponse(
            "pages/contact.html",
            build_context(
                request,
                user=user,
                db=db,
                title="Kontakt",
                category_options=ticket_category_options(),
                contact_started_at=contact_form_started_at(request),
                contact_name=normalized_name,
                contact_email=normalized_email,
                contact_subject=normalized_subject,
                contact_message=normalized_message,
                contact_category=category,
                error=error_message,
            ),
            status_code=status_code,
        )

    # Honeypot: real users never fill the hidden "website" field.
    if website.strip():
        return render_contact_error("Nachricht konnte nicht versendet werden.", status_code=status.HTTP_429_TOO_MANY_REQUESTS)
    if category not in {item["value"] for item in ticket_category_options()}:
        return render_contact_error("Bitte eine gültige Kategorie auswählen.")
    if not normalized_email or "@" not in normalized_email:
        return render_contact_error("Bitte eine gültige E-Mail-Adresse eingeben.")
    if len(normalized_subject) < 4 or len(normalized_subject) > 180:
        return render_contact_error("Der Betreff muss zwischen 4 und 180 Zeichen lang sein.")
    if len(normalized_message) < 10 or len(normalized_message) > 5000:
        return render_contact_error("Die Nachricht muss zwischen 10 und 5000 Zeichen lang sein.")
    # The posted timestamp must match the one issued into this session.
    if started_at_expected != started_at:
        return render_contact_error("Das Formular ist abgelaufen. Bitte erneut absenden.")
    try:
        started_at_value = datetime.fromisoformat(started_at)
    except ValueError:
        return render_contact_error("Das Formular ist abgelaufen. Bitte erneut absenden.")
    # Reject submissions faster than a human could plausibly type.
    if ensure_utc_datetime(started_at_value) > utc_now() - timedelta(seconds=SUPPORT_TICKET_MIN_FORM_SECONDS):
        return render_contact_error(
            "Die Nachricht wurde zu schnell abgesendet. Bitte kurz warten und erneut versuchen.",
            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
        )

    ip_hash = support_ticket_ip_hash(request)
    if support_ticket_rate_limited(db=db, ip_hash=ip_hash, email=normalized_email):
        return render_contact_error(
            "Es wurden in kurzer Zeit bereits zu viele Nachrichten gesendet. Bitte später erneut versuchen.",
            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
        )

    ticket = SupportTicket(
        user_id=user.id if user else None,
        category=category,
        status=SUPPORT_TICKET_STATUS_OPEN,
        name=normalized_name,
        email=normalized_email,
        subject=normalized_subject,
        message=normalized_message,
        source_ip_hash=ip_hash,
        # Truncate the UA string to the column width.
        source_user_agent=(request.headers.get("user-agent", "") or "")[:512],
    )
    db.add(ticket)
    db.commit()
    send_support_ticket_notification(db=db, ticket=ticket)
    issue_contact_form_started_at(request)
    # Post/Redirect/Get so a refresh cannot resubmit the ticket.
    return RedirectResponse(url="/kontakt?msg=sent", status_code=status.HTTP_303_SEE_OTHER)


@app.get("/", response_class=HTMLResponse)
async def root(request: Request, db: Session = Depends(get_db)):
    """Landing page; authenticated users are sent to their preferred home view."""
    user = get_current_user(request, db)
    if user:
        return RedirectResponse(url=user_home_url(user), status_code=status.HTTP_303_SEE_OTHER)
    return templates.TemplateResponse(
        "pages/landing.html",
        build_context(request),
    )


@app.get("/register", response_class=HTMLResponse)
async def register_form(request: Request, db: Session = Depends(get_db)):
    """Registration form; already-authenticated users are redirected home."""
    user = get_current_user(request, db)
    if user:
        return RedirectResponse(url=user_home_url(user), status_code=status.HTTP_303_SEE_OTHER)
    return templates.TemplateResponse(
        "pages/register.html",
        build_context(
            request,
            error=None,
            federal_state_options=GERMAN_STATE_OPTIONS,
            weekday_options=weekday_options,
            today_iso=date.today().isoformat(),
            # Email-based MFA can only be offered when SMTP is configured.
            email_mfa_available=resolve_mail_settings(db) is not None,
        ),
    )
@app.get("/hilfe", response_class=HTMLResponse)
async def help_page(request: Request, db: Session = Depends(get_db)):
    """Help page; requires an authenticated user."""
    user = require_user(request, db)
    return templates.TemplateResponse(
        "pages/help.html",
        build_context(request, user=user),
    )


@app.post("/register", response_class=HTMLResponse)
async def register_submit(
    request: Request,
    email: str = Form(...),
    password: str = Form(...),
    backup_file: UploadFile | None = File(default=None),
    federal_state: str = Form(default=""),
    vacation_days_total_value: str = Form(default="", alias="vacation_days_total"),
    weekly_target_hours_value: str = Form(default="", alias="weekly_target_hours"),
    vacation_show_in_header: str | None = Form(default=None),
    preferred_home_view: str = Form(default=""),
    entry_mode: str = Form(default="manual"),
    overtime_start_date_value: str = Form(default="", alias="overtime_start_date"),
    overtime_expiry_days_value: str = Form(default="", alias="overtime_expiry_days"),
    expire_negative_overtime: str | None = Form(default=None),
    workhours_counter_enabled: str | None = Form(default=None),
    workhours_counter_show_in_header: str | None = Form(default=None),
    workhours_counter_start_date_value: str = Form(default="", alias="workhours_counter_start_date"),
    workhours_counter_end_date_value: str = Form(default="", alias="workhours_counter_end_date"),
    workhours_counter_manual_offset_hours_value: str = Form(default="", alias="workhours_counter_manual_offset_hours"),
    workhours_counter_target_hours_value: str = Form(default="", alias="workhours_counter_target_hours"),
    workhours_counter_target_email_enabled: str | None = Form(default=None),
    # NOTE(review): mutable default in Form(default=[]) — FastAPI treats this
    # as a field default, but confirm the list is never mutated in place.
    working_days_values: list[str] = Form(default=[], alias="working_days"),
    count_vacation_as_worktime: str | None = Form(default=None),
    count_holiday_as_worktime: str | None = Form(default=None),
    count_sick_as_worktime: str | None = Form(default=None),
    automatic_break_rules_enabled: str | None = Form(default=None),
    mfa_preference: str = Form(default="none"),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Create a new account from the registration form.

    Pipeline: CSRF check -> credential validation (pydantic RegisterRequest)
    -> optional backup-file parse -> duplicate-email check -> per-field
    validation of all optional settings -> User creation -> post-create side
    effects (holiday sync, backup import or default rule + autofill) ->
    either email-verification redirect or direct login (with optional MFA
    setup redirects).

    If a backup file is supplied, all form-provided settings are discarded
    and defaults are used; the backup import then overwrites them.
    """
    verify_csrf(request, csrf_token)

    def render_register_error(message: str, status_code: int = status.HTTP_400_BAD_REQUEST) -> HTMLResponse:
        # Re-render the form with an error; checkbox state is not preserved.
        return templates.TemplateResponse(
            "pages/register.html",
            build_context(
                request,
                error=message,
                federal_state_options=GERMAN_STATE_OPTIONS,
                weekday_options=weekday_options,
                today_iso=date.today().isoformat(),
                email_mfa_available=resolve_mail_settings(db) is not None,
            ),
            status_code=status_code,
        )

    try:
        payload = RegisterRequest(email=email, password=password)
    except Exception as exc:  # pydantic validation
        return render_register_error(str(exc))

    backup_payload: dict | None = None
    if backup_file and (backup_file.filename or "").strip():
        try:
            backup_payload = load_backup_payload_from_bytes(await backup_file.read())
        except BackupImportError as exc:
            return render_register_error(str(exc))

    existing = find_user_by_email(db, payload.email)
    if existing:
        return render_register_error("E-Mail ist bereits registriert", status.HTTP_409_CONFLICT)

    normalized_state = normalize_german_state_code(federal_state)
    if federal_state.strip() and normalized_state is None:
        return render_register_error("Ungültiges Bundesland ausgewählt.")

    # Default working days: Monday-Friday (0-4).
    selected_working_days = [0, 1, 2, 3, 4]
    if working_days_values:
        try:
            selected_working_days = parse_weekday_values(working_days_values)
        except HTTPException as exc:
            return render_register_error(exc.detail, exc.status_code)

    selected_home_view = preferred_home_view.strip() or "week"
    if selected_home_view not in {"week", "month"}:
        return render_register_error("Ungültige Standardansicht.")

    selected_entry_mode = entry_mode.strip() or ENTRY_MODE_MANUAL
    if selected_entry_mode not in {ENTRY_MODE_MANUAL, ENTRY_MODE_AUTO_UNTIL_TODAY}:
        return render_register_error("Ungültiger Erfassungsmodus.")

    overtime_start_date = None
    if overtime_start_date_value.strip():
        try:
            overtime_start_date = parse_date_query(overtime_start_date_value.strip())
        except HTTPException as exc:
            return render_register_error(exc.detail, exc.status_code)

    overtime_expiry_days = None
    if overtime_expiry_days_value.strip():
        try:
            overtime_expiry_days = int(overtime_expiry_days_value.strip())
        except ValueError:
            return render_register_error("Verfall muss eine ganze Zahl in Tagen sein.")
        if overtime_expiry_days <= 0:
            return render_register_error("Verfall muss größer als 0 sein.")
        if overtime_expiry_days > 3650:
            return render_register_error("Verfall ist zu groß (maximal 3650 Tage).")

    vacation_days_total = 0
    if vacation_days_total_value.strip():
        try:
            vacation_days_total = int(vacation_days_total_value.strip())
        except ValueError:
            return render_register_error("Gesamturlaubstage müssen eine ganze Zahl sein.")
        if vacation_days_total < 0:
            return render_register_error("Gesamturlaubstage dürfen nicht negativ sein.")
        if vacation_days_total > 365:
            return render_register_error("Gesamturlaubstage sind zu groß (maximal 365).")

    # Default weekly target: 1500 minutes (25 h). Hours accept a German
    # decimal comma.
    weekly_target_minutes = 1500
    if weekly_target_hours_value.strip():
        try:
            weekly_target_hours = float(weekly_target_hours_value.strip().replace(",", "."))
        except ValueError:
            return render_register_error("Wochenstunden müssen eine Zahl sein.")
        weekly_target_minutes = int(round(weekly_target_hours * 60))
        if weekly_target_minutes <= 0:
            return render_register_error("Wochenstunden müssen größer als 0 sein.")

    counter_enabled = workhours_counter_enabled == "on"
    counter_show_in_header = workhours_counter_show_in_header == "on"
    counter_target_email_enabled = workhours_counter_target_email_enabled == "on"
    counter_start_date = None
    counter_end_date = None
    counter_manual_offset_minutes = 0
    counter_target_minutes: int | None = None

    if workhours_counter_start_date_value.strip():
        try:
            counter_start_date = parse_date_query(workhours_counter_start_date_value.strip())
        except HTTPException as exc:
            return render_register_error(exc.detail, exc.status_code)
    if workhours_counter_end_date_value.strip():
        try:
            counter_end_date = parse_date_query(workhours_counter_end_date_value.strip())
        except HTTPException as exc:
            return render_register_error(exc.detail, exc.status_code)
    if counter_enabled:
        if counter_start_date is None or counter_end_date is None:
            return render_register_error("Bitte Start- und Enddatum für den Arbeitsstunden-Counter setzen.")
        if counter_end_date < counter_start_date:
            return render_register_error("Enddatum darf nicht vor dem Startdatum liegen.")
    if workhours_counter_manual_offset_hours_value.strip():
        try:
            counter_manual_offset_hours = float(workhours_counter_manual_offset_hours_value.strip().replace(",", "."))
        except ValueError:
            return render_register_error("Zusatzstunden müssen eine Zahl sein.")
        counter_manual_offset_minutes = int(round(counter_manual_offset_hours * 60))
        if counter_manual_offset_minutes < 0:
            return render_register_error("Zusatzstunden dürfen nicht negativ sein.")
    if workhours_counter_target_hours_value.strip():
        try:
            counter_target_hours = float(workhours_counter_target_hours_value.strip().replace(",", "."))
        except ValueError:
            return render_register_error("Stundenziel muss eine Zahl sein.")
        counter_target_minutes = int(round(counter_target_hours * 60))
        if counter_target_minutes <= 0:
            return render_register_error("Stundenziel muss größer als 0 sein.")

    if mfa_preference not in {MFA_METHOD_NONE, MFA_METHOD_EMAIL, MFA_METHOD_TOTP}:
        return render_register_error("Ungültige 2FA-Auswahl.")

    mail_settings_available = resolve_mail_settings(db) is not None
    verification_enabled = settings.email_verification_required and mail_settings_available

    # Email MFA silently degrades to none when no mail settings exist; the
    # user is informed via a redirect message at the end.
    selected_mfa_method = MFA_METHOD_NONE
    if mfa_preference == MFA_METHOD_EMAIL and mail_settings_available:
        selected_mfa_method = MFA_METHOD_EMAIL

    if backup_payload is not None:
        # A backup import supersedes all form settings: reset everything to
        # defaults; the import will apply the backed-up settings instead.
        selected_home_view = "week"
        selected_entry_mode = ENTRY_MODE_MANUAL
        selected_working_days = [0, 1, 2, 3, 4]
        normalized_state = None
        overtime_start_date = None
        overtime_expiry_days = None
        vacation_days_total = 0
        weekly_target_minutes = 1500
        counter_enabled = False
        counter_show_in_header = False
        counter_start_date = None
        counter_end_date = None
        counter_manual_offset_minutes = 0
        counter_target_minutes = None

    user = User(
        email=payload.email.lower(),
        password_hash=hash_password(payload.password),
        # The configured bootstrap identity becomes admin automatically.
        role="admin" if is_bootstrap_admin_identity(payload.email) else "user",
        preferred_home_view=selected_home_view,
        entry_mode=selected_entry_mode,
        working_days_csv=serialize_working_days(selected_working_days),
        count_vacation_as_worktime=count_vacation_as_worktime == "on",
        count_holiday_as_worktime=count_holiday_as_worktime == "on",
        count_sick_as_worktime=count_sick_as_worktime == "on",
        automatic_break_rules_enabled=automatic_break_rules_enabled == "on",
        overtime_start_date=overtime_start_date,
        overtime_expiry_days=overtime_expiry_days,
        expire_negative_overtime=expire_negative_overtime == "on",
        vacation_days_total=vacation_days_total,
        weekly_target_minutes=weekly_target_minutes,
        # Unsubmitted checkbox defaults to shown.
        vacation_show_in_header=(vacation_show_in_header == "on" if vacation_show_in_header is not None else True),
        workhours_counter_enabled=counter_enabled,
        workhours_counter_show_in_header=counter_show_in_header and counter_enabled,
        workhours_counter_start_date=counter_start_date,
        workhours_counter_end_date=counter_end_date,
        workhours_counter_manual_offset_minutes=counter_manual_offset_minutes,
        workhours_counter_target_minutes=counter_target_minutes,
        workhours_counter_target_email_enabled=(
            counter_target_email_enabled
            and counter_enabled
            and counter_target_minutes is not None
            and mail_settings_available
        ),
        federal_state=normalized_state,
        mfa_method=selected_mfa_method,
        # When verification is required, the account stays unverified until
        # the emailed link is clicked.
        email_verified=not verification_enabled,
    )
    db.add(user)
    db.commit()
    db.refresh(user)
    send_registration_admin_notification(db=db, user=user, source="register_form")
    if normalized_state:
        sync_auto_holidays_for_user(
            db=db,
            user=user,
            from_date=date.today().replace(month=1, day=1) - timedelta(days=366),
            to_date=date.today().replace(month=12, day=31) + timedelta(days=730),
        )
    if backup_payload is not None:
        execute_backup_import(db=db, user=user, payload=backup_payload, mode=IMPORT_MODE_REPLACE)
        sync_auto_holidays_for_user(
            db=db,
            user=user,
            from_date=date.today().replace(month=1, day=1) - timedelta(days=366),
            to_date=date.today().replace(month=12, day=31) + timedelta(days=730),
        )
        autofill_entries_for_range(
            db=db,
            user=user,
            range_start=date(1970, 1, 1),
            range_end=date.today(),
        )
    else:
        ensure_user_has_default_target_rule(db, user)
        if selected_entry_mode == ENTRY_MODE_AUTO_UNTIL_TODAY:
            autofill_entries_for_range(
                db=db,
                user=user,
                range_start=user.created_at.date(),
                range_end=date.today(),
            )
    db.commit()

    if verification_enabled:
        sent, reason = send_email_verification_link(request=request, db=db, user=user)
        logger.info("register_pending_verification email=%s sent=%s reason=%s", user.email, sent, reason)
        if not sent and reason != "rate_limited":
            return RedirectResponse(url="/login?msg=email_verification_send_failed", status_code=status.HTTP_303_SEE_OTHER)
        return RedirectResponse(url="/login?msg=email_verification_sent", status_code=status.HTTP_303_SEE_OTHER)

    send_registration_email_if_enabled(db=db, user=user)
    logger.info("register_success email=%s", user.email)
    login_user(request, user)
    if mfa_preference == MFA_METHOD_TOTP:
        # TOTP cannot be finished at registration: stash a fresh secret and
        # send the user to settings to complete enrollment.
        request.session["mfa_setup_secret"] = generate_totp_secret()
        return RedirectResponse(url="/settings?msg=mfa_setup_required", status_code=status.HTTP_303_SEE_OTHER)
    if mfa_preference == MFA_METHOD_EMAIL and selected_mfa_method == MFA_METHOD_NONE:
        # Email MFA was requested but mail is not configured.
        return RedirectResponse(url="/settings?msg=mfa_email_unavailable", status_code=status.HTTP_303_SEE_OTHER)
    return RedirectResponse(url=user_home_url(user), status_code=status.HTTP_303_SEE_OTHER)


@app.get("/login", response_class=HTMLResponse)
async def login_form(request: Request, db: Session = Depends(get_db)):
    """Login form; translates ?msg= query flags into success/error banners."""
    user = get_current_user(request, db)
    if user:
        return RedirectResponse(url=user_home_url(user), status_code=status.HTTP_303_SEE_OTHER)
    msg = request.query_params.get("msg")
    success_message = None
    error_message = None
    if msg == "password_reset_done":
        success_message = "Passwort wurde erfolgreich gesetzt. Bitte jetzt anmelden."
    elif msg == "account_deleted":
        success_message = "Dein Konto und alle zugehörigen Daten wurden gelöscht."
    elif msg == "email_verification_sent":
        success_message = "Bitte bestätige zuerst deine E-Mail-Adresse über den Link in der E-Mail."
    elif msg == "email_verified":
        success_message = "E-Mail-Adresse bestätigt. Du kannst dich jetzt anmelden."
    elif msg == "email_verification_send_failed":
        error_message = (
            "Konto wurde erstellt, aber die Bestätigungs-E-Mail konnte nicht versendet werden. "
            "Bitte fordere einen neuen Link an."
        )
    return templates.TemplateResponse(
        "pages/login.html",
        build_context(request, error=error_message, success_message=success_message),
    )
@app.post("/login", response_class=HTMLResponse)
async def login_submit(
    request: Request,
    email: str = Form(...),
    password: str = Form(...),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Authenticate a user.

    Order matters: CSRF -> payload validation -> rate-limit check ->
    credential check (failed attempts are recorded) -> email-verification
    gate -> MFA challenge or direct login.
    """
    verify_csrf(request, csrf_token)

    try:
        payload = LoginRequest(email=email, password=password)
    except Exception as exc:
        return templates.TemplateResponse(
            "pages/login.html",
            build_context(request, error=str(exc)),
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    client_ip = get_client_ip(request)
    blocked, retry_minutes = is_login_blocked(
        db,
        payload.email,
        client_ip,
        settings.login_rate_limit_attempts,
        settings.login_rate_limit_window_minutes,
    )
    if blocked:
        logger.warning("login_blocked email=%s ip=%s", payload.email.lower(), client_ip)
        return templates.TemplateResponse(
            "pages/login.html",
            build_context(request, error=f"Zu viele Fehlversuche. Bitte in {retry_minutes} Minuten erneut versuchen."),
            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
        )

    user = find_user_by_email(db, payload.email)
    # One generic message for unknown/inactive/wrong-password to avoid
    # account enumeration.
    if not user or not user.is_active or not verify_password(payload.password, user.password_hash):
        register_failed_attempt(db, payload.email, client_ip)
        logger.warning("login_failed email=%s ip=%s", payload.email.lower(), client_ip)
        return templates.TemplateResponse(
            "pages/login.html",
            build_context(request, error="Ungueltige Anmeldedaten"),
            status_code=status.HTTP_401_UNAUTHORIZED,
        )
    if settings.email_verification_required and not user.email_verified:
        logger.info("login_blocked_unverified email=%s ip=%s", payload.email.lower(), client_ip)
        return templates.TemplateResponse(
            "pages/login.html",
            build_context(
                request,
                error=(
                    "Bitte zuerst deine E-Mail-Adresse bestätigen. "
                    "Du kannst unten einen neuen Bestätigungslink anfordern."
                ),
            ),
            status_code=status.HTTP_403_FORBIDDEN,
        )

    register_successful_attempt(db, payload.email, client_ip)
    logger.info("login_success email=%s ip=%s", payload.email.lower(), client_ip)
    may_login_directly, mfa_error = start_mfa_challenge(request, db, user)
    if mfa_error:
        return templates.TemplateResponse(
            "pages/login.html",
            build_context(request, error=mfa_error),
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
        )
    if may_login_directly:
        login_user(request, user)
        return RedirectResponse(url=user_home_url(user), status_code=status.HTTP_303_SEE_OTHER)
    return RedirectResponse(url="/login/mfa", status_code=status.HTTP_303_SEE_OTHER)


@app.get("/login/mfa", response_class=HTMLResponse)
async def login_mfa_form(request: Request, db: Session = Depends(get_db)):
    """Second-factor code entry form; only reachable with a pending MFA session."""
    current_user = get_current_user(request, db)
    if current_user:
        return RedirectResponse(url=user_home_url(current_user), status_code=status.HTTP_303_SEE_OTHER)
    pending_user, pending_method = get_pending_mfa_user(request, db)
    if not pending_user or not pending_method:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)
    return templates.TemplateResponse(
        "pages/mfa_challenge.html",
        build_context(
            request,
            error=None,
            mfa_method=pending_method,
            mfa_method_label=MFA_METHOD_LABELS.get(pending_method, "2FA"),
            mfa_is_email=pending_method == MFA_METHOD_EMAIL,
        ),
    )


@app.post("/login/mfa", response_class=HTMLResponse)
async def login_mfa_submit(
    request: Request,
    code: str = Form(...),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Verify the submitted MFA code and complete the login on success."""
    verify_csrf(request, csrf_token)
    user, error = verify_pending_mfa_code(request, db, code)
    if error or not user:
        pending_user, pending_method = get_pending_mfa_user(request, db)
        # The pending session may have been consumed/expired by the failed
        # attempt; fall back to the login form in that case.
        if not pending_user or not pending_method:
            return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)
        return templates.TemplateResponse(
            "pages/mfa_challenge.html",
            build_context(
                request,
                error=error or "Ungueltiger Code",
                mfa_method=pending_method,
                mfa_method_label=MFA_METHOD_LABELS.get(pending_method, "2FA"),
                mfa_is_email=pending_method == MFA_METHOD_EMAIL,
            ),
            status_code=status.HTTP_401_UNAUTHORIZED,
        )
    login_user(request, user)
    return RedirectResponse(url=user_home_url(user), status_code=status.HTTP_303_SEE_OTHER)
@app.post("/login/mfa/resend", response_class=HTMLResponse)
async def login_mfa_resend(
    request: Request,
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Resend the email MFA code for a pending login, subject to a cooldown."""
    verify_csrf(request, csrf_token)
    user, pending_method = get_pending_mfa_user(request, db)
    if not user or not pending_method:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)
    # Resending only makes sense for the email method; TOTP codes are
    # generated by the user's authenticator.
    if pending_method != MFA_METHOD_EMAIL:
        return RedirectResponse(url="/login/mfa", status_code=status.HTTP_303_SEE_OTHER)

    if email_mfa_resend_cooldown_active(user):
        return templates.TemplateResponse(
            "pages/mfa_challenge.html",
            build_context(
                request,
                error="Bitte kurz warten, bevor ein neuer Code gesendet wird.",
                mfa_method=pending_method,
                mfa_method_label=MFA_METHOD_LABELS.get(pending_method, "2FA"),
                mfa_is_email=True,
            ),
            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
        )

    if not send_email_mfa_code(db=db, user=user):
        return templates.TemplateResponse(
            "pages/mfa_challenge.html",
            build_context(
                request,
                error="Neuer Code konnte nicht versendet werden.",
                mfa_method=pending_method,
                mfa_method_label=MFA_METHOD_LABELS.get(pending_method, "2FA"),
                mfa_is_email=True,
            ),
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
        )
    return templates.TemplateResponse(
        "pages/mfa_challenge.html",
        build_context(
            request,
            success_message="Neuer Code wurde versendet.",
            error=None,
            mfa_method=pending_method,
            mfa_method_label=MFA_METHOD_LABELS.get(pending_method, "2FA"),
            mfa_is_email=True,
        ),
    )


@app.post("/logout")
async def logout(
    request: Request,
    csrf_token: str = Form(default=""),
    db: Session = Depends(get_db),
):
    """Clear the session and redirect to login.

    CSRF is only verified for authenticated sessions; anonymous requests are
    simply redirected.
    """
    user = get_current_user(request, db)
    if not user:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)

    verify_csrf(request, csrf_token)
    logger.info("logout email=%s", user.email)
    request.session.clear()
    return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)
@app.get("/verify-email", response_class=HTMLResponse)
async def verify_email(request: Request, token: str = Query(...), db: Session = Depends(get_db)):
    """Consume an email-verification token: mark the account verified and
    clear the stored token state so the link cannot be reused."""
    user = get_user_by_email_verification_token(db, token)
    if not user:
        return templates.TemplateResponse(
            "pages/email_verification_result.html",
            build_context(
                request,
                success=False,
                message="Der Bestätigungslink ist ungültig oder abgelaufen.",
            ),
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    user.email_verified = True
    user.email_verification_token_hash = None
    user.email_verification_expires_at = None
    user.email_verification_sent_at = None
    db.add(user)
    db.commit()
    logger.info("email_verified email=%s", user.email)
    send_registration_email_if_enabled(db=db, user=user)
    return RedirectResponse(url="/login?msg=email_verified", status_code=status.HTTP_303_SEE_OTHER)


@app.get("/verify-email/resend", response_class=HTMLResponse)
async def resend_verification_form(request: Request, db: Session = Depends(get_db)):
    """Form to request a fresh verification link (anonymous users only)."""
    user = get_current_user(request, db)
    if user:
        return RedirectResponse(url=user_home_url(user), status_code=status.HTTP_303_SEE_OTHER)
    return templates.TemplateResponse(
        "pages/email_verification_resend.html",
        build_context(request, error=None, success_message=None),
    )


@app.post("/verify-email/resend", response_class=HTMLResponse)
async def resend_verification_submit(
    request: Request,
    email: str = Form(...),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Resend a verification link.

    Always answers with the same generic success message, regardless of
    whether the address exists or is already verified, to prevent account
    enumeration.
    """
    verify_csrf(request, csrf_token)

    generic_message = "Wenn ein unbestätigtes Konto mit dieser E-Mail existiert, wurde ein neuer Link versendet."
    try:
        # Reuse RegisterRequest for email normalization; the password is a
        # throwaway placeholder that satisfies validation.
        normalized_email = RegisterRequest(email=email, password="validplaceholder123").email.lower()
    except Exception:
        return templates.TemplateResponse(
            "pages/email_verification_resend.html",
            build_context(request, error=None, success_message=generic_message),
        )

    user = find_user_by_email(db, normalized_email)
    if user and user.is_active and not user.email_verified and is_email_verification_enabled(db):
        sent, reason = send_email_verification_link(request=request, db=db, user=user)
        logger.info("email_verification_resend email=%s sent=%s reason=%s", user.email, sent, reason)
    return templates.TemplateResponse(
        "pages/email_verification_resend.html",
        build_context(request, error=None, success_message=generic_message),
    )


@app.get("/password-reset/request", response_class=HTMLResponse)
async def password_reset_request_form(request: Request, db: Session = Depends(get_db)):
    """Form to request a password-reset email (anonymous users only)."""
    user = get_current_user(request, db)
    if user:
        return RedirectResponse(url=user_home_url(user), status_code=status.HTTP_303_SEE_OTHER)
    return templates.TemplateResponse(
        "pages/password_reset_request.html",
        build_context(request, error=None, success_message=None),
    )


@app.post("/password-reset/request", response_class=HTMLResponse)
async def password_reset_request_submit(
    request: Request,
    email: str = Form(...),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Issue a password-reset token and email the reset link.

    Every exit renders the same generic success message so the response does
    not reveal whether the address belongs to an account (anti-enumeration).
    Older tokens for the user are invalidated before a new one is issued.
    """
    verify_csrf(request, csrf_token)
    generic_message = (
        "Wenn ein Konto mit dieser E-Mail existiert und Mailversand aktiv ist, wurde ein Reset-Link versendet."
    )

    try:
        normalized_email = RegisterRequest(email=email, password="validplaceholder123").email.lower()
    except Exception:
        # FIX: previously this branch rendered the generic text via
        # `error=generic_message`, styling it as an error — inconsistent with
        # every other exit of this handler (and with
        # resend_verification_submit), and a distinguishable response that
        # leaked that validation failed. Render it as the same success banner.
        return templates.TemplateResponse(
            "pages/password_reset_request.html",
            build_context(request, error=None, success_message=generic_message),
        )

    user = find_user_by_email(db, normalized_email)
    config = get_email_config(db)
    if user and user.is_active and config and config.password_reset_mails_enabled:
        mail_settings = resolve_mail_settings(db)
        if mail_settings:
            # Only one valid reset token per user at a time.
            invalidate_password_reset_tokens(db=db, user_id=user.id)
            raw_token = generate_reset_token()
            token_hash_value = hash_token(raw_token)
            reset_token = PasswordResetToken(
                user_id=user.id,
                token_hash=token_hash_value,
                expires_at=utc_now() + timedelta(minutes=settings.password_reset_token_ttl_minutes),
                requested_ip=get_client_ip(request),
            )
            db.add(reset_token)
            db.commit()
            base_url = str(request.base_url).rstrip("/")
            reset_url = f"{base_url}/password-reset/confirm?token={raw_token}"
            try:
                send_email(
                    settings=mail_settings,
                    to_email=user.email,
                    subject="Passwort zuruecksetzen",
                    text_body=(
                        "Du hast eine Passwort-Zuruecksetzung angefordert.\n\n"
                        f"Link: {reset_url}\n\n"
                        f"Der Link ist {settings.password_reset_token_ttl_minutes} Minuten gueltig."
                    ),
                )
            except Exception:
                # Best-effort: a mail failure must not reveal account
                # existence, so log it and still answer generically.
                logger.exception("password_reset_email_failed email=%s", user.email)
    return templates.TemplateResponse(
        "pages/password_reset_request.html",
        build_context(request, error=None, success_message=generic_message),
    )
def get_valid_reset_token(db: Session, raw_token: str) -> PasswordResetToken | None:
    """Look up a reset token by the hash of *raw_token*.

    Returns None when the token is unknown, already used, or expired; raw
    tokens are never stored, only their hashes.
    """
    token_hash_value = hash_token(raw_token)
    stmt = select(PasswordResetToken).where(PasswordResetToken.token_hash == token_hash_value)
    token_row = db.execute(stmt).scalar_one_or_none()
    if not token_row:
        return None
    if token_row.used_at is not None:
        return None
    if utc_now() > ensure_utc_datetime(token_row.expires_at):
        return None
    return token_row


def invalidate_password_reset_tokens(
    *,
    db: Session,
    user_id: str,
    exclude_token_id: str | None = None,
) -> int:
    """Mark all unused reset tokens of *user_id* as used.

    *exclude_token_id* lets the caller keep one token alive (e.g. the token
    currently being consumed). Returns the number of tokens invalidated.
    Does not commit; the caller owns the transaction.
    """
    stmt = select(PasswordResetToken).where(
        PasswordResetToken.user_id == user_id,
        PasswordResetToken.used_at.is_(None),
    )
    tokens = db.execute(stmt).scalars().all()
    if not tokens:
        return 0
    invalidated_at = utc_now()
    invalidated = 0
    for token in tokens:
        if exclude_token_id and token.id == exclude_token_id:
            continue
        token.used_at = invalidated_at
        db.add(token)
        invalidated += 1
    return invalidated


def email_mfa_resend_cooldown_active(user: User) -> bool:
    """Return True while the 30-second cooldown since the last emailed MFA code is active."""
    sent_at = user.mfa_email_code_sent_at
    if sent_at is None:
        return False
    return (utc_now() - ensure_utc_datetime(sent_at)) < timedelta(seconds=30)


@app.get("/password-reset/confirm", response_class=HTMLResponse)
async def password_reset_confirm_form(
    request: Request,
    token: str = Query(...),
    db: Session = Depends(get_db),
):
    """New-password form reached via the emailed reset link; validates the token first."""
    user = get_current_user(request, db)
    if user:
        return RedirectResponse(url=user_home_url(user), status_code=status.HTTP_303_SEE_OTHER)
    token_row = get_valid_reset_token(db, token)
    if not token_row:
        return templates.TemplateResponse(
            "pages/password_reset_confirm.html",
            build_context(request, token="", error="Reset-Link ist ungueltig oder abgelaufen."),
            status_code=status.HTTP_400_BAD_REQUEST,
        )
    return templates.TemplateResponse(
        "pages/password_reset_confirm.html",
        build_context(request, token=token, error=None, success_message=None),
    )
error="Reset-Link ist ungueltig oder abgelaufen."), + status_code=status.HTTP_400_BAD_REQUEST, + ) + return templates.TemplateResponse( + "pages/password_reset_confirm.html", + build_context(request, token=token, error=None, success_message=None), + ) + + @app.post("/password-reset/confirm", response_class=HTMLResponse) + async def password_reset_confirm_submit( + request: Request, + token: str = Form(...), + new_password: str = Form(...), + new_password_repeat: str = Form(...), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + verify_csrf(request, csrf_token) + token_row = get_valid_reset_token(db, token) + if not token_row: + return templates.TemplateResponse( + "pages/password_reset_confirm.html", + build_context(request, token="", error="Reset-Link ist ungueltig oder abgelaufen."), + status_code=status.HTTP_400_BAD_REQUEST, + ) + if new_password != new_password_repeat: + return templates.TemplateResponse( + "pages/password_reset_confirm.html", + build_context(request, token=token, error="Passwoerter stimmen nicht ueberein."), + status_code=status.HTTP_400_BAD_REQUEST, + ) + if len(new_password) < 10: + return templates.TemplateResponse( + "pages/password_reset_confirm.html", + build_context(request, token=token, error="Neues Passwort muss mindestens 10 Zeichen lang sein."), + status_code=status.HTTP_400_BAD_REQUEST, + ) + + user = db.execute(select(User).where(User.id == token_row.user_id)).scalar_one_or_none() + if not user: + return templates.TemplateResponse( + "pages/password_reset_confirm.html", + build_context(request, token="", error="Benutzer nicht gefunden."), + status_code=status.HTTP_404_NOT_FOUND, + ) + user.password_hash = hash_password(new_password) + invalidate_password_reset_tokens(db=db, user_id=user.id, exclude_token_id=token_row.id) + token_row.used_at = utc_now() + user.mfa_email_code_hash = None + user.mfa_email_code_expires_at = None + db.add(user) + db.add(token_row) + db.commit() + return 
RedirectResponse(url="/login?msg=password_reset_done", status_code=status.HTTP_303_SEE_OTHER) + + @app.get("/dashboard", response_class=HTMLResponse) + async def dashboard( + request: Request, + date_value: str | None = Query(default=None, alias="date"), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + if date_value is None and user.preferred_home_view == "month": + return RedirectResponse( + url=f"/month?{urlencode({'view': user.preferred_month_view_mode or 'flat'})}", + status_code=status.HTTP_303_SEE_OTHER, + ) + + selected_date = parse_date_query(date_value, default=date.today()) + week_start, week_end = iso_week_bounds(selected_date) + working_days = get_user_working_days(user) + ensure_user_has_default_target_rule(db, user) + db.commit() + rules = list_rules_for_user(db, user.id) + selected_week_target_minutes = target_for_week(rules, week_start, user.weekly_target_minutes) + + auto_created = autofill_entries_for_range(db=db, user=user, range_start=week_start, range_end=week_end) + if auto_created: + db.commit() + + week_entries_stmt = ( + select(TimeEntry) + .where(TimeEntry.user_id == user.id, TimeEntry.date >= week_start, TimeEntry.date <= week_end) + .order_by(TimeEntry.date.asc()) + ) + week_entries = db.execute(week_entries_stmt).scalars().all() + + all_entries_until_week_stmt = ( + select(TimeEntry) + .where(TimeEntry.user_id == user.id, TimeEntry.date <= week_end) + .order_by(TimeEntry.date.asc()) + ) + all_entries_until_week = db.execute(all_entries_until_week_stmt).scalars().all() + + vacations_selected = list_vacations_for_user(db, user.id, week_start, week_end) + vacation_dates_selected = expand_vacation_dates( + vacations_selected, + week_start, + week_end, + relevant_weekdays=working_days, + ) + special_statuses_selected = list_special_statuses_for_user(db, user.id, week_start, week_end) + special_dates_selected = 
effective_non_working_dates_for_user(user=user, special_statuses=special_statuses_selected) + count_as_worktime_dates_selected = count_as_worktime_dates_for_user( + user=user, + vacation_dates=vacation_dates_selected, + special_statuses=special_statuses_selected, + ) + special_status_by_date = special_status_map(special_statuses_selected) + overtime_adjustments_selected = list_overtime_adjustments_for_user(db, user.id, week_start, week_end) + overtime_adjustments_by_date = overtime_adjustment_map(overtime_adjustments_selected) + week_data = aggregate_week(week_entries, week_start, selected_week_target_minutes) + effective_week_totals = compute_effective_week_totals( + entries=week_entries, + week_start=week_start, + weekly_target_minutes=selected_week_target_minutes, + vacation_dates=vacation_dates_selected, + non_working_dates=special_dates_selected, + count_as_worktime_dates=count_as_worktime_dates_selected, + overtime_adjustment_minutes_by_date=overtime_adjustment_minutes_map(overtime_adjustments_selected), + overtime_start_date=user.overtime_start_date, + relevant_weekdays=working_days, + ) + week_data["weekly_ist"] = effective_week_totals["weekly_ist"] + week_data["weekly_soll"] = effective_week_totals["weekly_soll"] + week_data["weekly_delta"] = effective_week_totals["weekly_delta"] + for day_info in week_data["days"]: + day_info["is_vacation"] = day_info["date"] in vacation_dates_selected + day_info["special_status"] = special_status_by_date.get(day_info["date"]) + day_info["overtime_adjustment_minutes"] = ( + overtime_adjustments_by_date[day_info["date"]].minutes if day_info["date"] in overtime_adjustments_by_date else 0 + ) + vacations_until_week = list_vacations_for_user(db, user.id, date(1970, 1, 1), week_end) + special_until_week = list_special_statuses_for_user(db, user.id, date(1970, 1, 1), week_end) + vacation_dates_until_week = expand_vacation_dates( + vacations_until_week, + date(1970, 1, 1), + week_end, + relevant_weekdays=working_days, + ) + 
overtime_adjustments_until_week = list_overtime_adjustments_for_user(db, user.id, date(1970, 1, 1), week_end) + week_data["cumulative_delta"] = compute_cumulative_overtime_minutes( + entries=all_entries_until_week, + rules=rules, + weekly_target_fallback=user.weekly_target_minutes, + vacation_periods=vacations_until_week, + non_working_dates=effective_non_working_dates_for_user(user=user, special_statuses=special_until_week), + count_as_worktime_dates=count_as_worktime_dates_for_user( + user=user, + vacation_dates=vacation_dates_until_week, + special_statuses=special_until_week, + ), + overtime_adjustment_minutes_by_date=overtime_adjustment_minutes_map(overtime_adjustments_until_week), + selected_week_start=week_start, + overtime_start_date=user.overtime_start_date, + overtime_expiry_days=user.overtime_expiry_days, + expire_negative_overtime=user.expire_negative_overtime, + relevant_weekdays=working_days, + ) + week_data["is_vacation_week"] = range_is_full_vacation( + week_start, + week_end, + vacation_dates=vacation_dates_selected, + relevant_weekdays=working_days, + ) + + previous_week = week_start - timedelta(days=7) + next_week = week_start + timedelta(days=7) + workhours_target_warning = build_workhours_target_warning(db=db, user=user) + maybe_send_workhours_target_warning_email(db=db, user=user, warning=workhours_target_warning) + + return templates.TemplateResponse( + "pages/dashboard.html", + build_context( + request, + user=user, + db=db, + week=week_data, + selected_date=selected_date, + previous_week=previous_week, + next_week=next_week, + workhours_target_warning=workhours_target_warning, + ), + ) + + @app.post("/weekly-target") + async def change_weekly_target( + request: Request, + week_start_value: str = Form(..., alias="week_start"), + weekly_target_hours: float = Form(...), + scope: str = Form(...), + return_to: str = Form(default="/dashboard"), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, 
db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + + verify_csrf(request, csrf_token) + parsed_scope = parse_weekly_target_scope(scope) + selected_week_start = parse_date_query(week_start_value) + + new_target_minutes = int(round(weekly_target_hours * 60)) + if new_target_minutes <= 0: + raise HTTPException(status_code=400, detail="Wochen-Soll muss groesser als 0 sein") + + ensure_user_has_default_target_rule(db, user) + apply_weekly_target_change( + db, + user=user, + selected_week_start=selected_week_start, + new_target_minutes=new_target_minutes, + scope=parsed_scope, + ) + db.commit() + logger.info( + "weekly_target_updated email=%s week_start=%s scope=%s minutes=%s", + user.email, + selected_week_start.isoformat(), + parsed_scope, + new_target_minutes, + ) + + destination = return_to if return_to.startswith("/") else "/dashboard" + base_path, _, existing_query = destination.partition("?") + query_params: dict[str, str] = {} + if existing_query: + for part in existing_query.split("&"): + if not part: + continue + key, sep, value = part.partition("=") + query_params[key] = value if sep else "" + if base_path.startswith("/dashboard"): + query_params.setdefault("date", selected_week_start.isoformat()) + query_params["target_updated"] = "1" + url = f"{base_path}?{urlencode(query_params)}" + return RedirectResponse(url=url, status_code=status.HTTP_303_SEE_OTHER) + + def get_entry_or_404(db: Session, user_id: str, entry_id: str) -> TimeEntry: + stmt = select(TimeEntry).where(TimeEntry.id == entry_id, TimeEntry.user_id == user_id) + entry = db.execute(stmt).scalar_one_or_none() + if not entry: + raise HTTPException(status_code=404, detail="Eintrag nicht gefunden") + return entry + + @app.get("/entry/new", response_class=HTMLResponse) + async def new_entry_form( + request: Request, + date_value: str | None = Query(default=None, alias="date"), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + 
if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + + selected_date = parse_date_query(date_value, default=date.today()) + return_to = resolve_return_to(request, fallback=f"/dashboard?date={selected_date.isoformat()}") + return templates.TemplateResponse( + "pages/entry_form.html", + build_context( + request, + user=user, + db=db, + title="Tag hinzufuegen", + action_url="/entry/new", + entry={ + "break_mode": "auto" if auto_break_rules_enabled(user) else "manual", + "break_minutes": automatic_break_minutes(start_minutes=8 * 60 + 30, end_minutes=15 * 60) + if auto_break_rules_enabled(user) + else default_break_minutes_for_user(user), + }, + full_day_net_minutes=full_day_work_minutes_or_none(db=db, user=user, selected_date=selected_date), + selected_date=selected_date, + return_to=return_to, + error=None, + ), + ) + + @app.post("/entry/new", response_class=HTMLResponse) + async def new_entry_submit( + request: Request, + date_value: str = Form(..., alias="date"), + start_time: str = Form(...), + end_time: str = Form(...), + break_minutes: int = Form(default=0), + break_mode: str = Form(default="manual"), + notes: str = Form(default=""), + return_to: str = Form(default=""), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + + verify_csrf(request, csrf_token) + + try: + payload = TimeEntryCreate( + date=parse_date_query(date_value), + start_time=start_time, + end_time=end_time, + break_minutes=break_minutes, + break_mode=break_mode, + notes=notes or None, + ) + start_minutes = parse_time_to_minutes(payload.start_time) + end_minutes = parse_time_to_minutes(payload.end_time) + effective_break_minutes, effective_break_mode = resolve_break_settings( + user=user, + start_minutes=start_minutes, + end_minutes=end_minutes, + submitted_break_minutes=payload.break_minutes, + 
submitted_break_mode=payload.break_mode, + ) + compute_net_minutes(start_minutes, end_minutes, effective_break_minutes) + except Exception as exc: + selected_date = parse_date_fallback_today(date_value) + safe_return_to = return_to if return_to.startswith("/") else resolve_return_to( + request, fallback=f"/dashboard?date={selected_date.isoformat()}" + ) + return templates.TemplateResponse( + "pages/entry_form.html", + build_context( + request, + user=user, + db=db, + title="Tag hinzufuegen", + action_url="/entry/new", + entry={ + "date": date_value, + "start_time": start_time, + "end_time": end_time, + "break_minutes": break_minutes, + "break_mode": break_mode, + "notes": notes, + }, + full_day_net_minutes=full_day_work_minutes_or_none(db=db, user=user, selected_date=selected_date), + selected_date=selected_date, + return_to=safe_return_to, + error=str(exc), + ), + status_code=status.HTTP_400_BAD_REQUEST, + ) + + entry = TimeEntry( + user_id=user.id, + date=payload.date, + start_minutes=start_minutes, + end_minutes=end_minutes, + break_minutes=effective_break_minutes, + break_rule_mode=effective_break_mode, + notes=payload.notes, + ) + db.add(entry) + clear_auto_entry_skip_for_date(db=db, user_id=user.id, day=payload.date) + clear_special_status_for_date(db=db, user_id=user.id, day=payload.date) + clear_overtime_adjustment_for_date(db=db, user_id=user.id, day=payload.date) + try: + db.commit() + except IntegrityError: + db.rollback() + selected_date = parse_date_fallback_today(date_value) + safe_return_to = return_to if return_to.startswith("/") else resolve_return_to( + request, fallback=f"/dashboard?date={selected_date.isoformat()}" + ) + return templates.TemplateResponse( + "pages/entry_form.html", + build_context( + request, + user=user, + db=db, + title="Tag hinzufuegen", + action_url="/entry/new", + entry={ + "date": date_value, + "start_time": start_time, + "end_time": end_time, + "break_minutes": break_minutes, + "break_mode": break_mode, + "notes": notes, 
+ }, + full_day_net_minutes=full_day_work_minutes_or_none(db=db, user=user, selected_date=selected_date), + selected_date=selected_date, + return_to=safe_return_to, + error="Es existiert bereits ein Eintrag fuer dieses Datum.", + ), + status_code=status.HTTP_409_CONFLICT, + ) + + destination = return_to if return_to.startswith("/") else f"/dashboard?{urlencode({'date': payload.date.isoformat()})}" + return RedirectResponse(url=destination, status_code=status.HTTP_303_SEE_OTHER) + + @app.get("/entry/{entry_id}/edit", response_class=HTMLResponse) + async def edit_entry_form(entry_id: str, request: Request, db: Session = Depends(get_db)): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + + entry = get_entry_or_404(db, user.id, entry_id) + return_to = resolve_return_to(request, fallback=f"/dashboard?date={entry.date.isoformat()}") + return templates.TemplateResponse( + "pages/entry_form.html", + build_context( + request, + user=user, + db=db, + title="Eintrag bearbeiten", + action_url=f"/entry/{entry.id}/edit", + entry={ + "date": entry.date.isoformat(), + "start_time": minutes_to_hhmm(entry.start_minutes), + "end_time": minutes_to_hhmm(entry.end_minutes), + "break_minutes": entry.break_minutes, + "break_mode": entry.break_rule_mode, + "notes": entry.notes or "", + }, + full_day_net_minutes=full_day_work_minutes_or_none(db=db, user=user, selected_date=entry.date), + selected_date=entry.date, + return_to=return_to, + error=None, + ), + ) + + @app.post("/entry/{entry_id}/edit", response_class=HTMLResponse) + async def edit_entry_submit( + entry_id: str, + request: Request, + date_value: str = Form(..., alias="date"), + start_time: str = Form(...), + end_time: str = Form(...), + break_minutes: int = Form(default=0), + break_mode: str = Form(default="manual"), + notes: str = Form(default=""), + return_to: str = Form(default=""), + csrf_token: str = Form(...), + db: Session = 
Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + + verify_csrf(request, csrf_token) + entry = get_entry_or_404(db, user.id, entry_id) + + try: + payload = TimeEntryCreate( + date=parse_date_query(date_value), + start_time=start_time, + end_time=end_time, + break_minutes=break_minutes, + break_mode=break_mode, + notes=notes or None, + ) + start_minutes = parse_time_to_minutes(payload.start_time) + end_minutes = parse_time_to_minutes(payload.end_time) + effective_break_minutes, effective_break_mode = resolve_break_settings( + user=user, + start_minutes=start_minutes, + end_minutes=end_minutes, + submitted_break_minutes=payload.break_minutes, + submitted_break_mode=payload.break_mode, + existing_break_mode=entry.break_rule_mode, + existing_break_minutes=entry.break_minutes, + start_or_end_changed=(start_minutes != entry.start_minutes or end_minutes != entry.end_minutes), + ) + compute_net_minutes(start_minutes, end_minutes, effective_break_minutes) + except Exception as exc: + selected_date = parse_date_fallback_today(date_value) + safe_return_to = return_to if return_to.startswith("/") else resolve_return_to( + request, fallback=f"/dashboard?date={selected_date.isoformat()}" + ) + return templates.TemplateResponse( + "pages/entry_form.html", + build_context( + request, + user=user, + db=db, + title="Eintrag bearbeiten", + action_url=f"/entry/{entry.id}/edit", + entry={ + "date": date_value, + "start_time": start_time, + "end_time": end_time, + "break_minutes": break_minutes, + "break_mode": break_mode, + "notes": notes, + }, + full_day_net_minutes=full_day_work_minutes_or_none(db=db, user=user, selected_date=selected_date), + selected_date=selected_date, + return_to=safe_return_to, + error=str(exc), + ), + status_code=status.HTTP_400_BAD_REQUEST, + ) + + entry.date = payload.date + entry.start_minutes = start_minutes + entry.end_minutes = end_minutes + 
entry.break_minutes = effective_break_minutes + entry.break_rule_mode = effective_break_mode + entry.notes = payload.notes + clear_auto_entry_skip_for_date(db=db, user_id=user.id, day=payload.date) + clear_special_status_for_date(db=db, user_id=user.id, day=payload.date) + clear_overtime_adjustment_for_date(db=db, user_id=user.id, day=payload.date) + + try: + db.commit() + except IntegrityError: + db.rollback() + selected_date = parse_date_fallback_today(date_value) + safe_return_to = return_to if return_to.startswith("/") else resolve_return_to( + request, fallback=f"/dashboard?date={selected_date.isoformat()}" + ) + return templates.TemplateResponse( + "pages/entry_form.html", + build_context( + request, + user=user, + db=db, + title="Eintrag bearbeiten", + action_url=f"/entry/{entry.id}/edit", + entry={ + "date": date_value, + "start_time": start_time, + "end_time": end_time, + "break_minutes": break_minutes, + "break_mode": break_mode, + "notes": notes, + }, + full_day_net_minutes=full_day_work_minutes_or_none(db=db, user=user, selected_date=selected_date), + selected_date=selected_date, + return_to=safe_return_to, + error="Es existiert bereits ein Eintrag fuer dieses Datum.", + ), + status_code=status.HTTP_409_CONFLICT, + ) + + destination = return_to if return_to.startswith("/") else f"/dashboard?{urlencode({'date': entry.date.isoformat()})}" + return RedirectResponse(url=destination, status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/entry/{entry_id}/delete") + async def delete_entry( + entry_id: str, + request: Request, + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + + verify_csrf(request, csrf_token) + + entry = get_entry_or_404(db, user.id, entry_id) + selected_date = entry.date + db.delete(entry) + if user.entry_mode == ENTRY_MODE_AUTO_UNTIL_TODAY and selected_date <= date.today(): + 
mark_auto_entry_skip_for_date(db=db, user_id=user.id, day=selected_date) + db.commit() + + dashboard_url = f"/dashboard?{urlencode({'date': selected_date.isoformat()})}" + return RedirectResponse(url=dashboard_url, status_code=status.HTTP_303_SEE_OTHER) + + @app.get("/day-status/edit", response_class=HTMLResponse) + async def edit_day_status_form( + request: Request, + date_value: str | None = Query(default=None, alias="date"), + status_value: str = Query(..., alias="status"), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + + selected_date = parse_date_query(date_value, default=date.today()) + status_key = parse_day_status_mode(status_value) + existing_entry_stmt = select(TimeEntry).where(TimeEntry.user_id == user.id, TimeEntry.date == selected_date) + existing_entry = db.execute(existing_entry_stmt).scalar_one_or_none() + day_is_vacation, day_special_status = day_status_for_user(db=db, user=user, selected_date=selected_date) + active_status_key = current_day_status_key(is_vacation=day_is_vacation, special_status=day_special_status) + day_adjustment = overtime_adjustment_map( + list_overtime_adjustments_for_user(db, user.id, selected_date, selected_date) + ).get(selected_date) + return_to = resolve_return_to(request, fallback=f"/dashboard?date={selected_date.isoformat()}") + + if status_key == DAY_STATUS_QUERY_VACATION: + action_url = "/vacation/day/toggle" + else: + action_url = "/special-day/toggle" + + return templates.TemplateResponse( + "pages/day_status_form.html", + build_context( + request, + user=user, + db=db, + title=DAY_STATUS_QUERY_LABELS[status_key], + selected_date=selected_date, + status_key=status_key, + action_url=action_url, + is_active=active_status_key == status_key, + current_status_key=active_status_key, + current_status_label=DAY_STATUS_QUERY_LABELS.get(active_status_key) if active_status_key else None, + 
day_overtime_adjustment_minutes=day_adjustment.minutes if day_adjustment else 0, + has_entry=existing_entry is not None, + existing_entry_id=existing_entry.id if existing_entry else None, + return_to=return_to, + ), + ) + + @app.get("/overtime-adjustment/edit", response_class=HTMLResponse) + async def edit_overtime_adjustment_form( + request: Request, + date_value: str | None = Query(default=None, alias="date"), + overtime_error: str | None = Query(default=None), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + + selected_date = parse_date_query(date_value, default=date.today()) + existing_entry_stmt = select(TimeEntry).where(TimeEntry.user_id == user.id, TimeEntry.date == selected_date) + existing_entry = db.execute(existing_entry_stmt).scalar_one_or_none() + day_is_vacation, day_special_status = day_status_for_user(db=db, user=user, selected_date=selected_date) + day_adjustment = overtime_adjustment_map( + list_overtime_adjustments_for_user(db, user.id, selected_date, selected_date) + ).get(selected_date) + return_to = resolve_return_to(request, fallback=f"/dashboard?date={selected_date.isoformat()}") + + return templates.TemplateResponse( + "pages/overtime_adjustment_form.html", + build_context( + request, + user=user, + db=db, + title=OVERTIME_ADJUSTMENT_LABEL, + selected_date=selected_date, + day_is_vacation=day_is_vacation, + day_special_status=day_special_status, + day_overtime_adjustment_minutes=day_adjustment.minutes if day_adjustment else 0, + overtime_adjustment_error=overtime_error, + has_entry=existing_entry is not None, + existing_entry_id=existing_entry.id if existing_entry else None, + return_to=return_to, + ), + ) + + @app.get("/month", response_class=HTMLResponse) + async def month_view( + request: Request, + month: str | None = Query(default=None), + view_mode: str | None = Query(default=None, alias="view"), + db: Session = 
Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + ensure_user_has_default_target_rule(db, user) + db.commit() + + if view_mode is None: + view_mode = user.preferred_month_view_mode or "flat" + if view_mode not in {"flat"}: + view_mode = "flat" + + if month: + try: + month_date = datetime.strptime(month, "%Y-%m").date() + except ValueError as exc: + raise HTTPException(status_code=400, detail="month muss YYYY-MM sein") from exc + else: + today = date.today() + month_date = date(today.year, today.month, 1) + + month_start = date(month_date.year, month_date.month, 1) + if month_start.month == 12: + next_month = date(month_start.year + 1, 1, 1) + else: + next_month = date(month_start.year, month_start.month + 1, 1) + month_end = next_month - timedelta(days=1) + working_days = get_user_working_days(user) + + auto_created = autofill_entries_for_range(db=db, user=user, range_start=month_start, range_end=month_end) + if auto_created: + db.commit() + + stmt = ( + select(TimeEntry) + .where(TimeEntry.user_id == user.id, TimeEntry.date >= month_start, TimeEntry.date <= month_end) + .order_by(TimeEntry.date.asc()) + ) + entries = db.execute(stmt).scalars().all() + + entries_by_date = {entry.date: entry for entry in entries} + + displayed_week_starts: set[date] = set() + current = month_start + while current <= month_end: + week_start = monday_of(current) + displayed_week_starts.add(week_start) + current += timedelta(days=1) + ordered_week_starts = sorted(displayed_week_starts) + full_display_start = min(ordered_week_starts) + full_display_end = max(ordered_week_starts) + timedelta(days=6) + + rules = list_rules_for_user(db, user.id) + month_vacations = list_vacations_for_user(db, user.id, month_start, month_end) + month_vacation_dates = expand_vacation_dates( + month_vacations, + month_start, + month_end, + relevant_weekdays=working_days, + ) + display_vacations = 
list_vacations_for_user(db, user.id, full_display_start, full_display_end) + display_vacation_dates = expand_vacation_dates( + display_vacations, + full_display_start, + full_display_end, + relevant_weekdays=working_days, + ) + month_special_statuses = list_special_statuses_for_user(db, user.id, month_start, month_end) + month_special_dates = effective_non_working_dates_for_user(user=user, special_statuses=month_special_statuses) + month_count_as_worktime_dates = count_as_worktime_dates_for_user( + user=user, + vacation_dates=month_vacation_dates, + special_statuses=month_special_statuses, + ) + display_special_statuses = list_special_statuses_for_user(db, user.id, full_display_start, full_display_end) + display_special_status_map = special_status_map(display_special_statuses) + display_overtime_adjustments = list_overtime_adjustments_for_user(db, user.id, full_display_start, full_display_end) + display_overtime_adjustment_map = overtime_adjustment_map(display_overtime_adjustments) + month_overtime_adjustments = list_overtime_adjustments_for_user(db, user.id, month_start, month_end) + + month_ist = 0 + month_soll = 0 + month_delta = 0 + weeks: list[dict] = [] + for week_start in ordered_week_starts: + week_end = week_start + timedelta(days=6) + visible_start = max(week_start, month_start) + visible_end = min(week_end, month_end) + weekly_target_minutes = target_for_week(rules, week_start, user.weekly_target_minutes) + week_totals = compute_effective_span_totals( + entries=entries, + range_start=visible_start, + range_end=visible_end, + weekly_target_minutes=weekly_target_minutes, + vacation_dates=month_vacation_dates, + non_working_dates=month_special_dates, + count_as_worktime_dates=month_count_as_worktime_dates, + overtime_adjustment_minutes_by_date=overtime_adjustment_minutes_map(month_overtime_adjustments), + overtime_start_date=user.overtime_start_date, + relevant_weekdays=working_days, + ) + weekly_ist = week_totals["ist_minutes"] + weekly_soll = 
week_totals["soll_minutes"] + weekly_delta = week_totals["delta_minutes"] + vacation_days_visible = week_totals["vacation_workdays"] + month_ist += weekly_ist + month_soll += weekly_soll + month_delta += weekly_delta + + week_days = [] + cursor = visible_start + while cursor <= visible_end: + entry = entries_by_date.get(cursor) + if entry: + net = compute_net_minutes(entry.start_minutes, entry.end_minutes, entry.break_minutes) + week_days.append( + { + "date": cursor, + "entry": entry, + "net_minutes": net, + "is_weekend": cursor.weekday() >= 5, + "is_vacation": cursor in display_vacation_dates, + "special_status": display_special_status_map.get(cursor), + "overtime_adjustment_minutes": ( + display_overtime_adjustment_map[cursor].minutes if cursor in display_overtime_adjustment_map else 0 + ), + } + ) + else: + week_days.append( + { + "date": cursor, + "entry": None, + "net_minutes": 0, + "is_weekend": cursor.weekday() >= 5, + "is_vacation": cursor in display_vacation_dates, + "special_status": display_special_status_map.get(cursor), + "overtime_adjustment_minutes": ( + display_overtime_adjustment_map[cursor].minutes if cursor in display_overtime_adjustment_map else 0 + ), + } + ) + cursor += timedelta(days=1) + + weeks.append( + { + "week_start": week_start, + "week_end": week_end, + "iso_week": week_start.isocalendar()[1], + "days": week_days, + "weekly_ist": weekly_ist, + "weekly_soll": weekly_soll, + "weekly_delta": weekly_delta, + "vacation_days": vacation_days_visible, + "is_vacation_week": range_is_full_vacation( + week_start, + week_end, + vacation_dates=display_vacation_dates, + relevant_weekdays=working_days, + ), + } + ) + + previous_month = (month_start.replace(day=1) - timedelta(days=1)).replace(day=1) + next_month_value = next_month + workhours_target_warning = build_workhours_target_warning(db=db, user=user) + maybe_send_workhours_target_warning_email(db=db, user=user, warning=workhours_target_warning) + + return templates.TemplateResponse( + 
"pages/month.html", + build_context( + request, + user=user, + db=db, + month_start=month_start, + month_end=month_end, + month_value=month_start.strftime("%Y-%m"), + view_mode=view_mode, + weeks=weeks, + month_ist=month_ist, + month_soll=month_soll, + month_delta=month_delta, + previous_month=previous_month, + next_month=next_month_value, + monthly_soll_mode="summe_anteilig_nach_monatstagen", + workhours_target_warning=workhours_target_warning, + ), + ) + + @app.get("/settings", response_class=HTMLResponse) + async def settings_page( + request: Request, + msg: str | None = Query(default=None), + tab: str = Query(default="settings"), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + + success_message = None + error_message = None + if msg == "profile_updated": + success_message = "Profil aktualisiert." + elif msg == "password_updated": + success_message = "Passwort aktualisiert." + elif msg == "preferences_updated": + success_message = "Einstellungen gespeichert." + elif msg == "vacation_added": + success_message = "Urlaubszeitraum hinzugefuegt." + elif msg == "vacation_deleted": + success_message = "Urlaubszeitraum entfernt." + elif msg == "overtime_updated": + success_message = "Ueberstunden-Regeln gespeichert." + elif msg == "workdays_updated": + success_message = "Arbeitstage gespeichert." + elif msg == "vacation_allowance_updated": + success_message = "Urlaubstage gespeichert." + elif msg == "workhours_counter_updated": + success_message = "Arbeitsstunden-Counter gespeichert." + elif msg == "weekly_target_updated": + success_message = "Wochenstunden gespeichert." + elif msg == "mfa_updated": + success_message = "2FA-Einstellungen gespeichert." + elif msg == "admin_user_updated": + success_message = "Benutzer aktualisiert." + elif msg == "admin_user_deleted": + success_message = "Benutzer geloescht." 
+ elif msg == "account_deleted": + success_message = "Dein Konto und alle zugehörigen Daten wurden gelöscht." + elif msg == "smtp_updated": + success_message = "E-Mail-Server gespeichert." + elif msg == "smtp_test_sent": + success_message = "Testmail wurde versendet." + elif msg == "site_content_updated": + success_message = "Impressum und Datenschutz wurden gespeichert." + elif msg == "ticket_updated": + success_message = "Ticket aktualisiert." + elif msg == "mfa_setup_required": + success_message = "Bitte 2FA in den Sicherheitseinstellungen mit einem Setup-Code abschließen." + elif msg == "mfa_email_unavailable": + error_message = "E-Mail-2FA konnte nicht aktiviert werden, da kein Mailserver verfügbar ist." + + return render_settings_form( + request, + db=db, + user=user, + active_tab=tab, + success_message=success_message, + error=error_message, + ) + + @app.post("/settings/profile") + async def settings_update_profile( + request: Request, + email: str = Form(...), + federal_state: str = Form(default=""), + current_password: str = Form(...), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + if not verify_password(current_password, user.password_hash): + return render_settings_form( + request, + db=db, + user=user, + error="Aktuelles Passwort ist ungültig.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + try: + payload = RegisterRequest(email=email, password="validplaceholder123") + except Exception: + return render_settings_form( + request, + db=db, + user=user, + error="Ungültige E-Mail-Adresse.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + existing = find_user_by_email(db, payload.email) + if existing and existing.id != user.id: + return render_settings_form( + request, + db=db, + user=user, + error="E-Mail ist bereits vergeben.", + 
status_code=status.HTTP_409_CONFLICT, + ) + + normalized_state = normalize_german_state_code(federal_state) + if federal_state.strip() and normalized_state is None: + return render_settings_form( + request, + db=db, + user=user, + error="Ungültiges Bundesland ausgewählt.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + new_email = payload.email.lower() + email_changed = new_email != user.email.lower() + verification_required_for_new_email = email_changed and is_email_verification_enabled(db) + + user.email = new_email + user.federal_state = normalized_state + if email_changed: + user.email_verification_token_hash = None + user.email_verification_expires_at = None + user.email_verification_sent_at = None + if verification_required_for_new_email: + user.email_verified = False + + sync_start = date.today().replace(month=1, day=1) - timedelta(days=366) + sync_end = date.today().replace(month=12, day=31) + timedelta(days=730) + try: + sync_auto_holidays_for_user(db=db, user=user, from_date=sync_start, to_date=sync_end) + except Exception: + logger.exception("holiday_sync_failed email=%s state=%s", user.email, user.federal_state) + return render_settings_form( + request, + db=db, + user=user, + error="Feiertage konnten nicht automatisch synchronisiert werden.", + status_code=status.HTTP_502_BAD_GATEWAY, + ) + db.commit() + + if verification_required_for_new_email: + sent, reason = send_email_verification_link(request=request, db=db, user=user, force=True) + logger.info("profile_email_changed_requires_verification email=%s sent=%s reason=%s", user.email, sent, reason) + request.session.clear() + if not sent: + return RedirectResponse(url="/login?msg=email_verification_send_failed", status_code=status.HTTP_303_SEE_OTHER) + return RedirectResponse(url="/login?msg=email_verification_sent", status_code=status.HTTP_303_SEE_OTHER) + + return RedirectResponse(url="/settings?msg=profile_updated", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/password") + 
async def settings_update_password( + request: Request, + current_password: str = Form(...), + new_password: str = Form(...), + new_password_repeat: str = Form(...), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + if not verify_password(current_password, user.password_hash): + return render_settings_form( + request, + db=db, + user=user, + error="Aktuelles Passwort ist ungueltig.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + if new_password != new_password_repeat: + return render_settings_form( + request, + db=db, + user=user, + error="Neue Passwoerter stimmen nicht ueberein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + if len(new_password) < 10: + return render_settings_form( + request, + db=db, + user=user, + error="Neues Passwort muss mindestens 10 Zeichen haben.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + user.password_hash = hash_password(new_password) + invalidate_password_reset_tokens(db=db, user_id=user.id) + db.commit() + return RedirectResponse(url="/settings?msg=password_updated", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/mfa") + async def settings_update_mfa( + request: Request, + mfa_method: str = Form(...), + current_password: str = Form(...), + setup_code: str = Form(default=""), + regenerate_totp: str | None = Form(default=None), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + if not verify_password(current_password, user.password_hash): + return render_settings_form( + request, + db=db, + user=user, + error="Aktuelles Passwort ist ungueltig.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if mfa_method not in 
{MFA_METHOD_NONE, MFA_METHOD_TOTP, MFA_METHOD_EMAIL}: + return render_settings_form( + request, + db=db, + user=user, + error="Ungueltige MFA-Methode.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if mfa_method == MFA_METHOD_NONE: + user.mfa_method = MFA_METHOD_NONE + user.mfa_totp_secret_encrypted = None + user.mfa_email_code_hash = None + user.mfa_email_code_expires_at = None + request.session.pop("mfa_setup_secret", None) + db.add(user) + db.commit() + return RedirectResponse(url="/settings?msg=mfa_updated", status_code=status.HTTP_303_SEE_OTHER) + + if mfa_method == MFA_METHOD_EMAIL: + if not resolve_mail_settings(db): + return render_settings_form( + request, + db=db, + user=user, + error="E-Mail-Server ist nicht konfiguriert. E-Mail-2FA kann nicht aktiviert werden.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + user.mfa_method = MFA_METHOD_EMAIL + user.mfa_email_code_hash = None + user.mfa_email_code_expires_at = None + request.session.pop("mfa_setup_secret", None) + db.add(user) + db.commit() + return RedirectResponse(url="/settings?msg=mfa_updated", status_code=status.HTTP_303_SEE_OTHER) + + # TOTP flow + if regenerate_totp == "on": + request.session.pop("mfa_setup_secret", None) + user.mfa_totp_secret_encrypted = None + user.mfa_method = MFA_METHOD_NONE + db.add(user) + db.commit() + + setup_secret = request.session.get("mfa_setup_secret") + if not setup_secret and user.mfa_totp_secret_encrypted: + # Secret exists already: method can be toggled directly. + user.mfa_method = MFA_METHOD_TOTP + db.add(user) + db.commit() + return RedirectResponse(url="/settings?msg=mfa_updated", status_code=status.HTTP_303_SEE_OTHER) + + if not setup_secret: + setup_secret = generate_totp_secret() + request.session["mfa_setup_secret"] = setup_secret + return render_settings_form( + request, + db=db, + user=user, + error="TOTP-Schluessel erstellt. 
Bitte in Authenticator-App hinterlegen und Code bestaetigen.", + status_code=status.HTTP_200_OK, + ) + + if not setup_code.strip(): + return render_settings_form( + request, + db=db, + user=user, + error="Bitte den 6-stelligen Code aus der Authenticator-App eingeben.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if not verify_totp_code(secret=setup_secret, code=setup_code): + return render_settings_form( + request, + db=db, + user=user, + error="Authenticator-Code ist ungueltig.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + user.mfa_totp_secret_encrypted = encrypt_secret(fernet, setup_secret) + user.mfa_method = MFA_METHOD_TOTP + user.mfa_email_code_hash = None + user.mfa_email_code_expires_at = None + request.session.pop("mfa_setup_secret", None) + db.add(user) + db.commit() + return RedirectResponse(url="/settings?msg=mfa_updated", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/admin/users/{target_user_id}") + async def admin_update_user( + target_user_id: str, + request: Request, + role: str = Form(...), + is_active: str | None = Form(default=None), + reset_mfa: str | None = Form(default=None), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + admin_user = require_admin(request, db) + verify_csrf(request, csrf_token) + + if role not in {"user", "admin"}: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="Ungueltige Rolle.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + target_user = db.execute(select(User).where(User.id == target_user_id)).scalar_one_or_none() + if not target_user: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="Benutzer nicht gefunden.", + status_code=status.HTTP_404_NOT_FOUND, + ) + + target_is_active = is_active == "on" + current_admin_count = count_admin_users(db) + if target_user.role == "admin": + removing_admin_role = role != "admin" + deactivating_admin = not target_is_active + if 
(removing_admin_role or deactivating_admin) and current_admin_count <= 1: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="Mindestens ein aktiver Admin muss erhalten bleiben.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + target_user.role = role + target_user.is_active = target_is_active + if reset_mfa == "on": + target_user.mfa_method = MFA_METHOD_NONE + target_user.mfa_totp_secret_encrypted = None + target_user.mfa_email_code_hash = None + target_user.mfa_email_code_expires_at = None + db.add(target_user) + db.commit() + return RedirectResponse(url="/settings?tab=admin&msg=admin_user_updated", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/admin/users/{target_user_id}/delete") + async def admin_delete_user( + target_user_id: str, + request: Request, + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + admin_user = require_admin(request, db) + verify_csrf(request, csrf_token) + + target_user = db.execute(select(User).where(User.id == target_user_id)).scalar_one_or_none() + if not target_user: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="Benutzer nicht gefunden.", + status_code=status.HTTP_404_NOT_FOUND, + ) + + if target_user.id == admin_user.id: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="Du kannst deinen eigenen Account nicht loeschen.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + current_admin_count = count_admin_users(db) + if target_user.role == "admin" and target_user.is_active and current_admin_count <= 1: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="Mindestens ein aktiver Admin muss erhalten bleiben.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + db.delete(target_user) + db.commit() + return RedirectResponse(url="/settings?tab=admin&msg=admin_user_deleted", 
status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/admin/email-server") + async def admin_update_email_server( + request: Request, + smtp_host: str = Form(default=""), + smtp_port_value: str = Form(default="587", alias="smtp_port"), + smtp_username: str = Form(default=""), + smtp_password: str = Form(default=""), + from_email: str = Form(default=""), + from_name: str = Form(default=""), + use_starttls: str | None = Form(default=None), + use_ssl: str | None = Form(default=None), + verify_tls: str | None = Form(default=None), + registration_mails_enabled: str | None = Form(default=None), + password_reset_mails_enabled: str | None = Form(default=None), + registration_admin_notify_enabled: str | None = Form(default=None), + registration_admin_notify_admin_ids: list[str] = Form(default=[], alias="registration_admin_notify_admin_ids"), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + admin_user = require_admin(request, db) + verify_csrf(request, csrf_token) + + try: + smtp_port = int(smtp_port_value) + except ValueError: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="SMTP-Port muss numerisch sein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + if smtp_port < 1 or smtp_port > 65535: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="SMTP-Port ausserhalb des gueltigen Bereichs (1-65535).", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + use_starttls_value = use_starttls == "on" + use_ssl_value = use_ssl == "on" + if use_starttls_value and use_ssl_value: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="Bitte nur STARTTLS oder SMTP-SSL aktivieren, nicht beides gleichzeitig.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + host = smtp_host.strip() + sender = from_email.strip().lower() + if not host or not sender: + return render_settings_form( + request, + db=db, + 
user=admin_user, + active_tab="admin", + error="SMTP-Host und Absender-E-Mail sind erforderlich.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + config = get_email_config(db) + if not config: + config = EmailServerConfig() + db.add(config) + db.flush() + + config.smtp_host = host + config.smtp_port = smtp_port + config.smtp_username = smtp_username.strip() or None + if smtp_password.strip(): + config.smtp_password_encrypted = encrypt_secret(fernet, smtp_password.strip()) + config.from_email = sender + config.from_name = from_name.strip() or settings.app_name + config.use_starttls = use_starttls_value + config.use_ssl = use_ssl_value + config.verify_tls = verify_tls == "on" + config.registration_mails_enabled = registration_mails_enabled == "on" + config.password_reset_mails_enabled = password_reset_mails_enabled == "on" + config.registration_admin_notify_enabled = registration_admin_notify_enabled == "on" + selected_admin_ids = parse_admin_id_csv(",".join(registration_admin_notify_admin_ids)) + if selected_admin_ids: + active_admin_ids_stmt = select(User.id).where(User.role == "admin", User.is_active.is_(True)) + active_admin_ids = set(db.execute(active_admin_ids_stmt).scalars().all()) + invalid_selection = [admin_id for admin_id in selected_admin_ids if admin_id not in active_admin_ids] + if invalid_selection: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="Ausgewaehlte Admin-Empfaenger sind ungueltig oder nicht mehr aktiv.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + config.registration_admin_notify_admin_ids_csv = ",".join(selected_admin_ids) if selected_admin_ids else None + config.updated_by_user_id = admin_user.id + db.add(config) + db.commit() + return RedirectResponse(url="/settings?tab=admin&msg=smtp_updated", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/admin/email-server/test") + async def admin_send_test_email( + request: Request, + csrf_token: str = Form(...), + db: 
Session = Depends(get_db), + ): + admin_user = require_admin(request, db) + verify_csrf(request, csrf_token) + mail_settings = resolve_mail_settings(db) + if not mail_settings: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="E-Mail-Server ist nicht vollstaendig konfiguriert.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + try: + send_email( + settings=mail_settings, + to_email=admin_user.email, + subject=f"{settings.app_name} Testmail", + text_body="Diese Testmail bestaetigt die SMTP-Konfiguration.", + ) + except Exception as exc: + logger.exception("smtp_test_mail_failed admin=%s", admin_user.email) + error_message = "Testmail konnte nicht versendet werden." + if "WRONG_VERSION_NUMBER" in str(exc): + error_message = ( + "TLS-Modus passt nicht zum SMTP-Server. Bei Port 587 bitte STARTTLS aktivieren " + "und SMTP-SSL deaktivieren." + ) + elif exc.__class__.__name__ == "SMTPAuthenticationError": + error_message = ( + "SMTP-Anmeldung fehlgeschlagen. Bitte Benutzername/Passwort oder SMTP-Policy prüfen." + ) + elif isinstance(exc, TimeoutError) or "timed out" in str(exc).lower(): + error_message = ( + "SMTP-Server nicht erreichbar (Timeout). Bitte Host/Port/Firewall/IP-Block prüfen." + ) + elif exc.__class__.__name__ == "SMTPServerDisconnected": + error_message = ( + "SMTP-Verbindung wurde vom Server beendet. Bitte TLS-Modus und Server-Logs prüfen." 
+ ) + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error=error_message, + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + ) + return RedirectResponse(url="/settings?tab=admin&msg=smtp_test_sent", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/admin/site-content") + async def admin_update_site_content( + request: Request, + impressum_markdown: str = Form(default=""), + privacy_markdown: str = Form(default=""), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + admin_user = require_admin(request, db) + verify_csrf(request, csrf_token) + + upsert_site_content( + db=db, + key=SITE_CONTENT_IMPRESSUM, + markdown_text=impressum_markdown, + updated_by_user_id=admin_user.id, + ) + upsert_site_content( + db=db, + key=SITE_CONTENT_PRIVACY, + markdown_text=privacy_markdown, + updated_by_user_id=admin_user.id, + ) + db.commit() + return RedirectResponse(url="/settings?tab=admin&msg=site_content_updated", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/admin/tickets/{ticket_id}") + async def admin_update_ticket( + request: Request, + ticket_id: str, + status_value: str = Form(..., alias="status"), + admin_notes: str = Form(default=""), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + admin_user = require_admin(request, db) + verify_csrf(request, csrf_token) + + if status_value not in {SUPPORT_TICKET_STATUS_OPEN, SUPPORT_TICKET_STATUS_CLOSED}: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="Ungültiger Ticketstatus.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + ticket = db.get(SupportTicket, ticket_id) + if ticket is None: + return render_settings_form( + request, + db=db, + user=admin_user, + active_tab="admin", + error="Ticket wurde nicht gefunden.", + status_code=status.HTTP_404_NOT_FOUND, + ) + + normalized_notes = (admin_notes or "").strip()[:4000] + ticket.status = status_value + 
ticket.admin_notes = normalized_notes or None + ticket.closed_at = utc_now() if status_value == SUPPORT_TICKET_STATUS_CLOSED else None + db.add(ticket) + db.commit() + return RedirectResponse(url="/settings?tab=admin&msg=ticket_updated", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/preferences") + async def settings_update_preferences( + request: Request, + preferred_home_view: str = Form(...), + preferred_month_view_mode: str = Form(...), + entry_mode: str = Form(default=""), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + if preferred_home_view not in {"week", "month"}: + return render_settings_form( + request, + db=db, + user=user, + error="Ungueltige Standardansicht.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + if preferred_month_view_mode not in {"flat", "weeks"}: + return render_settings_form( + request, + db=db, + user=user, + error="Ungueltige Monatsansicht.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + if entry_mode and entry_mode not in {ENTRY_MODE_MANUAL, ENTRY_MODE_AUTO_UNTIL_TODAY}: + return render_settings_form( + request, + db=db, + user=user, + error="Ungueltiger Erfassungsmodus.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + user.preferred_home_view = preferred_home_view + user.preferred_month_view_mode = preferred_month_view_mode + new_entry_mode = entry_mode or user.entry_mode + switched_to_auto_until_today = user.entry_mode != ENTRY_MODE_AUTO_UNTIL_TODAY and new_entry_mode == ENTRY_MODE_AUTO_UNTIL_TODAY + user.entry_mode = new_entry_mode + + delete_future_auto_entries(db=db, user_id=user.id, after_date=date.today()) + if switched_to_auto_until_today: + ensure_user_has_default_target_rule(db, user) + autofill_entries_for_range( + db=db, + user=user, + range_start=user.created_at.date(), + range_end=date.today(), + ) + + 
db.commit() + return RedirectResponse(url="/settings?msg=preferences_updated", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/workdays") + async def settings_update_workdays( + request: Request, + working_days_values: list[str] = Form(default=[], alias="working_days"), + count_vacation_as_worktime: str | None = Form(default=None), + count_holiday_as_worktime: str | None = Form(default=None), + count_sick_as_worktime: str | None = Form(default=None), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + try: + working_days = parse_weekday_values(working_days_values) + except HTTPException as exc: + return render_settings_form( + request, + db=db, + user=user, + error=exc.detail, + status_code=exc.status_code, + ) + + if not working_days: + return render_settings_form( + request, + db=db, + user=user, + error="Bitte mindestens einen Arbeitstag auswaehlen.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + user.working_days_csv = serialize_working_days(working_days) + user.count_vacation_as_worktime = count_vacation_as_worktime == "on" + user.count_holiday_as_worktime = count_holiday_as_worktime == "on" + user.count_sick_as_worktime = count_sick_as_worktime == "on" + sync_start = date.today().replace(month=1, day=1) - timedelta(days=366) + sync_end = date.today().replace(month=12, day=31) + timedelta(days=730) + try: + sync_auto_holidays_for_user(db=db, user=user, from_date=sync_start, to_date=sync_end) + except Exception: + logger.exception("holiday_sync_failed_after_workdays_update email=%s", user.email) + return render_settings_form( + request, + db=db, + user=user, + error="Feiertage konnten nicht automatisch synchronisiert werden.", + status_code=status.HTTP_502_BAD_GATEWAY, + ) + db.commit() + return RedirectResponse(url="/settings?msg=workdays_updated", 
status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/vacation-allowance") + async def settings_update_vacation_allowance( + request: Request, + vacation_days_total_value: str = Form(..., alias="vacation_days_total"), + vacation_show_in_header_present: str | None = Form(default=None), + vacation_show_in_header: str | None = Form(default=None), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + try: + vacation_days_total = int(vacation_days_total_value.strip()) + except ValueError: + return render_settings_form( + request, + db=db, + user=user, + error="Gesamturlaubstage muessen eine ganze Zahl sein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if vacation_days_total < 0: + return render_settings_form( + request, + db=db, + user=user, + error="Gesamturlaubstage duerfen nicht negativ sein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + if vacation_days_total > 365: + return render_settings_form( + request, + db=db, + user=user, + error="Gesamturlaubstage sind zu gross (maximal 365).", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + user.vacation_days_total = vacation_days_total + if vacation_show_in_header_present is not None: + user.vacation_show_in_header = vacation_show_in_header == "on" + db.commit() + + return RedirectResponse(url="/settings?msg=vacation_allowance_updated", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/weekly-target") + async def settings_update_weekly_target( + request: Request, + weekly_target_hours: float = Form(...), + automatic_break_rules_enabled: str | None = Form(default=None), + default_break_minutes_value: str = Form(default="", alias="default_break_minutes"), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return 
RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + new_target_minutes = int(round(weekly_target_hours * 60)) + if new_target_minutes <= 0: + return render_settings_form( + request, + db=db, + user=user, + error="Wochenstunden muessen groesser als 0 sein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + break_rules_enabled = automatic_break_rules_enabled == "on" + default_break_minutes = user.default_break_minutes if break_rules_enabled else 0 + if default_break_minutes_value.strip(): + try: + parsed_default_break_minutes = int(default_break_minutes_value.strip()) + except ValueError: + return render_settings_form( + request, + db=db, + user=user, + error="Tägliche Pause muss eine ganze Zahl in Minuten sein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + if parsed_default_break_minutes < 0: + return render_settings_form( + request, + db=db, + user=user, + error="Tägliche Pause darf nicht negativ sein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + default_break_minutes = parsed_default_break_minutes + + ensure_user_has_default_target_rule(db, user) + apply_weekly_target_change( + db, + user=user, + selected_week_start=monday_of(date.today()), + new_target_minutes=new_target_minutes, + scope="all_weeks", + ) + user.weekly_target_minutes = new_target_minutes + user.automatic_break_rules_enabled = break_rules_enabled + user.default_break_minutes = default_break_minutes + db.commit() + + return RedirectResponse(url="/settings?msg=weekly_target_updated", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/workhours-counter") + async def settings_update_workhours_counter( + request: Request, + workhours_counter_enabled: str | None = Form(default=None), + workhours_counter_show_in_header: str | None = Form(default=None), + workhours_counter_start_date_value: str = Form(default="", alias="workhours_counter_start_date"), + workhours_counter_end_date_value: str = Form(default="", 
alias="workhours_counter_end_date"), + workhours_counter_manual_offset_hours_value: str = Form(default="", alias="workhours_counter_manual_offset_hours"), + workhours_counter_target_hours_value: str = Form(default="", alias="workhours_counter_target_hours"), + workhours_counter_target_email_enabled: str | None = Form(default=None), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + enabled = workhours_counter_enabled == "on" + show_in_header = workhours_counter_show_in_header == "on" + target_email_enabled = workhours_counter_target_email_enabled == "on" + start_date_value = workhours_counter_start_date_value.strip() + end_date_value = workhours_counter_end_date_value.strip() + manual_offset_hours_value = workhours_counter_manual_offset_hours_value.strip() + target_hours_value = workhours_counter_target_hours_value.strip() + start_date = None + end_date = None + manual_offset_minutes = 0 + target_minutes: int | None = None + + if start_date_value: + try: + start_date = parse_date_query(start_date_value) + except HTTPException as exc: + return render_settings_form( + request, + db=db, + user=user, + error=exc.detail, + status_code=exc.status_code, + ) + if end_date_value: + try: + end_date = parse_date_query(end_date_value) + except HTTPException as exc: + return render_settings_form( + request, + db=db, + user=user, + error=exc.detail, + status_code=exc.status_code, + ) + + if enabled: + if start_date is None or end_date is None: + return render_settings_form( + request, + db=db, + user=user, + error="Bitte Start- und Enddatum fuer den Arbeitsstunden-Counter setzen.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + if end_date < start_date: + return render_settings_form( + request, + db=db, + user=user, + error="Enddatum darf nicht vor dem Startdatum liegen.", + 
status_code=status.HTTP_400_BAD_REQUEST, + ) + + if manual_offset_hours_value: + try: + manual_offset_hours = float(manual_offset_hours_value.replace(",", ".")) + except ValueError: + return render_settings_form( + request, + db=db, + user=user, + error="Zusatzstunden müssen eine Zahl sein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + manual_offset_minutes = int(round(manual_offset_hours * 60)) + if manual_offset_minutes < 0: + return render_settings_form( + request, + db=db, + user=user, + error="Zusatzstunden dürfen nicht negativ sein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if target_hours_value: + try: + target_hours = float(target_hours_value.replace(",", ".")) + except ValueError: + return render_settings_form( + request, + db=db, + user=user, + error="Stundenziel muss eine Zahl sein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + target_minutes = int(round(target_hours * 60)) + if target_minutes <= 0: + return render_settings_form( + request, + db=db, + user=user, + error="Stundenziel muss größer als 0 sein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + if target_email_enabled and resolve_mail_settings(db) is None: + return render_settings_form( + request, + db=db, + user=user, + error="E-Mail-Warnungen sind erst verfügbar, wenn ein E-Mail-Server konfiguriert ist.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + user.workhours_counter_enabled = enabled + user.workhours_counter_show_in_header = show_in_header and enabled + user.workhours_counter_start_date = start_date + user.workhours_counter_end_date = end_date + user.workhours_counter_manual_offset_minutes = manual_offset_minutes + user.workhours_counter_target_minutes = target_minutes + user.workhours_counter_target_email_enabled = target_email_enabled and target_minutes is not None and enabled + user.workhours_counter_warning_last_sent_on = None + user.workhours_counter_warning_last_sent_key = None + db.commit() + return 
RedirectResponse(url="/settings?msg=workhours_counter_updated", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/overtime") + async def settings_update_overtime( + request: Request, + overtime_start_date_value: str = Form(default="", alias="overtime_start_date"), + overtime_expiry_days_value: str = Form(default="", alias="overtime_expiry_days"), + expire_negative_overtime: str | None = Form(default=None), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + overtime_start_date = None + if overtime_start_date_value.strip(): + try: + overtime_start_date = parse_date_query(overtime_start_date_value.strip()) + except HTTPException as exc: + return render_settings_form( + request, + db=db, + user=user, + error=exc.detail, + status_code=exc.status_code, + ) + + overtime_expiry_days = None + if overtime_expiry_days_value.strip(): + try: + overtime_expiry_days = int(overtime_expiry_days_value.strip()) + except ValueError: + return render_settings_form( + request, + db=db, + user=user, + error="Verfall muss eine ganze Zahl in Tagen sein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + if overtime_expiry_days <= 0: + return render_settings_form( + request, + db=db, + user=user, + error="Verfall muss groesser als 0 sein.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + if overtime_expiry_days > 3650: + return render_settings_form( + request, + db=db, + user=user, + error="Verfall ist zu gross (maximal 3650 Tage).", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + user.overtime_start_date = overtime_start_date + user.overtime_expiry_days = overtime_expiry_days + user.expire_negative_overtime = expire_negative_overtime == "on" + db.commit() + + return RedirectResponse(url="/settings?msg=overtime_updated", status_code=status.HTTP_303_SEE_OTHER) + + 
@app.post("/settings/vacations/add") + async def settings_add_vacation( + request: Request, + start_date_value: str = Form(..., alias="start_date"), + end_date_value: str = Form(..., alias="end_date"), + include_weekends: str | None = Form(default=None), + notes: str = Form(default=""), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + try: + start_date = parse_date_query(start_date_value) + end_date = parse_date_query(end_date_value) + except HTTPException as exc: + return render_settings_form( + request, + db=db, + user=user, + error=exc.detail, + status_code=exc.status_code, + ) + if end_date < start_date: + return render_settings_form( + request, + db=db, + user=user, + error="Enddatum darf nicht vor dem Startdatum liegen.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + include_weekends_value = include_weekends == "on" + notes_value = notes.strip() or None + if include_weekends_value: + period = VacationPeriod( + user_id=user.id, + start_date=start_date, + end_date=end_date, + include_weekends=True, + notes=notes_value, + ) + db.add(period) + else: + working_days = get_user_working_days(user) + add_vacation_for_weekdays( + db=db, + user_id=user.id, + start_date=start_date, + end_date=end_date, + relevant_weekdays=working_days, + notes=notes_value, + ) + db.commit() + + return RedirectResponse(url="/settings?msg=vacation_added", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/vacations/delete-range") + async def settings_delete_vacation_range( + request: Request, + start_date_value: str = Form(..., alias="start_date"), + end_date_value: str = Form(..., alias="end_date"), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", 
status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + try: + start_date = parse_date_query(start_date_value) + end_date = parse_date_query(end_date_value) + except HTTPException as exc: + return render_settings_form( + request, + db=db, + user=user, + error=exc.detail, + status_code=exc.status_code, + ) + if end_date < start_date: + return render_settings_form( + request, + db=db, + user=user, + error="Enddatum darf nicht vor dem Startdatum liegen.", + status_code=status.HTTP_400_BAD_REQUEST, + ) + + remove_vacation_range( + db=db, + user_id=user.id, + start_date=start_date, + end_date=end_date, + ) + db.commit() + return RedirectResponse(url="/settings?msg=vacation_deleted", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/settings/vacations/{vacation_id}/delete") + async def settings_delete_vacation( + vacation_id: str, + request: Request, + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + stmt = select(VacationPeriod).where(VacationPeriod.id == vacation_id, VacationPeriod.user_id == user.id) + vacation = db.execute(stmt).scalar_one_or_none() + if not vacation: + return render_settings_form( + request, + db=db, + user=user, + error="Urlaubszeitraum nicht gefunden.", + status_code=status.HTTP_404_NOT_FOUND, + ) + + db.delete(vacation) + db.commit() + return RedirectResponse(url="/settings?msg=vacation_deleted", status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/vacation/day/toggle") + async def toggle_day_vacation( + request: Request, + date_value: str = Form(..., alias="date"), + return_to: str = Form(default="/dashboard"), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + 
verify_csrf(request, csrf_token) + + selected_date = parse_date_query(date_value) + working_days = get_user_working_days(user) + vacations = list_vacations_for_user(db, user.id, selected_date, selected_date) + vacation_dates = expand_vacation_dates( + vacations, + selected_date, + selected_date, + relevant_weekdays=working_days, + ) + + if selected_date in vacation_dates: + remove_vacation_range(db=db, user_id=user.id, start_date=selected_date, end_date=selected_date) + else: + clear_special_status_for_date(db=db, user_id=user.id, day=selected_date) + add_vacation_range( + db=db, + user_id=user.id, + start_date=selected_date, + end_date=selected_date, + include_weekends=True, + notes="Schneller Urlaubseintrag", + ) + + db.commit() + + destination = return_to if return_to.startswith("/") else "/dashboard" + return RedirectResponse(url=destination, status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/vacation/week/toggle") + async def toggle_week_vacation( + request: Request, + week_start_value: str = Form(..., alias="week_start"), + week_end_value: str = Form(..., alias="week_end"), + return_to: str = Form(default="/dashboard"), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + week_start = parse_date_query(week_start_value) + week_end = parse_date_query(week_end_value) + if week_end < week_start: + raise HTTPException(status_code=400, detail="Ungueltiger Wochenbereich") + + working_days = get_user_working_days(user) + vacations = list_vacations_for_user(db, user.id, week_start, week_end) + vacation_dates = expand_vacation_dates( + vacations, + week_start, + week_end, + relevant_weekdays=working_days, + ) + already_full = range_is_full_vacation( + week_start, + week_end, + vacation_dates=vacation_dates, + relevant_weekdays=working_days, + ) + + if already_full: + 
remove_vacation_range(db=db, user_id=user.id, start_date=week_start, end_date=week_end) + else: + cursor = week_start + while cursor <= week_end: + if cursor.weekday() in working_days: + clear_special_status_for_date(db=db, user_id=user.id, day=cursor) + cursor += timedelta(days=1) + add_vacation_for_weekdays( + db=db, + user_id=user.id, + start_date=week_start, + end_date=week_end, + relevant_weekdays=working_days, + notes="Schneller Urlaubseintrag Woche", + ) + db.commit() + + destination = return_to if return_to.startswith("/") else "/dashboard" + return RedirectResponse(url=destination, status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/special-day/toggle") + async def toggle_special_day( + request: Request, + date_value: str = Form(..., alias="date"), + status_value: str = Form(..., alias="status"), + return_to: str = Form(default="/dashboard"), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + selected_date = parse_date_query(date_value) + if status_value not in SPECIAL_DAY_STATUS_LABELS: + raise HTTPException(status_code=400, detail="Ungueltiger Sonderstatus") + + existing_entry_stmt = select(TimeEntry).where(TimeEntry.user_id == user.id, TimeEntry.date == selected_date) + existing_entry = db.execute(existing_entry_stmt).scalar_one_or_none() + if existing_entry: + destination = return_to if return_to.startswith("/") else "/dashboard" + return RedirectResponse(url=destination, status_code=status.HTTP_303_SEE_OTHER) + + existing_status_stmt = select(SpecialDayStatus).where( + SpecialDayStatus.user_id == user.id, + SpecialDayStatus.date == selected_date, + ) + existing_status = db.execute(existing_status_stmt).scalar_one_or_none() + + if existing_status and existing_status.status == status_value: + db.delete(existing_status) + else: + remove_vacation_range(db=db, 
user_id=user.id, start_date=selected_date, end_date=selected_date) + if existing_status: + existing_status.status = status_value + existing_status.notes = f"Schneller Sonderstatus: {SPECIAL_DAY_STATUS_LABELS[status_value]}" + else: + db.add( + SpecialDayStatus( + user_id=user.id, + date=selected_date, + status=status_value, + notes=f"Schneller Sonderstatus: {SPECIAL_DAY_STATUS_LABELS[status_value]}", + ) + ) + + db.commit() + destination = return_to if return_to.startswith("/") else "/dashboard" + return RedirectResponse(url=destination, status_code=status.HTTP_303_SEE_OTHER) + + @app.post("/overtime-adjustment/set") + async def set_overtime_adjustment( + request: Request, + date_value: str = Form(..., alias="date"), + adjustment_mode: str = Form(default="manual"), + adjustment_value: str = Form(default=""), + interval_start_time: str = Form(default=""), + interval_end_time: str = Form(default=""), + interval_direction: str = Form(default="negative"), + full_day_direction: str = Form(default="negative"), + return_to: str = Form(default="/dashboard"), + csrf_token: str = Form(...), + db: Session = Depends(get_db), + ): + user = get_current_user(request, db) + if not user: + return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER) + verify_csrf(request, csrf_token) + + selected_date = parse_date_query(date_value) + destination = return_to if return_to.startswith("/") else f"/overtime-adjustment/edit?date={selected_date.isoformat()}" + + existing_entry_stmt = select(TimeEntry).where(TimeEntry.user_id == user.id, TimeEntry.date == selected_date) + existing_entry = db.execute(existing_entry_stmt).scalar_one_or_none() + if existing_entry: + return RedirectResponse(url=destination, status_code=status.HTTP_303_SEE_OTHER) + + try: + if adjustment_mode == "full_day": + if full_day_direction not in {"positive", "negative"}: + raise ValueError("Ungueltige Richtung fuer Tages-Stundenausgleich") + minutes = full_day_adjustment_minutes( + db=db, + user=user, + 
selected_date=selected_date, + positive=full_day_direction == "positive", + ) + note = ( + f"{OVERTIME_ADJUSTMENT_LABEL}: ganzer Tag " + f"({'+' if minutes > 0 else '-'}{minutes_to_hhmm(abs(minutes))})" + ) + elif adjustment_mode == "interval": + if interval_direction not in {"positive", "negative"}: + raise ValueError("Ungueltige Richtung fuer Stundenausgleich") + start_minutes = parse_time_to_minutes(interval_start_time) + end_minutes = parse_time_to_minutes(interval_end_time) + if end_minutes <= start_minutes: + raise ValueError("Die Endzeit muss nach der Startzeit liegen") + interval_minutes = end_minutes - start_minutes + minutes = interval_minutes if interval_direction == "positive" else -interval_minutes + note = ( + f"{OVERTIME_ADJUSTMENT_LABEL}: Zeitraum {interval_start_time} - {interval_end_time} " + f"({'+' if minutes > 0 else '-'}{minutes_to_hhmm(abs(minutes))})" + ) + else: + minutes = parse_signed_duration_to_minutes(adjustment_value) + note = f"{OVERTIME_ADJUSTMENT_LABEL}: manuell {'+' if minutes > 0 else ''}{minutes_to_hhmm(minutes)}" + except ValueError as exc: + error_params = {"date": selected_date.isoformat(), "overtime_error": str(exc)} + if return_to.startswith("/"): + error_params["return_to"] = return_to + error_query = urlencode(error_params) + return RedirectResponse(url=f"/overtime-adjustment/edit?{error_query}", status_code=status.HTTP_303_SEE_OTHER) + + existing_adjustment_stmt = select(OvertimeAdjustment).where( + OvertimeAdjustment.user_id == user.id, + OvertimeAdjustment.date == selected_date, + ) + existing_adjustment = db.execute(existing_adjustment_stmt).scalar_one_or_none() + if existing_adjustment: + existing_adjustment.minutes = minutes + existing_adjustment.notes = note + else: + db.add( + OvertimeAdjustment( + user_id=user.id, + date=selected_date, + minutes=minutes, + notes=note, + ) + ) + + db.commit() + return RedirectResponse(url=destination, status_code=status.HTTP_303_SEE_OTHER) + + 
@app.post("/overtime-adjustment/clear")
async def clear_overtime_adjustment(
    request: Request,
    date_value: str = Form(..., alias="date"),
    return_to: str = Form(default="/dashboard"),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Remove the overtime adjustment for one day and redirect back."""
    user = get_current_user(request, db)
    if not user:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)
    verify_csrf(request, csrf_token)

    selected_date = parse_date_query(date_value)
    clear_overtime_adjustment_for_date(db=db, user_id=user.id, day=selected_date)
    db.commit()

    # Only same-site redirect targets are honoured.
    destination = return_to if return_to.startswith("/") else "/dashboard"
    return RedirectResponse(url=destination, status_code=status.HTTP_303_SEE_OTHER)


@app.get("/bulk-entry", response_class=HTMLResponse)
async def bulk_entry_form(
    request: Request,
    from_date_value: str | None = Query(default=None, alias="from"),
    to_date_value: str | None = Query(default=None, alias="to"),
    weekdays_value: str | None = Query(default="0,1,2,3,4", alias="weekdays"),
    bulk_mode: str | None = Query(default="only_missing", alias="mode"),
    created: int | None = Query(default=None),
    updated: int | None = Query(default=None),
    skipped: int | None = Query(default=None),
    db: Session = Depends(get_db),
):
    """Render the bulk-entry form.

    Defaults to the previous and current ISO week (Mon-Sun) and Mon-Fri
    weekdays. ``created``/``updated``/``skipped`` come from the redirect after
    a successful submit and produce the success banner.
    """
    user = get_current_user(request, db)
    if not user:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)

    today = date.today()
    default_from = monday_of(today - timedelta(days=7))
    default_to = monday_of(today) + timedelta(days=6)

    parsed_from = parse_date_query(from_date_value, default=default_from).isoformat()
    parsed_to = parse_date_query(to_date_value, default=default_to).isoformat()

    weekday_parts = [item for item in (weekdays_value or "").split(",") if item]
    try:
        weekdays_selected = parse_weekday_values(weekday_parts) if weekday_parts else [0, 1, 2, 3, 4]
    except HTTPException:
        # Invalid weekday query values silently fall back to Mon-Fri.
        weekdays_selected = [0, 1, 2, 3, 4]

    if bulk_mode not in {"only_missing", "upsert"}:
        bulk_mode = "only_missing"

    success_message = None
    if created is not None and updated is not None and skipped is not None:
        success_message = f"Mehrfacheingabe gespeichert: {created} angelegt, {updated} aktualisiert, {skipped} uebersprungen."

    return render_bulk_form(
        request,
        db=db,
        user=user,
        from_date_value=parsed_from,
        to_date_value=parsed_to,
        weekdays_selected=weekdays_selected,
        bulk_mode=bulk_mode,
        start_time="08:30",
        end_time="15:00",
        break_minutes=automatic_break_minutes(start_minutes=8 * 60 + 30, end_minutes=15 * 60)
        if auto_break_rules_enabled(user)
        else default_break_minutes_for_user(user),
        break_mode="auto" if auto_break_rules_enabled(user) else "manual",
        notes="",
        success_message=success_message,
    )


@app.post("/bulk-entry", response_class=HTMLResponse)
async def bulk_entry_submit(
    request: Request,
    from_date_value: str = Form(..., alias="from_date"),
    to_date_value: str = Form(..., alias="to_date"),
    weekdays_values: list[str] = Form(default=[]),
    start_time: str = Form(...),
    end_time: str = Form(...),
    break_minutes: int = Form(default=0),
    break_mode: str = Form(default="manual"),
    notes: str = Form(default=""),
    bulk_mode: str = Form(..., alias="mode"),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Create/update time entries for every selected weekday in a date range.

    In ``only_missing`` mode existing entries are skipped; in ``upsert`` mode
    they are overwritten. Ranges longer than ~12 months are rejected. On
    success the client is redirected back to the form with created/updated/
    skipped counters in the query string (POST-redirect-GET).
    """
    user = get_current_user(request, db)
    if not user:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)

    verify_csrf(request, csrf_token)

    try:
        parsed_mode = parse_bulk_mode(bulk_mode)
        from_date = parse_date_query(from_date_value)
        to_date = parse_date_query(to_date_value)
        weekdays_selected = parse_weekday_values(weekdays_values)
        start_minutes = parse_time_to_minutes(start_time)
        end_minutes = parse_time_to_minutes(end_time)
        effective_break_minutes, effective_break_mode = resolve_break_settings(
            user=user,
            start_minutes=start_minutes,
            end_minutes=end_minutes,
            submitted_break_minutes=break_minutes,
            submitted_break_mode=break_mode,
        )
        # Raises if the resulting net working time would be invalid.
        compute_net_minutes(start_minutes, end_minutes, effective_break_minutes)
    except HTTPException as exc:
        return render_bulk_form(
            request,
            db=db,
            user=user,
            from_date_value=from_date_value,
            to_date_value=to_date_value,
            weekdays_selected=[0, 1, 2, 3, 4],
            bulk_mode=bulk_mode if bulk_mode in {"only_missing", "upsert"} else "only_missing",
            start_time=start_time,
            end_time=end_time,
            break_minutes=break_minutes,
            break_mode=break_mode,
            notes=notes,
            error=exc.detail,
            status_code=exc.status_code,
        )
    except Exception as exc:
        # Best-effort: surface unexpected parse errors on the form instead of
        # a 500 page.
        return render_bulk_form(
            request,
            db=db,
            user=user,
            from_date_value=from_date_value,
            to_date_value=to_date_value,
            weekdays_selected=[0, 1, 2, 3, 4],
            bulk_mode=bulk_mode if bulk_mode in {"only_missing", "upsert"} else "only_missing",
            start_time=start_time,
            end_time=end_time,
            break_minutes=break_minutes,
            break_mode=break_mode,
            notes=notes,
            error=str(exc),
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    if from_date > to_date:
        return render_bulk_form(
            request,
            db=db,
            user=user,
            from_date_value=from_date_value,
            to_date_value=to_date_value,
            weekdays_selected=weekdays_selected,
            bulk_mode=parsed_mode,
            start_time=start_time,
            end_time=end_time,
            break_minutes=break_minutes,
            break_mode=break_mode,
            notes=notes,
            error="Von-Datum darf nicht nach dem Bis-Datum liegen.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    if (to_date - from_date).days > 370:
        return render_bulk_form(
            request,
            db=db,
            user=user,
            from_date_value=from_date_value,
            to_date_value=to_date_value,
            weekdays_selected=weekdays_selected,
            bulk_mode=parsed_mode,
            start_time=start_time,
            end_time=end_time,
            break_minutes=break_minutes,
            break_mode=break_mode,
            notes=notes,
            error="Zeitraum ist zu gross. Bitte maximal 12 Monate auf einmal bearbeiten.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    if not weekdays_selected:
        return render_bulk_form(
            request,
            db=db,
            user=user,
            from_date_value=from_date_value,
            to_date_value=to_date_value,
            weekdays_selected=weekdays_selected,
            bulk_mode=parsed_mode,
            start_time=start_time,
            end_time=end_time,
            break_minutes=break_minutes,
            break_mode=break_mode,
            notes=notes,
            error="Bitte mindestens einen Wochentag auswaehlen.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    # Load all existing entries for the range once and index them by date so
    # the day loop below needs no per-day queries.
    existing_stmt = (
        select(TimeEntry)
        .where(TimeEntry.user_id == user.id, TimeEntry.date >= from_date, TimeEntry.date <= to_date)
        .order_by(TimeEntry.date.asc())
    )
    existing_entries = db.execute(existing_stmt).scalars().all()
    existing_by_date = {entry.date: entry for entry in existing_entries}

    created_count = 0
    updated_count = 0
    skipped_count = 0

    cursor = from_date
    while cursor <= to_date:
        if cursor.weekday() not in weekdays_selected:
            cursor += timedelta(days=1)
            continue

        existing_entry = existing_by_date.get(cursor)
        if existing_entry and parsed_mode == "only_missing":
            skipped_count += 1
            cursor += timedelta(days=1)
            continue

        if existing_entry:
            existing_entry.start_minutes = start_minutes
            existing_entry.end_minutes = end_minutes
            existing_entry.break_minutes = effective_break_minutes
            existing_entry.break_rule_mode = effective_break_mode
            existing_entry.notes = notes.strip() or None
            clear_auto_entry_skip_for_date(db=db, user_id=user.id, day=cursor)
            updated_count += 1
        else:
            new_entry = TimeEntry(
                user_id=user.id,
                date=cursor,
                start_minutes=start_minutes,
                end_minutes=end_minutes,
                break_minutes=effective_break_minutes,
                break_rule_mode=effective_break_mode,
                notes=notes.strip() or None,
            )
            db.add(new_entry)
            clear_auto_entry_skip_for_date(db=db, user_id=user.id, day=cursor)
            created_count += 1

        # A concrete entry replaces any special status or overtime adjustment
        # previously recorded for that day.
        clear_special_status_for_date(db=db, user_id=user.id, day=cursor)
        clear_overtime_adjustment_for_date(db=db, user_id=user.id, day=cursor)
        cursor += timedelta(days=1)

    db.commit()
    logger.info(
        "bulk_entry email=%s from=%s to=%s mode=%s created=%s updated=%s skipped=%s",
        user.email,
        from_date.isoformat(),
        to_date.isoformat(),
        parsed_mode,
        created_count,
        updated_count,
        skipped_count,
    )

    params = urlencode(
        {
            "from": from_date.isoformat(),
            "to": to_date.isoformat(),
            "mode": parsed_mode,
            "weekdays": ",".join(str(day) for day in weekdays_selected),
            "created": str(created_count),
            "updated": str(updated_count),
            "skipped": str(skipped_count),
        }
    )
    return RedirectResponse(url=f"/bulk-entry?{params}", status_code=status.HTTP_303_SEE_OTHER)


@app.get("/export", response_class=HTMLResponse)
async def export_form(request: Request, db: Session = Depends(get_db)):
    """Render the export form, defaulting to the current month so far."""
    user = get_current_user(request, db)
    if not user:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)

    today = date.today()
    first_day_of_month = date(today.year, today.month, 1)
    return templates.TemplateResponse(
        "pages/export.html",
        build_context(
            request,
            user=user,
            db=db,
            from_date=first_day_of_month.isoformat(),
            to_date=today.isoformat(),
            error=None,
        ),
    )


@app.post("/export")
async def export_data(
    request: Request,
    from_date_value: str = Form(..., alias="from_date"),
    to_date_value: str = Form(..., alias="to_date"),
    export_format: str = Form(..., alias="format"),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Export the user's hours for a date range as an XLSX or PDF download."""
    user = get_current_user(request, db)
    if not user:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)

    verify_csrf(request, csrf_token)
    ensure_user_has_default_target_rule(db, user)
    db.commit()

    try:
        from_date = parse_date_query(from_date_value)
        to_date = parse_date_query(to_date_value)
    except HTTPException as exc:
        return templates.TemplateResponse(
            "pages/export.html",
            build_context(
                request,
                user=user,
                db=db,
                from_date=from_date_value,
                to_date=to_date_value,
                error=exc.detail,
            ),
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    if from_date > to_date:
        return templates.TemplateResponse(
            "pages/export.html",
            build_context(
                request,
                user=user,
                db=db,
                from_date=from_date_value,
                to_date=to_date_value,
                error="Von-Datum darf nicht nach dem Bis-Datum liegen.",
            ),
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    if export_format not in {"xlsx", "pdf"}:
        raise HTTPException(status_code=400, detail="Ungueltiges Exportformat")
    rows, week_summaries, totals = build_export_payload_for_range(
        db=db,
        user=user,
        from_date=from_date,
        to_date=to_date,
    )
    title = "Stundenexport"

    if export_format == "xlsx":
        payload = create_excel_export(rows, week_summaries, totals, title)
        filename = f"stunden_{from_date.isoformat()}_{to_date.isoformat()}.xlsx"
        media_type = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
    else:
        payload = create_pdf_export(rows, week_summaries, totals, title)
        filename = f"stunden_{from_date.isoformat()}_{to_date.isoformat()}.pdf"
        media_type = "application/pdf"

    response = Response(content=payload, media_type=media_type)
    # BUGFIX: the computed filename was previously not interpolated into the
    # header, so every download was served with a broken literal filename.
    response.headers["Content-Disposition"] = f'attachment; filename="{filename}"'
    return response


@app.post("/settings/export-all")
async def settings_export_all_data(
    request: Request,
    export_format: str = Form(..., alias="format"),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Export all of the user's data: full-range XLSX/PDF or a JSON backup."""
    user = get_current_user(request, db)
    if not user:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)

    verify_csrf(request, csrf_token)
    ensure_user_has_default_target_rule(db, user)
    db.commit()

    if export_format not in {"xlsx", "pdf", "backup_json"}:
        return render_settings_form(
            request,
            db=db,
            user=user,
            active_tab="settings",
            error="Ungültiges Exportformat.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    if export_format == "backup_json":
        payload = create_backup_export(build_user_backup_payload(db=db, user=user))
        filename = f"stundenfuchs_backup_{date.today().isoformat()}.json"
        media_type = "application/json"
    else:
        # Tabular exports cover the user's full recorded date range.
        from_date, to_date = user_export_date_bounds(db, user)
        rows, week_summaries, totals = build_export_payload_for_range(
            db=db,
            user=user,
            from_date=from_date,
            to_date=to_date,
        )
        title = "Stundenexport"
        if export_format == "xlsx":
            payload = create_excel_export(rows, week_summaries, totals, title)
            filename = f"stunden_gesamt_{from_date.isoformat()}_{to_date.isoformat()}.xlsx"
            media_type = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
        else:
            payload = create_pdf_export(rows, week_summaries, totals, title)
            filename = f"stunden_gesamt_{from_date.isoformat()}_{to_date.isoformat()}.pdf"
            media_type = "application/pdf"

    response = Response(content=payload, media_type=media_type)
    # BUGFIX: interpolate the computed filename (was a broken literal before).
    response.headers["Content-Disposition"] = f'attachment; filename="{filename}"'
    return response


@app.post("/settings/import/preview")
async def settings_import_preview(
    request: Request,
    import_mode: str = Form(default="merge"),
    backup_file: UploadFile | None = File(default=None),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Validate an uploaded backup file and show an import preview.

    The parsed payload is persisted as a preview record so the follow-up
    ``/settings/import/execute`` request can apply it.
    """
    user = require_user(request, db)
    verify_csrf(request, csrf_token)

    if import_mode not in {"merge", "replace_user_data"}:
        return render_settings_form(
            request,
            db=db,
            user=user,
            active_tab="settings",
            error="Ungültiger Importmodus.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    if backup_file is None or not (backup_file.filename or "").strip():
        return render_settings_form(
            request,
            db=db,
            user=user,
            active_tab="settings",
            import_mode_selected=import_mode,
            error="Bitte wähle eine Backup-Datei aus.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    try:
        payload = load_backup_payload_from_bytes(await backup_file.read())
        preview = create_import_preview_record(db=db, user=user, payload=payload, mode=import_mode)
        import_preview = import_preview_view_data(db=db, user=user, preview=preview, payload=payload)
        db.commit()
    except BackupImportError as exc:
        db.rollback()
        return render_settings_form(
            request,
            db=db,
            user=user,
            active_tab="settings",
            import_mode_selected=import_mode,
            error=str(exc),
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    return render_settings_form(
        request,
        db=db,
        user=user,
        active_tab="settings",
        import_mode_selected=import_mode,
        import_preview=import_preview,
        success_message="Backup geprüft. Bitte kontrolliere die Vorschau vor dem Import.",
    )


@app.post("/settings/import/execute")
async def settings_import_execute(
    request: Request,
    preview_id: str = Form(...),
    confirm_replace: str | None = Form(default=None),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Apply a previously previewed backup import.

    Replace-mode imports additionally require the explicit confirmation
    checkbox. After a successful import, auto-holidays and auto-entries are
    re-synchronized and the preview record is deleted.
    """
    user = require_user(request, db)
    verify_csrf(request, csrf_token)

    preview = get_import_preview_record(db=db, user=user, preview_id=preview_id)
    if preview is None:
        return render_settings_form(
            request,
            db=db,
            user=user,
            active_tab="settings",
            error="Die Importvorschau ist abgelaufen oder nicht mehr verfügbar.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    try:
        payload = parse_preview_payload(preview)
    except BackupImportError as exc:
        # Corrupt stored preview: drop it and report the error.
        db.delete(preview)
        db.commit()
        return render_settings_form(
            request,
            db=db,
            user=user,
            active_tab="settings",
            error=str(exc),
            status_code=status.HTTP_400_BAD_REQUEST,
        )
    import_preview = import_preview_view_data(db=db, user=user, preview=preview, payload=payload)
    if preview.mode == IMPORT_MODE_REPLACE and confirm_replace != "on":
        return render_settings_form(
            request,
            db=db,
            user=user,
            active_tab="settings",
            import_mode_selected=preview.mode,
            import_preview=import_preview,
            error="Bitte bestätige, dass deine bisherigen Daten ersetzt werden sollen.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    try:
        result = execute_backup_import(db=db, user=user, payload=payload, mode=preview.mode)
        # Rebuild derived data around the imported range (roughly last year
        # through two years ahead for holidays; everything up to today for
        # auto-filled entries).
        sync_auto_holidays_for_user(
            db=db,
            user=user,
            from_date=date.today().replace(month=1, day=1) - timedelta(days=366),
            to_date=date.today().replace(month=12, day=31) + timedelta(days=730),
        )
        autofill_entries_for_range(
            db=db,
            user=user,
            range_start=date(1970, 1, 1),
            range_end=date.today(),
        )
        db.delete(preview)
        db.commit()
    except BackupImportError as exc:
        db.rollback()
        # The rollback may have restored the preview row; re-read it so the
        # form can still show the preview alongside the error.
        preview = get_import_preview_record(db=db, user=user, preview_id=preview_id)
        import_preview = None
        if preview is not None:
            import_preview = import_preview_view_data(db=db, user=user, preview=preview, payload=payload)
        return render_settings_form(
            request,
            db=db,
            user=user,
            active_tab="settings",
            import_mode_selected=preview.mode if preview else "merge",
            import_preview=import_preview,
            error=str(exc),
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    created_total = sum(result["created"].values())
    skipped_total = sum(result["skipped"].values())
    return render_settings_form(
        request,
        db=db,
        user=user,
        active_tab="settings",
        import_mode_selected="merge",
        success_message=(
            f"Backup importiert. {created_total} Datensätze übernommen, "
            f"{skipped_total} Konflikte übersprungen."
        ),
    )


@app.post("/settings/account/delete")
async def settings_delete_own_account(
    request: Request,
    current_password: str = Form(...),
    confirm_email: str = Form(...),
    confirm_delete: str | None = Form(default=None),
    csrf_token: str = Form(...),
    db: Session = Depends(get_db),
):
    """Permanently delete the current user's own account.

    Requires the current password, the exact e-mail address and an explicit
    confirmation checkbox. The last active admin may not delete themselves.
    """
    user = require_user(request, db)
    verify_csrf(request, csrf_token)

    if not verify_password(current_password, user.password_hash):
        return render_settings_form(
            request,
            db=db,
            user=user,
            active_tab="settings",
            error="Aktuelles Passwort ist nicht korrekt.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    if confirm_email.strip().lower() != user.email.lower():
        return render_settings_form(
            request,
            db=db,
            user=user,
            active_tab="settings",
            error="Bitte gib zur Bestätigung genau deine E-Mail-Adresse ein.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    if confirm_delete != "on":
        return render_settings_form(
            request,
            db=db,
            user=user,
            active_tab="settings",
            error="Bitte bestätige, dass dein Konto dauerhaft gelöscht werden soll.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    # Guard against locking everyone out of the admin area.
    if user.role == "admin" and user.is_active and count_admin_users(db) <= 1:
        return render_settings_form(
            request,
            db=db,
            user=user,
            active_tab="settings",
            error="Der letzte aktive Admin kann seinen Account nicht selbst löschen.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    request.session.clear()
    db.delete(user)
    db.commit()
    return RedirectResponse(url="/login?msg=account_deleted", status_code=status.HTTP_303_SEE_OTHER)


@app.post("/auth/register")
async def api_register(
    request: Request,
    payload: RegisterRequest,
    db: Session = Depends(get_db),
):
    """JSON registration endpoint.

    Creates the user (bootstrap-admin identities become admins), optionally
    sends a verification e-mail, and — when verification is disabled — logs
    the new user in immediately and returns their public payload.
    """
    existing = find_user_by_email(db, payload.email)
    if existing:
        raise HTTPException(status_code=409, detail="E-Mail ist bereits registriert")
    verification_enabled = is_email_verification_enabled(db)

    user = User(
        email=payload.email.lower(),
        password_hash=hash_password(payload.password),
        role="admin" if is_bootstrap_admin_identity(payload.email) else "user",
        email_verified=not verification_enabled,
    )
    db.add(user)
    db.commit()
    db.refresh(user)
    ensure_user_has_default_target_rule(db, user)
    db.commit()
    send_registration_admin_notification(db=db, user=user, source="api_register")

    if verification_enabled:
        sent, reason = send_email_verification_link(request=request, db=db, user=user)
        logger.info("api_register_pending_verification email=%s sent=%s reason=%s", user.email, sent, reason)
        # Rate-limited resends are not an error from the client's view.
        if not sent and reason != "rate_limited":
            raise HTTPException(status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail="Bestaetigungs-E-Mail konnte nicht versendet werden")
        return {"ok": True, "email_verification_required": True}

    send_registration_email_if_enabled(db=db, user=user)

    csrf_token = login_user(request, user)
    logger.info("register_success email=%s", user.email)
    return user_public_payload(user, csrf_token)


@app.post("/auth/login")
async def api_login(
    request: Request,
    payload: LoginRequest,
    db: Session = Depends(get_db),
):
    """JSON login endpoint with rate limiting, e-mail verification and MFA.

    Returns either the logged-in user's public payload or, when MFA is
    configured, ``{"mfa_required": True, ...}`` for the follow-up
    ``/auth/mfa`` step.
    """
    client_ip = get_client_ip(request)
    blocked, retry_minutes = is_login_blocked(
        db,
        payload.email,
        client_ip,
        settings.login_rate_limit_attempts,
        settings.login_rate_limit_window_minutes,
    )
    if blocked:
        logger.warning("login_blocked email=%s ip=%s", payload.email.lower(), client_ip)
        raise HTTPException(
            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
            detail=f"Zu viele Fehlversuche. In {retry_minutes} Minuten erneut versuchen.",
        )

    user = find_user_by_email(db, payload.email)
    # Single generic error for unknown / inactive / wrong-password cases so
    # the response does not reveal whether the account exists.
    if not user or not user.is_active or not verify_password(payload.password, user.password_hash):
        register_failed_attempt(db, payload.email, client_ip)
        logger.warning("login_failed email=%s ip=%s", payload.email.lower(), client_ip)
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Ungueltige Anmeldedaten")
    if settings.email_verification_required and not user.email_verified:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="E-Mail-Adresse ist noch nicht bestaetigt.")

    register_successful_attempt(db, payload.email, client_ip)
    logger.info("login_success email=%s ip=%s", payload.email.lower(), client_ip)

    may_login_directly, mfa_error = start_mfa_challenge(request, db, user)
    if mfa_error:
        raise HTTPException(status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail=mfa_error)
    if not may_login_directly:
        return {
            "mfa_required": True,
            "mfa_method": user.mfa_method,
            "csrf_token": ensure_csrf_token(request),
        }

    csrf_token = login_user(request, user)
    response = user_public_payload(user, csrf_token)
    response["mfa_required"] = False
    return response


@app.post("/auth/mfa")
async def api_login_mfa(
    request: Request,
    payload: MFAChallengeRequest,
    db: Session = Depends(get_db),
):
    """Complete a pending MFA challenge with the submitted code."""
    assert_api_csrf(request)
    user, error = verify_pending_mfa_code(request, db, payload.code)
    if error or not user:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=error or "Ungueltiger Code")
    csrf_token = login_user(request, user)
    response = user_public_payload(user, csrf_token)
    response["mfa_required"] = False
    return response


@app.post("/auth/mfa/resend")
async def api_login_mfa_resend(request: Request, db: Session = Depends(get_db)):
    """Resend the e-mail MFA code for the pending challenge (cooldown-gated)."""
    assert_api_csrf(request)
    user, pending_method = get_pending_mfa_user(request, db)
    if not user or not pending_method:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Keine aktive MFA-Session")
    if pending_method != MFA_METHOD_EMAIL:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Resend nur fuer E-Mail-MFA")
    if email_mfa_resend_cooldown_active(user):
        raise HTTPException(
            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
            detail="Bitte kurz warten, bevor ein neuer Code gesendet wird.",
        )
    if not send_email_mfa_code(db=db, user=user):
        raise HTTPException(status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail="Code konnte nicht versendet werden")
    return {"ok": True}


@app.post("/auth/logout")
async def api_logout(request: Request, db: Session = Depends(get_db)):
    """Clear the session (CSRF-protected via the x-csrf-token header)."""
    user = require_user(request, db)
    csrf_token = request.headers.get("x-csrf-token")
    verify_csrf(request, csrf_token)

    logger.info("logout email=%s", user.email)
    request.session.clear()
    return JSONResponse(status_code=200, content={"ok": True})


@app.get("/me")
async def api_me(request: Request, db: Session = Depends(get_db)):
    """Return the current user's public payload plus a fresh CSRF token."""
    user = require_user(request, db)
    return user_public_payload(user, ensure_csrf_token(request))


def assert_api_csrf(request: Request) -> None:
    """Validate the x-csrf-token header for JSON API mutations."""
    csrf_token = request.headers.get("x-csrf-token")
    verify_csrf(request, csrf_token)


@app.get("/time-entries")
async def list_time_entries(
    request: Request,
    from_date: str | None = Query(default=None, alias="from"),
    to_date: str | None = Query(default=None, alias="to"),
    db: Session = Depends(get_db),
):
    """List the user's time entries, auto-filling the requested range first."""
    user = require_user(request, db)

    # Defaults only affect the autofill range, not the query filters below.
    start = parse_date_query(from_date) if from_date else date.today()
    end = parse_date_query(to_date) if to_date else (date.today() + timedelta(days=730))
    auto_created = autofill_entries_for_range(db=db, user=user, range_start=start, range_end=end)
    if auto_created:
        db.commit()

    stmt = select(TimeEntry).where(TimeEntry.user_id == user.id)

    # Filters are applied only when explicitly supplied by the caller.
    if from_date:
        stmt = stmt.where(TimeEntry.date >= start)
    if to_date:
        stmt = stmt.where(TimeEntry.date <= end)

    stmt = stmt.order_by(TimeEntry.date.asc())
    entries = db.execute(stmt).scalars().all()

    return {"items": [serialize_entry(entry) for entry in entries]}


@app.post("/time-entries")
async def create_time_entry(
    request: Request,
    payload: TimeEntryCreate,
    db: Session = Depends(get_db),
):
    """Create a time entry via the JSON API (one entry per user and day)."""
    user = require_user(request, db)
    assert_api_csrf(request)

    start_minutes = parse_time_to_minutes(payload.start_time)
    end_minutes = parse_time_to_minutes(payload.end_time)
    break_minutes, break_mode = resolve_break_settings(
        user=user,
        start_minutes=start_minutes,
        end_minutes=end_minutes,
        submitted_break_minutes=payload.break_minutes,
        submitted_break_mode=payload.break_mode,
    )
    # Raises on invalid combinations (validation only; result unused here).
    compute_net_minutes(start_minutes, end_minutes, break_minutes)

    entry = TimeEntry(
        user_id=user.id,
        date=payload.date,
        start_minutes=start_minutes,
        end_minutes=end_minutes,
        break_minutes=break_minutes,
        break_rule_mode=break_mode,
        notes=payload.notes,
    )
    db.add(entry)
    # A concrete entry supersedes skip markers, special statuses and
    # overtime adjustments for that day.
    clear_auto_entry_skip_for_date(db=db, user_id=user.id, day=payload.date)
    clear_special_status_for_date(db=db, user_id=user.id, day=payload.date)
    clear_overtime_adjustment_for_date(db=db, user_id=user.id, day=payload.date)

    try:
        db.commit()
    except IntegrityError:
        # Unique (user, date) constraint: surface as a 409 conflict.
        db.rollback()
        raise HTTPException(status_code=409, detail="Eintrag fuer dieses Datum existiert bereits")

    db.refresh(entry)
    return serialize_entry(entry)


@app.patch("/time-entries/{entry_id}")
async def update_time_entry(
    entry_id: str,
    request: Request,
    payload: TimeEntryUpdate,
    db: Session = Depends(get_db),
):
    """Partially update a time entry; omitted fields keep their values."""
    user = require_user(request, db)
    assert_api_csrf(request)
    entry = get_entry_or_404(db, user.id, entry_id)

    start_minutes = entry.start_minutes
    end_minutes = entry.end_minutes
    break_minutes = entry.break_minutes

    if payload.start_time is not None:
        start_minutes = parse_time_to_minutes(payload.start_time)
    if payload.end_time is not None:
        end_minutes = parse_time_to_minutes(payload.end_time)
    break_minutes, break_mode = resolve_break_settings(
        user=user,
        start_minutes=start_minutes,
        end_minutes=end_minutes,
        submitted_break_minutes=payload.break_minutes,
        submitted_break_mode=payload.break_mode,
        existing_break_mode=entry.break_rule_mode,
        existing_break_minutes=entry.break_minutes,
        start_or_end_changed=(
            start_minutes != entry.start_minutes or end_minutes != entry.end_minutes
        ),
    )

    compute_net_minutes(start_minutes, end_minutes, break_minutes)

    entry.start_minutes = start_minutes
    entry.end_minutes = end_minutes
    entry.break_minutes = break_minutes
    entry.break_rule_mode = break_mode
    if payload.notes is not None:
        entry.notes = payload.notes
    clear_auto_entry_skip_for_date(db=db, user_id=user.id, day=entry.date)
    clear_overtime_adjustment_for_date(db=db, user_id=user.id, day=entry.date)

    db.commit()
    db.refresh(entry)

    return serialize_entry(entry)


@app.delete("/time-entries/{entry_id}")
async def delete_time_entry_api(
    entry_id: str,
    request: Request,
    db: Session = Depends(get_db),
):
    """Delete a time entry; mark the day as skipped for auto-fill users."""
    user = require_user(request, db)
    assert_api_csrf(request)

    entry = get_entry_or_404(db, user.id, entry_id)
    selected_date = entry.date
    db.delete(entry)
    # Prevent the auto-entry mode from immediately recreating the entry.
    if user.entry_mode == ENTRY_MODE_AUTO_UNTIL_TODAY and selected_date <= date.today():
        mark_auto_entry_skip_for_date(db=db, user_id=user.id, day=selected_date)
    db.commit()

    return {"ok": True}


@app.get("/reports/week")
async def week_report(
    request: Request,
    date_value: str | None = Query(default=None, alias="date"),
    db: Session = Depends(get_db),
):
    """Return the weekly report (per-day details plus weekly/cumulative totals)
    for the ISO week containing ``date`` (defaults to today)."""
    user = require_user(request, db)
    selected_date = parse_date_query(date_value, default=date.today())
    ensure_user_has_default_target_rule(db, user)
    db.commit()
    rules = list_rules_for_user(db, user.id)
    working_days = get_user_working_days(user)

    week_start, week_end = iso_week_bounds(selected_date)
    week_target_minutes = target_for_week(rules, week_start, user.weekly_target_minutes)

    auto_created = autofill_entries_for_range(db=db, user=user, range_start=week_start, range_end=week_end)
    if auto_created:
        db.commit()

    week_entries_stmt = (
        select(TimeEntry)
        .where(TimeEntry.user_id == user.id, TimeEntry.date >= week_start, TimeEntry.date <= week_end)
        .order_by(TimeEntry.date.asc())
    )
    week_entries = db.execute(week_entries_stmt).scalars().all()

    # All entries up to the end of the selected week feed the cumulative
    # overtime computation.
    all_entries_until_week_stmt = (
        select(TimeEntry)
        .where(TimeEntry.user_id == user.id, TimeEntry.date <= week_end)
        .order_by(TimeEntry.date.asc())
    )
    all_entries_until_week = db.execute(all_entries_until_week_stmt).scalars().all()

    vacations_selected = list_vacations_for_user(db, user.id, week_start, week_end)
    vacation_dates_selected = expand_vacation_dates(
        vacations_selected,
        week_start,
        week_end,
        relevant_weekdays=working_days,
    )
    special_selected = list_special_statuses_for_user(db, user.id, week_start, week_end)
    special_dates_selected = effective_non_working_dates_for_user(user=user, special_statuses=special_selected)
    count_as_worktime_dates_selected = count_as_worktime_dates_for_user(
        user=user,
        vacation_dates=vacation_dates_selected,
        special_statuses=special_selected,
    )
    special_by_date = special_status_map(special_selected)
    overtime_adjustments_selected = list_overtime_adjustments_for_user(db, user.id, week_start, week_end)
    overtime_adjustments_by_date = overtime_adjustment_map(overtime_adjustments_selected)
    vacation_days_selected = len([day for day in vacation_dates_selected if day.weekday() in working_days])
    week_data = aggregate_week(week_entries, week_start, week_target_minutes)
    # Effective totals override the raw aggregation to account for vacations,
    # special statuses and manual overtime adjustments.
    effective_week_totals = compute_effective_week_totals(
        entries=week_entries,
        week_start=week_start,
        weekly_target_minutes=week_target_minutes,
        vacation_dates=vacation_dates_selected,
        non_working_dates=special_dates_selected,
        count_as_worktime_dates=count_as_worktime_dates_selected,
        overtime_adjustment_minutes_by_date=overtime_adjustment_minutes_map(overtime_adjustments_selected),
        overtime_start_date=user.overtime_start_date,
        relevant_weekdays=working_days,
    )
    week_data["weekly_ist"] = effective_week_totals["weekly_ist"]
    week_data["weekly_soll"] = effective_week_totals["weekly_soll"]
    week_data["weekly_delta"] = effective_week_totals["weekly_delta"]
    vacations_until_week = list_vacations_for_user(db, user.id, date(1970, 1, 1), week_end)
    special_until_week = list_special_statuses_for_user(db, user.id, date(1970, 1, 1), week_end)
    vacation_dates_until_week = expand_vacation_dates(
        vacations_until_week,
        date(1970, 1, 1),
        week_end,
        relevant_weekdays=working_days,
    )
    overtime_adjustments_until_week = list_overtime_adjustments_for_user(db, user.id, date(1970, 1, 1), week_end)
    week_data["cumulative_delta"] = compute_cumulative_overtime_minutes(
        entries=all_entries_until_week,
        rules=rules,
        weekly_target_fallback=user.weekly_target_minutes,
        vacation_periods=vacations_until_week,
        non_working_dates=effective_non_working_dates_for_user(user=user, special_statuses=special_until_week),
        count_as_worktime_dates=count_as_worktime_dates_for_user(
            user=user,
            vacation_dates=vacation_dates_until_week,
            special_statuses=special_until_week,
        ),
        overtime_adjustment_minutes_by_date=overtime_adjustment_minutes_map(overtime_adjustments_until_week),
        selected_week_start=week_start,
        overtime_start_date=user.overtime_start_date,
        overtime_expiry_days=user.overtime_expiry_days,
        expire_negative_overtime=user.expire_negative_overtime,
        relevant_weekdays=working_days,
    )

    return {
        "week_start": week_data["week_start"].isoformat(),
        "week_end": week_data["week_end"].isoformat(),
        "weekly_ist_minutes": week_data["weekly_ist"],
        "weekly_soll_minutes": week_data["weekly_soll"],
        "weekly_delta_minutes": week_data["weekly_delta"],
        "cumulative_delta_minutes": week_data["cumulative_delta"],
        "vacation_days": vacation_days_selected,
        "days": [
            {
                "date": day_info["date"].isoformat(),
                "entry": serialize_entry(day_info["entry"]) if day_info["entry"] else None,
                "net_minutes": day_info["net_minutes"],
                "special_status": special_by_date.get(day_info["date"]),
                "overtime_adjustment_minutes": (
                    overtime_adjustments_by_date[day_info["date"]].minutes
                    if day_info["date"] in overtime_adjustments_by_date
                    else 0
                ),
            }
            for day_info in week_data["days"]
        ],
    }


@app.get("/reports/month")
async def month_report(
    request: Request,
    month: str = Query(...),
    db: Session = Depends(get_db),
):
    """Return the monthly report for a ``YYYY-MM`` month.

    NOTE(review): this handler continues past the end of the reviewed chunk;
    the visible portion is reproduced unchanged.
    """
    user = require_user(request, db)
    ensure_user_has_default_target_rule(db, user)
    db.commit()

    try:
        month_date = datetime.strptime(month, "%Y-%m").date()
    except ValueError as exc:
        raise HTTPException(status_code=400, detail="month muss YYYY-MM sein") from exc

    month_start = date(month_date.year, month_date.month, 1)
    # Month end = day before the first of the following month.
    if month_start.month == 12:
        next_month = date(month_start.year + 1, 1, 1)
    else:
        next_month = date(month_start.year, month_start.month + 1, 1)
    month_end = next_month - timedelta(days=1)

    auto_created = autofill_entries_for_range(db=db, user=user, range_start=month_start, range_end=month_end)
    if auto_created:
        db.commit()

    entries_stmt = (
        select(TimeEntry)
        .where(TimeEntry.user_id == user.id, TimeEntry.date >= month_start, TimeEntry.date <= month_end)
        .order_by(TimeEntry.date.asc())
    )
    entries = db.execute(entries_stmt).scalars().all()

    month_ist = 0
    month_soll = 0
    days = []
    entry_map = {entry.date: entry for entry in entries}
    rules = list_rules_for_user(db, user.id)
    working_days = get_user_working_days(user)

    cursor = month_start
    displayed_week_starts = set()
    month_special = list_special_statuses_for_user(db, user.id, month_start, month_end)
    month_special_dates = effective_non_working_dates_for_user(user=user, special_statuses=month_special)
+ month_special_by_date = special_status_map(month_special) + month_overtime_adjustments = list_overtime_adjustments_for_user(db, user.id, month_start, month_end) + month_overtime_adjustment_map = overtime_adjustment_map(month_overtime_adjustments) + while cursor <= month_end: + week_start = monday_of(cursor) + displayed_week_starts.add(week_start) + + entry = entry_map.get(cursor) + if entry: + net_minutes = compute_net_minutes(entry.start_minutes, entry.end_minutes, entry.break_minutes) + days.append( + { + "date": cursor.isoformat(), + "entry": serialize_entry(entry), + "net_minutes": net_minutes, + "special_status": month_special_by_date.get(cursor), + "overtime_adjustment_minutes": ( + month_overtime_adjustment_map[cursor].minutes if cursor in month_overtime_adjustment_map else 0 + ), + } + ) + else: + days.append( + { + "date": cursor.isoformat(), + "entry": None, + "net_minutes": 0, + "special_status": month_special_by_date.get(cursor), + "overtime_adjustment_minutes": ( + month_overtime_adjustment_map[cursor].minutes if cursor in month_overtime_adjustment_map else 0 + ), + } + ) + cursor += timedelta(days=1) + + ordered_week_starts = sorted(displayed_week_starts) + month_vacations = list_vacations_for_user(db, user.id, month_start, month_end) + month_vacation_dates = expand_vacation_dates( + month_vacations, + month_start, + month_end, + relevant_weekdays=working_days, + ) + month_count_as_worktime_dates = count_as_worktime_dates_for_user( + user=user, + vacation_dates=month_vacation_dates, + special_statuses=month_special, + ) + month_delta = 0 + weekly_breakdown = [] + for week_start in ordered_week_starts: + week_end = week_start + timedelta(days=6) + visible_start = max(week_start, month_start) + visible_end = min(week_end, month_end) + weekly_target_minutes = target_for_week(rules, week_start, user.weekly_target_minutes) + week_totals = compute_effective_span_totals( + entries=entries, + range_start=visible_start, + range_end=visible_end, + 
weekly_target_minutes=weekly_target_minutes, + vacation_dates=month_vacation_dates, + non_working_dates=month_special_dates, + count_as_worktime_dates=month_count_as_worktime_dates, + overtime_adjustment_minutes_by_date=overtime_adjustment_minutes_map(month_overtime_adjustments), + overtime_start_date=user.overtime_start_date, + relevant_weekdays=working_days, + ) + weekly_ist = week_totals["ist_minutes"] + weekly_soll = week_totals["soll_minutes"] + weekly_delta = week_totals["delta_minutes"] + vacation_days_visible = week_totals["vacation_workdays"] + month_ist += weekly_ist + month_soll += weekly_soll + month_delta += weekly_delta + weekly_breakdown.append( + { + "week_start": week_start.isoformat(), + "week_end": week_end.isoformat(), + "ist_minutes": weekly_ist, + "soll_minutes": weekly_soll, + "delta_minutes": weekly_delta, + "vacation_days": vacation_days_visible, + "overtime_adjustment_minutes": week_totals["overtime_adjustment_minutes"], + } + ) + + return { + "month": month, + "month_start": month_start.isoformat(), + "month_end": month_end.isoformat(), + "month_ist_minutes": month_ist, + "month_soll_minutes": month_soll, + "month_delta_minutes": month_delta, + "delta_mode": "sum_partial_week_delta_for_month_days", + "weeks": weekly_breakdown, + "days": days, + } + + return app + + +app = create_app() + + +if __name__ == "__main__": + import uvicorn + + app_settings = get_settings() + uvicorn.run( + "app.main:app", + host="0.0.0.0", + port=app_settings.port, + proxy_headers=True, + forwarded_allow_ips=app_settings.forwarded_allow_ips, + ) diff --git a/app/models.py b/app/models.py new file mode 100644 index 0000000..d8448c5 --- /dev/null +++ b/app/models.py @@ -0,0 +1,311 @@ +from datetime import date, datetime, timezone +import uuid + +from sqlalchemy import Boolean, Date, DateTime, ForeignKey, Integer, String, Text, UniqueConstraint +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from app.database import Base + + +class User(Base): + 
    __tablename__ = "users"

    # Primary key is a random UUID stored in its 36-character string form.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    email: Mapped[str] = mapped_column(String(255), unique=True, index=True, nullable=False)
    password_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False
    )
    # Fallback weekly target in minutes (1500 = 25h); per-period overrides live in WeeklyTargetRule.
    weekly_target_minutes: Mapped[int] = mapped_column(Integer, default=1500, nullable=False)
    role: Mapped[str] = mapped_column(String(32), default="user", nullable=False)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)

    # UI preferences for the home and month views.
    preferred_home_view: Mapped[str] = mapped_column(String(16), default="week", nullable=False)
    preferred_month_view_mode: Mapped[str] = mapped_column(String(16), default="flat", nullable=False)

    # "manual" or "auto_until_today" (entries auto-filled up to the current day,
    # see app.services.auto_entries).
    entry_mode: Mapped[str] = mapped_column(String(16), default="manual", nullable=False)
    # Working weekdays as CSV of Python weekday numbers (0=Monday); default Mon-Fri.
    working_days_csv: Mapped[str] = mapped_column(String(32), default="0,1,2,3,4", nullable=False)

    # Whether vacation / holiday / sick days are credited as worked time.
    count_vacation_as_worktime: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    count_holiday_as_worktime: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    count_sick_as_worktime: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)

    # Break handling: either rule-based automatic breaks or a fixed default.
    automatic_break_rules_enabled: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    default_break_minutes: Mapped[int] = mapped_column(Integer, default=0, nullable=False)

    # Overtime accounting configuration.
    overtime_start_date: Mapped[date | None] = mapped_column(Date, default=None)
    overtime_expiry_days: Mapped[int | None] = mapped_column(Integer, default=None)
    expire_negative_overtime: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)

    # Vacation budget and its header display toggle.
    vacation_days_total: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    vacation_show_in_header: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)

    # Optional cumulative work-hours counter over a configurable date span.
    workhours_counter_enabled: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    workhours_counter_show_in_header: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    workhours_counter_start_date: Mapped[date | None] = mapped_column(Date, default=None)
    workhours_counter_end_date: Mapped[date | None] = mapped_column(Date, default=None)
    workhours_counter_manual_offset_minutes: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    workhours_counter_target_minutes: Mapped[int | None] = mapped_column(Integer, default=None)
    workhours_counter_target_email_enabled: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    # Dedupe state for target-warning mails — presumably so the same warning is
    # not re-sent; verify against the mail-sending code.
    workhours_counter_warning_last_sent_on: Mapped[date | None] = mapped_column(Date, default=None)
    workhours_counter_warning_last_sent_key: Mapped[str | None] = mapped_column(String(120), default=None)
    # German federal state code — presumably used for public-holiday lookup; TODO confirm.
    federal_state: Mapped[str | None] = mapped_column(String(8), default=None)

    # Email verification state; the token is stored only as a hash.
    email_verified: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    email_verification_token_hash: Mapped[str | None] = mapped_column(String(128), default=None, index=True)
    email_verification_expires_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None)
    email_verification_sent_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None)

    # Multi-factor authentication ("none" by default); the TOTP secret is stored
    # encrypted and email codes are stored hashed.
    mfa_method: Mapped[str] = mapped_column(String(16), default="none", nullable=False)
    mfa_totp_secret_encrypted: Mapped[str | None] = mapped_column(Text, default=None)
    mfa_email_code_hash: Mapped[str | None] = mapped_column(String(255), default=None)
    mfa_email_code_expires_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None)
    mfa_email_code_sent_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None)

    # Child rows are deleted with the user (delete-orphan cascade).
    time_entries: Mapped[list["TimeEntry"]] = relationship(
        "TimeEntry", back_populates="user", cascade="all, delete-orphan"
    )
weekly_target_rules: Mapped[list["WeeklyTargetRule"]] = relationship( + "WeeklyTargetRule", + back_populates="user", + cascade="all, delete-orphan", + order_by="WeeklyTargetRule.effective_from", + ) + vacation_periods: Mapped[list["VacationPeriod"]] = relationship( + "VacationPeriod", + back_populates="user", + cascade="all, delete-orphan", + order_by="VacationPeriod.start_date", + ) + special_day_statuses: Mapped[list["SpecialDayStatus"]] = relationship( + "SpecialDayStatus", + back_populates="user", + cascade="all, delete-orphan", + order_by="SpecialDayStatus.date", + ) + overtime_adjustments: Mapped[list["OvertimeAdjustment"]] = relationship( + "OvertimeAdjustment", + back_populates="user", + cascade="all, delete-orphan", + order_by="OvertimeAdjustment.date", + ) + auto_entry_skips: Mapped[list["AutoEntrySkip"]] = relationship( + "AutoEntrySkip", + back_populates="user", + cascade="all, delete-orphan", + order_by="AutoEntrySkip.date", + ) + password_reset_tokens: Mapped[list["PasswordResetToken"]] = relationship( + "PasswordResetToken", + back_populates="user", + cascade="all, delete-orphan", + order_by="PasswordResetToken.created_at.desc()", + ) + import_previews: Mapped[list["ImportPreview"]] = relationship( + "ImportPreview", + back_populates="user", + cascade="all, delete-orphan", + order_by="ImportPreview.created_at.desc()", + ) + support_tickets: Mapped[list["SupportTicket"]] = relationship( + "SupportTicket", + back_populates="user", + cascade="all, delete-orphan", + order_by="SupportTicket.created_at.desc()", + foreign_keys="SupportTicket.user_id", + ) + + +class TimeEntry(Base): + __tablename__ = "time_entries" + __table_args__ = (UniqueConstraint("user_id", "date", name="uq_user_date"),) + + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id", ondelete="CASCADE"), index=True) + date: Mapped[date] = mapped_column(Date, index=True, 
nullable=False) + + start_minutes: Mapped[int] = mapped_column(Integer, nullable=False) + end_minutes: Mapped[int] = mapped_column(Integer, nullable=False) + break_minutes: Mapped[int] = mapped_column(Integer, default=0, nullable=False) + break_rule_mode: Mapped[str] = mapped_column(String(16), default="manual", nullable=False) + + notes: Mapped[str | None] = mapped_column(Text, default=None) + + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc) + ) + + user: Mapped[User] = relationship("User", back_populates="time_entries") + + +class LoginAttempt(Base): + __tablename__ = "login_attempts" + + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + email: Mapped[str] = mapped_column(String(255), index=True, nullable=False) + ip_address: Mapped[str] = mapped_column(String(64), index=True, nullable=False) + success: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False, index=True + ) + + +class WeeklyTargetRule(Base): + __tablename__ = "weekly_target_rules" + __table_args__ = (UniqueConstraint("user_id", "effective_from", name="uq_user_effective_from"),) + + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id", ondelete="CASCADE"), index=True) + effective_from: Mapped[date] = mapped_column(Date, index=True, nullable=False) + weekly_target_minutes: Mapped[int] = mapped_column(Integer, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), 
nullable=False + ) + + user: Mapped[User] = relationship("User", back_populates="weekly_target_rules") + + +class VacationPeriod(Base): + __tablename__ = "vacation_periods" + + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id", ondelete="CASCADE"), index=True) + start_date: Mapped[date] = mapped_column(Date, index=True, nullable=False) + end_date: Mapped[date] = mapped_column(Date, index=True, nullable=False) + include_weekends: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False) + notes: Mapped[str | None] = mapped_column(Text, default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False + ) + + user: Mapped[User] = relationship("User", back_populates="vacation_periods") + + +class SpecialDayStatus(Base): + __tablename__ = "special_day_statuses" + __table_args__ = (UniqueConstraint("user_id", "date", name="uq_user_special_day_date"),) + + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id", ondelete="CASCADE"), index=True) + date: Mapped[date] = mapped_column(Date, index=True, nullable=False) + status: Mapped[str] = mapped_column(String(16), nullable=False) # holiday | sick + notes: Mapped[str | None] = mapped_column(Text, default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False + ) + + user: Mapped[User] = relationship("User", back_populates="special_day_statuses") + + +class OvertimeAdjustment(Base): + __tablename__ = "overtime_adjustments" + __table_args__ = (UniqueConstraint("user_id", "date", name="uq_user_overtime_adjustment_date"),) + + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) 
+ user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id", ondelete="CASCADE"), index=True) + date: Mapped[date] = mapped_column(Date, index=True, nullable=False) + minutes: Mapped[int] = mapped_column(Integer, nullable=False) + notes: Mapped[str | None] = mapped_column(Text, default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False + ) + + user: Mapped[User] = relationship("User", back_populates="overtime_adjustments") + + +class AutoEntrySkip(Base): + __tablename__ = "auto_entry_skips" + __table_args__ = (UniqueConstraint("user_id", "date", name="uq_user_auto_entry_skip_date"),) + + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id", ondelete="CASCADE"), index=True) + date: Mapped[date] = mapped_column(Date, index=True, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False + ) + + user: Mapped[User] = relationship("User", back_populates="auto_entry_skips") + + +class PasswordResetToken(Base): + __tablename__ = "password_reset_tokens" + + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id", ondelete="CASCADE"), index=True) + token_hash: Mapped[str] = mapped_column(String(128), unique=True, index=True, nullable=False) + expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True) + used_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) + requested_ip: Mapped[str | None] = mapped_column(String(64), default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False, index=True + ) + + 
user: Mapped[User] = relationship("User", back_populates="password_reset_tokens") + + +class ImportPreview(Base): + __tablename__ = "import_previews" + + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id", ondelete="CASCADE"), index=True) + mode: Mapped[str] = mapped_column(String(32), nullable=False) + payload_json: Mapped[str] = mapped_column(Text, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False, index=True + ) + + user: Mapped[User] = relationship("User", back_populates="import_previews") + + +class EmailServerConfig(Base): + __tablename__ = "email_server_config" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + smtp_host: Mapped[str] = mapped_column(String(255), default="", nullable=False) + smtp_port: Mapped[int] = mapped_column(Integer, default=587, nullable=False) + smtp_username: Mapped[str | None] = mapped_column(String(255), default=None) + smtp_password_encrypted: Mapped[str | None] = mapped_column(Text, default=None) + from_email: Mapped[str] = mapped_column(String(255), default="", nullable=False) + from_name: Mapped[str] = mapped_column(String(255), default="Stundenfuchs", nullable=False) + use_starttls: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False) + use_ssl: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False) + verify_tls: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False) + registration_mails_enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False) + password_reset_mails_enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False) + registration_admin_notify_enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False) + registration_admin_notify_admin_ids_csv: Mapped[str | None] = 
mapped_column(String(1024), default=None) + updated_by_user_id: Mapped[str | None] = mapped_column(String(36), ForeignKey("users.id", ondelete="SET NULL")) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc) + ) + + +class SiteContent(Base): + __tablename__ = "site_content" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + key: Mapped[str] = mapped_column(String(64), unique=True, index=True, nullable=False) + markdown_text: Mapped[str] = mapped_column(Text, default="", nullable=False) + updated_by_user_id: Mapped[str | None] = mapped_column(String(36), ForeignKey("users.id", ondelete="SET NULL")) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc) + ) + + +class SupportTicket(Base): + __tablename__ = "support_tickets" + + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + user_id: Mapped[str | None] = mapped_column(String(36), ForeignKey("users.id", ondelete="SET NULL"), index=True) + category: Mapped[str] = mapped_column(String(24), default="problem", nullable=False) + status: Mapped[str] = mapped_column(String(24), default="open", nullable=False, index=True) + name: Mapped[str] = mapped_column(String(255), default="", nullable=False) + email: Mapped[str] = mapped_column(String(255), index=True, nullable=False) + subject: Mapped[str] = mapped_column(String(255), nullable=False) + message: Mapped[str] = mapped_column(Text, nullable=False) + admin_notes: Mapped[str | None] = mapped_column(Text, default=None) + source_ip_hash: Mapped[str | None] = mapped_column(String(128), index=True) + source_user_agent: Mapped[str | None] = mapped_column(String(512), default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: 
datetime.now(timezone.utc), nullable=False, index=True + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc) + ) + closed_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) + + user: Mapped[User | None] = relationship("User", back_populates="support_tickets", foreign_keys=[user_id]) diff --git a/app/schemas.py b/app/schemas.py new file mode 100644 index 0000000..cc5e083 --- /dev/null +++ b/app/schemas.py @@ -0,0 +1,45 @@ +from datetime import date + +from pydantic import BaseModel, EmailStr, Field + + +class RegisterRequest(BaseModel): + email: EmailStr + password: str = Field(min_length=10, max_length=255) + + +class LoginRequest(BaseModel): + email: EmailStr + password: str = Field(min_length=1, max_length=255) + + +class MFAChallengeRequest(BaseModel): + code: str = Field(min_length=1, max_length=32) + + +class TimeEntryCreate(BaseModel): + date: date + start_time: str + end_time: str + break_minutes: int | None = Field(default=None, ge=0) + break_mode: str | None = Field(default=None, pattern="^(manual|auto)$") + notes: str | None = None + + +class TimeEntryUpdate(BaseModel): + start_time: str | None = None + end_time: str | None = None + break_minutes: int | None = Field(default=None, ge=0) + break_mode: str | None = Field(default=None, pattern="^(manual|auto)$") + notes: str | None = None + + +class TimeEntryOut(BaseModel): + id: str + date: date + start_time: str + end_time: str + break_minutes: int + break_mode: str + net_minutes: int + notes: str | None diff --git a/app/services/__init__.py b/app/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/services/auto_entries.py b/app/services/auto_entries.py new file mode 100644 index 0000000..30cf312 --- /dev/null +++ b/app/services/auto_entries.py @@ -0,0 +1,332 @@ +from datetime import date, timedelta + +from sqlalchemy import select 
+from sqlalchemy.orm import Session + +from app.models import AutoEntrySkip, OvertimeAdjustment, SpecialDayStatus, TimeEntry, User +from app.services.calculations import automatic_break_minutes_for_net_minutes, compute_net_minutes +from app.services.targets import list_rules_for_user, monday_of, target_for_week +from app.services.vacations import expand_vacation_dates, list_vacations_for_user +from app.services.workdays import parse_working_days_csv + +ENTRY_MODE_MANUAL = "manual" +ENTRY_MODE_AUTO_UNTIL_TODAY = "auto_until_today" +AUTO_ENTRY_NOTE = "Automatisch vorausgefuellt" +SPECIAL_DAY_STATUS_HOLIDAY = "holiday" +SPECIAL_DAY_STATUS_SICK = "sick" + + +def get_user_working_days(user: User) -> set[int]: + return parse_working_days_csv(user.working_days_csv) + + +def list_special_statuses_for_user( + db: Session, + user_id: str, + from_date: date, + to_date: date, +) -> list[SpecialDayStatus]: + stmt = ( + select(SpecialDayStatus) + .where( + SpecialDayStatus.user_id == user_id, + SpecialDayStatus.date >= from_date, + SpecialDayStatus.date <= to_date, + ) + .order_by(SpecialDayStatus.date.asc()) + ) + return db.execute(stmt).scalars().all() + + +def special_status_map(periods: list[SpecialDayStatus]) -> dict[date, str]: + return {period.date: period.status for period in periods} + + +def special_status_dates(periods: list[SpecialDayStatus]) -> set[date]: + return {period.date for period in periods} + + +def count_as_worktime_dates_for_user( + *, + user: User, + vacation_dates: set[date], + special_statuses: list[SpecialDayStatus], +) -> set[date]: + dates: set[date] = set() + if user.count_vacation_as_worktime: + dates.update(vacation_dates) + if user.count_holiday_as_worktime: + dates.update(period.date for period in special_statuses if period.status == SPECIAL_DAY_STATUS_HOLIDAY) + if user.count_sick_as_worktime: + dates.update(period.date for period in special_statuses if period.status == SPECIAL_DAY_STATUS_SICK) + return dates + + +def 
effective_non_working_dates_for_user( + *, + user: User, + special_statuses: list[SpecialDayStatus], +) -> set[date]: + blocked: set[date] = set() + for period in special_statuses: + if period.status == SPECIAL_DAY_STATUS_HOLIDAY and user.count_holiday_as_worktime: + continue + if period.status == SPECIAL_DAY_STATUS_SICK and user.count_sick_as_worktime: + continue + blocked.add(period.date) + return blocked + + +def clear_special_status_for_date(*, db: Session, user_id: str, day: date) -> None: + stmt = select(SpecialDayStatus).where(SpecialDayStatus.user_id == user_id, SpecialDayStatus.date == day) + existing = db.execute(stmt).scalar_one_or_none() + if existing: + db.delete(existing) + + +def list_overtime_adjustments_for_user( + db: Session, + user_id: str, + from_date: date, + to_date: date, +) -> list[OvertimeAdjustment]: + stmt = ( + select(OvertimeAdjustment) + .where( + OvertimeAdjustment.user_id == user_id, + OvertimeAdjustment.date >= from_date, + OvertimeAdjustment.date <= to_date, + ) + .order_by(OvertimeAdjustment.date.asc()) + ) + return db.execute(stmt).scalars().all() + + +def overtime_adjustment_map(adjustments: list[OvertimeAdjustment]) -> dict[date, OvertimeAdjustment]: + return {adjustment.date: adjustment for adjustment in adjustments} + + +def overtime_adjustment_minutes_map(adjustments: list[OvertimeAdjustment]) -> dict[date, int]: + return {adjustment.date: adjustment.minutes for adjustment in adjustments} + + +def clear_overtime_adjustment_for_date(*, db: Session, user_id: str, day: date) -> None: + stmt = select(OvertimeAdjustment).where(OvertimeAdjustment.user_id == user_id, OvertimeAdjustment.date == day) + existing = db.execute(stmt).scalar_one_or_none() + if existing: + db.delete(existing) + + +def auto_entry_skip_dates_for_user( + db: Session, + user_id: str, + from_date: date, + to_date: date, +) -> set[date]: + stmt = ( + select(AutoEntrySkip.date) + .where( + AutoEntrySkip.user_id == user_id, + AutoEntrySkip.date >= from_date, + 
AutoEntrySkip.date <= to_date, + ) + .order_by(AutoEntrySkip.date.asc()) + ) + return set(db.execute(stmt).scalars().all()) + + +def mark_auto_entry_skip_for_date(*, db: Session, user_id: str, day: date) -> None: + stmt = select(AutoEntrySkip).where(AutoEntrySkip.user_id == user_id, AutoEntrySkip.date == day) + existing = db.execute(stmt).scalar_one_or_none() + if not existing: + db.add(AutoEntrySkip(user_id=user_id, date=day)) + + +def clear_auto_entry_skip_for_date(*, db: Session, user_id: str, day: date) -> None: + stmt = select(AutoEntrySkip).where(AutoEntrySkip.user_id == user_id, AutoEntrySkip.date == day) + existing = db.execute(stmt).scalar_one_or_none() + if existing: + db.delete(existing) + + +def build_auto_day_entry( + *, + weekly_target_minutes: int, + workdays_per_week: int, + automatic_break_rules_enabled: bool, + default_break_minutes: int, +) -> tuple[int, int, int] | None: + if workdays_per_week <= 0: + return None + + day_net_minutes = int(round(weekly_target_minutes / workdays_per_week)) + if day_net_minutes <= 0: + return None + + start_minutes = 8 * 60 + 30 + break_minutes = ( + automatic_break_minutes_for_net_minutes(day_net_minutes) + if automatic_break_rules_enabled + else max(0, default_break_minutes) + ) + end_minutes = start_minutes + day_net_minutes + break_minutes + + if end_minutes > (24 * 60 - 1): + end_minutes = 24 * 60 - 1 + available_span = end_minutes - start_minutes + if available_span <= 0: + return None + break_minutes = min(break_minutes, max(0, available_span - 1)) + + return start_minutes, end_minutes, break_minutes + + +def auto_entry_sync_start_date(user: User) -> date: + if user.overtime_start_date: + return user.overtime_start_date + return user.created_at.date() + + +def delete_future_auto_entries( + *, + db: Session, + user_id: str, + after_date: date, +) -> int: + stmt = ( + select(TimeEntry) + .where( + TimeEntry.user_id == user_id, + TimeEntry.date > after_date, + TimeEntry.notes == AUTO_ENTRY_NOTE, + ) + 
def autofill_entries_for_range(
    *,
    db: Session,
    user: User,
    range_start: date,
    range_end: date,
) -> int:
    """Create auto-generated TimeEntry rows for the user's missing workdays.

    Only applies to users in "auto_until_today" mode. The range is clipped to
    [auto-entry start, today]; dates that already have an entry, fall on
    vacation, carry a special status or an overtime adjustment, were explicitly
    skipped, or are not configured workdays never get an entry. Rows are added
    to the session (not committed). Returns the number of entries created.
    """
    if user.entry_mode != ENTRY_MODE_AUTO_UNTIL_TODAY:
        return 0
    if range_end < range_start:
        return 0

    effective_end = min(range_end, date.today())
    effective_start = max(range_start, auto_entry_sync_start_date(user))
    if effective_start > effective_end:
        return 0

    working_days = get_user_working_days(user)
    if not working_days:
        return 0

    rules = list_rules_for_user(db, user.id)
    vacations = list_vacations_for_user(db, user.id, effective_start, effective_end)
    vacation_dates = expand_vacation_dates(vacations, effective_start, effective_end, relevant_weekdays=working_days)
    special_statuses = list_special_statuses_for_user(db, user.id, effective_start, effective_end)
    overtime_adjustments = list_overtime_adjustments_for_user(db, user.id, effective_start, effective_end)

    existing_dates_stmt = (
        select(TimeEntry.date)
        .where(
            TimeEntry.user_id == user.id,
            TimeEntry.date >= effective_start,
            TimeEntry.date <= effective_end,
        )
        .order_by(TimeEntry.date.asc())
    )
    # Union of every reason a date must NOT receive an auto entry. Each date is
    # visited exactly once below, so a single precomputed set is equivalent to
    # the per-condition checks.
    blocked_dates = (
        set(db.execute(existing_dates_stmt).scalars().all())
        | vacation_dates
        | special_status_dates(special_statuses)
        | set(overtime_adjustment_minutes_map(overtime_adjustments).keys())
        | auto_entry_skip_dates_for_user(db, user.id, effective_start, effective_end)
    )

    created = 0
    day = effective_start
    while day <= effective_end:
        if day.weekday() in working_days and day not in blocked_dates:
            weekly_target = target_for_week(rules, monday_of(day), user.weekly_target_minutes)
            entry_values = build_auto_day_entry(
                weekly_target_minutes=weekly_target,
                workdays_per_week=len(working_days),
                automatic_break_rules_enabled=bool(user.automatic_break_rules_enabled),
                default_break_minutes=user.default_break_minutes,
            )
            if entry_values is not None:
                start_minutes, end_minutes, break_minutes = entry_values
                db.add(
                    TimeEntry(
                        user_id=user.id,
                        date=day,
                        start_minutes=start_minutes,
                        end_minutes=end_minutes,
                        break_minutes=break_minutes,
                        break_rule_mode="auto",
                        notes=AUTO_ENTRY_NOTE,
                    )
                )
                created += 1
        day += timedelta(days=1)

    return created
def parse_time_to_minutes(value: str) -> int:
    """Parse a 24h 'HH:MM' wall-clock string into minutes since midnight.

    Raises:
        ValueError: if *value* is not a well-formed 24h time.
    """
    # The regex alone fully validates the 24h format, so the previous second
    # parse through datetime.strptime was redundant and has been removed.
    if not re.fullmatch(r"([01]\d|2[0-3]):[0-5]\d", value):
        raise ValueError("Uhrzeit muss im Format HH:MM sein")
    hours, minutes = value.split(":")
    return int(hours) * 60 + int(minutes)


def minutes_to_hhmm(minutes: int) -> str:
    """Format a (possibly negative) minute count as '[-]HH:MM'."""
    sign = "-" if minutes < 0 else ""
    hours, mins = divmod(abs(minutes), 60)
    return f"{sign}{hours:02d}:{mins:02d}"


def validate_entry(start_minutes: int, end_minutes: int, break_minutes: int) -> None:
    """Validate a raw time entry; raises ValueError with a user-facing message."""
    if end_minutes <= start_minutes:
        raise ValueError("Arbeitsende muss nach Arbeitsbeginn liegen")
    if break_minutes < 0:
        raise ValueError("Pause darf nicht negativ sein")
    if break_minutes > end_minutes - start_minutes:
        raise ValueError("Pause darf nicht laenger als die Arbeitszeit sein")


def required_break_minutes_for_span(work_span_minutes: int) -> int:
    """Return the minimum break (minutes) for a gross working span.

    Tiers: more than 9h requires 45 min, more than 6h requires 30 min.
    """
    if work_span_minutes > 9 * 60:
        return 45
    if work_span_minutes > 6 * 60:
        return 30
    return 0


def automatic_break_minutes(start_minutes: int, end_minutes: int) -> int:
    """Return the automatic break for an entry given start/end minutes.

    Raises:
        ValueError: if the end does not lie after the start.
    """
    if end_minutes <= start_minutes:
        raise ValueError("Arbeitsende muss nach Arbeitsbeginn liegen")
    return required_break_minutes_for_span(end_minutes - start_minutes)


def automatic_break_minutes_for_net_minutes(net_minutes: int) -> int:
    """Return the automatic break implied by a *net* working time.

    Thresholds mirror required_break_minutes_for_span with the break already
    subtracted (net > 8h15 -> 45 min, net > 5h30 -> 30 min).

    Raises:
        ValueError: if *net_minutes* is negative.
    """
    if net_minutes < 0:
        raise ValueError("Nettoarbeitszeit darf nicht negativ sein")
    if net_minutes > (9 * 60 - 45):
        return 45
    if net_minutes > (6 * 60 - 30):
        return 30
    return 0


def compute_net_minutes(start_minutes: int, end_minutes: int, break_minutes: int) -> int:
    """Return net working minutes (gross span minus break) after validation."""
    validate_entry(start_minutes, end_minutes, break_minutes)
    return (end_minutes - start_minutes) - break_minutes


def iso_week_bounds(day: date) -> tuple[date, date]:
    """Return (Monday, Sunday) of the ISO week containing *day*."""
    week_start = day - timedelta(days=day.weekday())
    return week_start, week_start + timedelta(days=6)
def daterange(start: date, end: date):
    """Yield each date from *start* to *end*, both endpoints inclusive."""
    day = start
    while day <= end:
        yield day
        day += timedelta(days=1)


def aggregate_week(entries: list, week_start: date, weekly_target_minutes: int) -> dict:
    """Aggregate one week of time entries into per-day and weekly totals.

    Days without an entry contribute zero net minutes. Returns a dict with
    the week bounds, the seven day records, and ist/soll/delta totals.
    """
    week_end = week_start + timedelta(days=6)
    by_date = {entry.date: entry for entry in entries}

    days = []
    total_net = 0
    for day in daterange(week_start, week_end):
        entry = by_date.get(day)
        if entry is None:
            days.append({"date": day, "entry": None, "net_minutes": 0})
        else:
            net = compute_net_minutes(entry.start_minutes, entry.end_minutes, entry.break_minutes)
            total_net += net
            days.append({"date": day, "entry": entry, "net_minutes": net})

    return {
        "week_start": week_start,
        "week_end": week_end,
        "days": days,
        "weekly_ist": total_net,
        "weekly_soll": weekly_target_minutes,
        "weekly_delta": total_net - weekly_target_minutes,
    }


def cumulative_delta(entries: list, selected_week_start: date, weekly_target_minutes: int) -> int:
    """Sum weekly (ist - soll) deltas from the earliest entry's week onward.

    Every week up to and including *selected_week_start* is counted; weeks
    with no entries count fully against the target.
    """
    if not entries:
        return 0

    first_day = min(entry.date for entry in entries)
    week_cursor = first_day - timedelta(days=first_day.weekday())

    net_per_week = defaultdict(int)
    for entry in entries:
        start_of_week, _ = iso_week_bounds(entry.date)
        net_per_week[start_of_week] += compute_net_minutes(
            entry.start_minutes, entry.end_minutes, entry.break_minutes
        )

    balance = 0
    while week_cursor <= selected_week_start:
        balance += net_per_week[week_cursor] - weekly_target_minutes
        week_cursor += timedelta(days=7)

    return balance
email.message import EmailMessage +import smtplib +import ssl + + +@dataclass +class MailServerSettings: + smtp_host: str + smtp_port: int + smtp_username: str | None + smtp_password: str | None + from_email: str + from_name: str + use_starttls: bool + use_ssl: bool + verify_tls: bool + timeout_seconds: int = 15 + + +def _build_context(verify_tls: bool) -> ssl.SSLContext: + context = ssl.create_default_context() + if verify_tls: + return context + context.check_hostname = False + context.verify_mode = ssl.CERT_NONE + return context + + +def send_email( + *, + settings: MailServerSettings, + to_email: str, + subject: str, + text_body: str, +) -> None: + if not settings.smtp_host.strip(): + raise ValueError("SMTP host is empty") + if not settings.from_email.strip(): + raise ValueError("From email is empty") + + msg = EmailMessage() + msg["Subject"] = subject + msg["From"] = f"{settings.from_name} <{settings.from_email}>" + msg["To"] = to_email + msg.set_content(text_body) + + ssl_context = _build_context(settings.verify_tls) + if settings.use_ssl: + with smtplib.SMTP_SSL( + settings.smtp_host, + settings.smtp_port, + timeout=settings.timeout_seconds, + context=ssl_context, + ) as smtp: + if settings.smtp_username: + smtp.login(settings.smtp_username, settings.smtp_password or "") + smtp.send_message(msg) + return + + with smtplib.SMTP(settings.smtp_host, settings.smtp_port, timeout=settings.timeout_seconds) as smtp: + smtp.ehlo() + if settings.use_starttls: + smtp.starttls(context=ssl_context) + smtp.ehlo() + if settings.smtp_username: + smtp.login(settings.smtp_username, settings.smtp_password or "") + smtp.send_message(msg) diff --git a/app/services/exporters.py b/app/services/exporters.py new file mode 100644 index 0000000..20147f5 --- /dev/null +++ b/app/services/exporters.py @@ -0,0 +1,237 @@ +import json +from datetime import date +from io import BytesIO + +from openpyxl import Workbook +from reportlab.lib.pagesizes import A4, landscape +from reportlab.pdfgen 
def create_excel_export(rows: list[dict], week_summaries: list[dict], totals: dict, title: str) -> bytes:
    """Render day rows, week summaries, and metadata into an XLSX workbook.

    Produces three sheets: "Tage" (one row per day), "Wochen" (weekly
    ist/soll/delta plus a grand total), and "Meta" (title and date range).
    """
    workbook = Workbook()

    day_sheet = workbook.active
    day_sheet.title = "Tage"
    day_sheet.append(
        [
            "Datum",
            "Wochentag",
            "KW",
            "Start",
            "Ende",
            "Pause (min)",
            "Brutto",
            "Netto",
            "Stundenausgleich",
            "Sonderstatus",
            "Wochen-Soll",
            "Wochen-Delta",
            "Notiz",
        ]
    )
    for row in rows:
        day_sheet.append(
            [
                row["date"].isoformat(),
                row["weekday_name"],
                row["iso_week"],
                row["start_time"] or "",
                row["end_time"] or "",
                row["break_minutes"],
                minutes_to_hhmm(row["gross_minutes"]),
                minutes_to_hhmm(row["net_minutes"]),
                minutes_to_hhmm(row["overtime_adjustment_minutes"]),
                row["special_status_label"] or "",
                minutes_to_hhmm(row["weekly_target_minutes"]),
                minutes_to_hhmm(row["weekly_delta_minutes"]),
                row["notes"] or "",
            ]
        )
    # Uniform width keeps all 13 data columns readable without autofit.
    for letter in "ABCDEFGHIJKLM":
        day_sheet.column_dimensions[letter].width = 16

    week_sheet = workbook.create_sheet("Wochen")
    week_sheet.append(["KW-Start", "KW-Ende", "Ist", "Soll", "Delta"])
    for summary in week_summaries:
        week_sheet.append(
            [
                summary["week_start"].isoformat(),
                summary["week_end"].isoformat(),
                minutes_to_hhmm(summary["ist_minutes"]),
                minutes_to_hhmm(summary["soll_minutes"]),
                minutes_to_hhmm(summary["delta_minutes"]),
            ]
        )
    week_sheet.append([])
    week_sheet.append(
        ["Gesamt", "", minutes_to_hhmm(totals["ist_minutes"]), "", minutes_to_hhmm(totals["delta_minutes"])]
    )

    meta_sheet = workbook.create_sheet("Meta")
    meta_sheet.append([title])
    meta_sheet.append([f"Zeitraum: {totals['from_date'].isoformat()} bis {totals['to_date'].isoformat()}"])

    buffer = BytesIO()
    workbook.save(buffer)
    return buffer.getvalue()
def create_pdf_export(rows: list[dict], week_summaries: list[dict], totals: dict, title: str) -> bytes:
    """Render day rows plus week summaries into a landscape A4 PDF report.

    Layout uses fixed column x-positions; long notes are truncated so they
    stay inside the last column.
    """
    buffer = BytesIO()
    pdf = canvas.Canvas(buffer, pagesize=landscape(A4))
    width, height = landscape(A4)

    y = height - 35
    pdf.setFont("Helvetica-Bold", 13)
    pdf.drawString(24, y, title)
    y -= 18

    pdf.setFont("Helvetica", 10)
    pdf.drawString(24, y, f"Zeitraum: {totals['from_date'].isoformat()} bis {totals['to_date'].isoformat()}")
    y -= 24

    header_cells = [
        (24, "Datum"), (88, "Tag"), (124, "KW"), (154, "Start"), (198, "Ende"),
        (242, "Pause"), (286, "Brutto"), (338, "Netto"), (390, "Ausgl."),
        (436, "Status"), (490, "Soll"), (542, "W-Delta"), (610, "Notiz"),
    ]
    pdf.setFont("Helvetica-Bold", 8)
    for x, label in header_cells:
        pdf.drawString(x, y, label)
    y -= 12

    pdf.setFont("Helvetica", 8)
    for row in rows:
        if y < 40:
            # Page break: restore the row font, which showPage resets.
            pdf.showPage()
            y = height - 30
            pdf.setFont("Helvetica", 8)

        note = (row["notes"] or "").strip()
        if len(note) > 18:
            note = f"{note[:15]}..."

        cells = [
            (24, row["date"].isoformat()),
            (88, row["weekday_short"]),
            (124, str(row["iso_week"])),
            (154, row["start_time"] or "-"),
            (198, row["end_time"] or "-"),
            (242, str(row["break_minutes"])),
            (286, minutes_to_hhmm(row["gross_minutes"])),
            (338, minutes_to_hhmm(row["net_minutes"])),
            (390, minutes_to_hhmm(row["overtime_adjustment_minutes"])),
            (436, row["special_status_label"] or "-"),
            (490, minutes_to_hhmm(row["weekly_target_minutes"])),
            (542, minutes_to_hhmm(row["weekly_delta_minutes"])),
            (610, note),
        ]
        for x, text in cells:
            pdf.drawString(x, y, text)
        y -= 11

    y -= 12
    pdf.setFont("Helvetica-Bold", 10)
    pdf.drawString(24, y, "Wochenzusammenfassung")
    y -= 14

    pdf.setFont("Helvetica", 9)
    for summary in week_summaries:
        if y < 40:
            pdf.showPage()
            y = height - 30
            pdf.setFont("Helvetica", 9)

        pdf.drawString(
            24,
            y,
            f"{summary['week_start'].isoformat()} - {summary['week_end'].isoformat()} | "
            f"Ist {minutes_to_hhmm(summary['ist_minutes'])} | "
            f"Soll {minutes_to_hhmm(summary['soll_minutes'])} | "
            f"Delta {minutes_to_hhmm(summary['delta_minutes'])}",
        )
        y -= 12

    y -= 10
    pdf.setFont("Helvetica-Bold", 10)
    pdf.drawString(
        24,
        y,
        f"Gesamt Ist: {minutes_to_hhmm(totals['ist_minutes'])} | Gesamt Delta: {minutes_to_hhmm(totals['delta_minutes'])}",
    )

    pdf.save()
    return buffer.getvalue()


def create_backup_export(payload: dict) -> bytes:
    """Serialize a backup payload as pretty-printed UTF-8 JSON bytes."""
    return json.dumps(payload, ensure_ascii=False, indent=2).encode("utf-8")
"Mittwoch", "Donnerstag", "Freitag", "Samstag", "Sonntag"] + weekday_short = ["Mo", "Di", "Mi", "Do", "Fr", "Sa", "So"] + + special_status_map = special_status_map or {} + overtime_adjustment_map = overtime_adjustment_map or {} + special_status_labels = { + "holiday": "Feiertag", + "sick": "Krankheit", + } + + rows: list[dict] = [] + for day in days: + entry = entries_by_date.get(day) + week_start = monday_of(day) + weekly_target = week_target_map[week_start] + weekly_delta = week_delta_map[week_start] + + if entry: + gross = entry.end_minutes - entry.start_minutes + net = gross - entry.break_minutes + start_time = f"{entry.start_minutes // 60:02d}:{entry.start_minutes % 60:02d}" + end_time = f"{entry.end_minutes // 60:02d}:{entry.end_minutes % 60:02d}" + break_minutes = entry.break_minutes + notes = entry.notes + else: + gross = 0 + net = 0 + start_time = None + end_time = None + break_minutes = 0 + notes = None + + rows.append( + { + "date": day, + "weekday_name": weekday_names[day.weekday()], + "weekday_short": weekday_short[day.weekday()], + "iso_week": day.isocalendar()[1], + "start_time": start_time, + "end_time": end_time, + "break_minutes": break_minutes, + "gross_minutes": gross, + "net_minutes": net, + "overtime_adjustment_minutes": overtime_adjustment_map.get(day, 0), + "special_status_label": special_status_labels.get(special_status_map.get(day, "")), + "weekly_target_minutes": weekly_target, + "weekly_delta_minutes": weekly_delta, + "notes": notes, + } + ) + + return rows diff --git a/app/services/importers.py b/app/services/importers.py new file mode 100644 index 0000000..ff41880 --- /dev/null +++ b/app/services/importers.py @@ -0,0 +1,712 @@ +import json +from datetime import date, datetime, timedelta, timezone +from typing import Any + +from sqlalchemy import delete, select +from sqlalchemy.orm import Session + +from app.models import ( + AutoEntrySkip, + ImportPreview, + OvertimeAdjustment, + SpecialDayStatus, + TimeEntry, + User, + VacationPeriod, 
CURRENT_BACKUP_VERSION = 2
SUPPORTED_BACKUP_VERSIONS = {1, 2}
IMPORT_MODE_MERGE = "merge"
IMPORT_MODE_REPLACE = "replace_user_data"
IMPORT_PREVIEW_TTL_HOURS = 24
MAX_BACKUP_BYTES = 5 * 1024 * 1024
SPECIAL_STATUS_VALUES = {"holiday", "sick"}
PREFERRED_HOME_VIEWS = {"week", "month"}
PREFERRED_MONTH_VIEWS = {"flat", "weeks"}
BREAK_RULE_MODES = {"manual", "auto"}


class BackupImportError(ValueError):
    """Raised when an uploaded backup file cannot be parsed or validated."""


def supported_import_modes() -> set[str]:
    """Return the set of import modes the backup importer understands."""
    return {IMPORT_MODE_MERGE, IMPORT_MODE_REPLACE}


def _require_mapping(value: Any, *, label: str) -> dict[str, Any]:
    """Require *value* to be a dict; raise a user-facing import error otherwise."""
    if isinstance(value, dict):
        return value
    raise BackupImportError(f"{label} ist nicht korrekt aufgebaut.")


def _require_list(value: Any, *, label: str) -> list[Any]:
    """Coerce a missing section to an empty list; reject non-list values."""
    if value is None:
        return []
    if isinstance(value, list):
        return value
    raise BackupImportError(f"{label} ist nicht korrekt aufgebaut.")


def _parse_date(value: Any, *, label: str) -> date:
    """Parse an ISO date string, translating failures into import errors."""
    if not isinstance(value, str) or not value.strip():
        raise BackupImportError(f"{label} fehlt oder ist ungültig.")
    try:
        return date.fromisoformat(value)
    except ValueError as exc:
        raise BackupImportError(f"{label} hat kein gültiges Datum.") from exc
def _parse_datetime(value: Any, *, label: str) -> str | None:
    """Validate an optional ISO-8601 datetime string and return it unchanged.

    Accepts a trailing 'Z' suffix as UTC. Missing values map to None.

    Raises:
        BackupImportError: if the value is present but not a valid datetime.
    """
    if value in (None, ""):
        return None
    if not isinstance(value, str):
        raise BackupImportError(f"{label} hat kein gültiges Datum.")
    try:
        datetime.fromisoformat(value.replace("Z", "+00:00"))
    except ValueError as exc:
        raise BackupImportError(f"{label} hat kein gültiges Datum.") from exc
    return value


def _parse_int(value: Any, *, label: str, minimum: int | None = None) -> int:
    """Validate an integer backup field, with an optional lower bound.

    Bug fix: ``bool`` is a subclass of ``int`` in Python, so a JSON ``true``
    previously slipped through here as 1 — booleans are now rejected.

    Raises:
        BackupImportError: on non-integers, booleans, or out-of-range values.
    """
    if isinstance(value, bool) or not isinstance(value, int):
        raise BackupImportError(f"{label} ist keine ganze Zahl.")
    if minimum is not None and value < minimum:
        raise BackupImportError(f"{label} ist zu klein.")
    return value


def _parse_optional_int(value: Any, *, label: str, minimum: int | None = None) -> int | None:
    """Like _parse_int, but None is allowed and passed through unchanged."""
    if value is None:
        return None
    return _parse_int(value, label=label, minimum=minimum)


def _parse_bool(value: Any, *, label: str) -> bool:
    """Require a strict JSON boolean; no truthy coercion is performed."""
    if not isinstance(value, bool):
        raise BackupImportError(f"{label} muss true oder false sein.")
    return value


def _parse_optional_text(value: Any, *, label: str) -> str | None:
    """Normalize optional free text: strip whitespace, map empty to None."""
    if value in (None, ""):
        return None
    if not isinstance(value, str):
        raise BackupImportError(f"{label} ist ungültig.")
    return value.strip() or None
PREFERRED_HOME_VIEWS: + preferred_home_view = "week" + + preferred_month_view_mode = settings_data.get("preferred_month_view_mode", "flat") + if preferred_month_view_mode not in PREFERRED_MONTH_VIEWS: + preferred_month_view_mode = "flat" + + entry_mode = settings_data.get("entry_mode", ENTRY_MODE_MANUAL) + if entry_mode == "auto": + entry_mode = ENTRY_MODE_AUTO_UNTIL_TODAY + if entry_mode not in {ENTRY_MODE_MANUAL, ENTRY_MODE_AUTO_UNTIL_TODAY}: + raise BackupImportError("Der Erfassungsmodus im Backup ist ungültig.") + + federal_state = None + if settings_data.get("federal_state"): + federal_state = normalize_german_state_code(str(settings_data.get("federal_state"))) + if federal_state is None: + raise BackupImportError("Das Bundesland im Backup ist ungültig.") + + overtime_start_date = None + if settings_data.get("overtime_start_date"): + overtime_start_date = _parse_date(settings_data.get("overtime_start_date"), label="Überstunden-Startdatum") + + workhours_counter_start_date = None + if settings_data.get("workhours_counter_start_date"): + workhours_counter_start_date = _parse_date( + settings_data.get("workhours_counter_start_date"), + label="Arbeitsstunden-Counter Startdatum", + ) + + workhours_counter_end_date = None + if settings_data.get("workhours_counter_end_date"): + workhours_counter_end_date = _parse_date( + settings_data.get("workhours_counter_end_date"), + label="Arbeitsstunden-Counter Enddatum", + ) + + return { + "weekly_target_minutes": _parse_int(settings_data.get("weekly_target_minutes", 1500), label="Wochenstunden", minimum=1), + "preferred_home_view": preferred_home_view, + "preferred_month_view_mode": preferred_month_view_mode, + "entry_mode": entry_mode, + "working_days": sorted(working_days), + "count_vacation_as_worktime": _parse_bool( + settings_data.get("count_vacation_as_worktime", False), + label="Urlaubstage-wie-Arbeitstage", + ), + "count_holiday_as_worktime": _parse_bool( + settings_data.get("count_holiday_as_worktime", False), + 
label="Feiertage-wie-Arbeitstage", + ), + "count_sick_as_worktime": _parse_bool( + settings_data.get("count_sick_as_worktime", False), + label="Kranktage-wie-Arbeitstage", + ), + "automatic_break_rules_enabled": _parse_bool( + settings_data.get("automatic_break_rules_enabled", False), + label="Automatische Pausenregel", + ), + "default_break_minutes": _parse_int( + settings_data.get("default_break_minutes", 0), + label="Tägliche Pause", + minimum=0, + ), + "overtime_start_date": overtime_start_date.isoformat() if overtime_start_date else None, + "overtime_expiry_days": _parse_optional_int( + settings_data.get("overtime_expiry_days"), + label="Überstunden-Verfall", + minimum=1, + ), + "expire_negative_overtime": _parse_bool( + settings_data.get("expire_negative_overtime", False), + label="Negative Stunden verfallen", + ), + "vacation_days_total": _parse_int( + settings_data.get("vacation_days_total", 0), + label="Urlaubstage gesamt", + minimum=0, + ), + "vacation_show_in_header": _parse_bool( + settings_data.get("vacation_show_in_header", True), + label="Urlaub im Header anzeigen", + ), + "workhours_counter_enabled": _parse_bool( + settings_data.get("workhours_counter_enabled", False), + label="Arbeitsstunden-Counter aktiviert", + ), + "workhours_counter_show_in_header": _parse_bool( + settings_data.get("workhours_counter_show_in_header", False), + label="Arbeitsstunden-Counter im Header anzeigen", + ), + "workhours_counter_start_date": ( + workhours_counter_start_date.isoformat() if workhours_counter_start_date else None + ), + "workhours_counter_end_date": ( + workhours_counter_end_date.isoformat() if workhours_counter_end_date else None + ), + "workhours_counter_manual_offset_minutes": _parse_int( + settings_data.get("workhours_counter_manual_offset_minutes", 0), + label="Zusatzstunden", + minimum=0, + ), + "workhours_counter_target_minutes": _parse_optional_int( + settings_data.get("workhours_counter_target_minutes"), + label="Arbeitsstunden-Ziel", + minimum=1, 
def _normalize_weekly_target_rules(items: list[Any]) -> list[dict[str, Any]]:
    """Validate and deduplicate weekly-target rules from a backup payload.

    Keeps the first occurrence per effective-from date and returns the rules
    sorted chronologically, with dates serialized as ISO strings.
    """
    rules: list[dict[str, Any]] = []
    seen_dates: set[str] = set()
    for raw in _require_list(items, label="Wochenziel-Regeln"):
        rule = _require_mapping(raw, label="Wochenziel-Regel")
        start = _parse_date(rule.get("effective_from"), label="Wochenziel Startdatum").isoformat()
        if start in seen_dates:
            continue
        seen_dates.add(start)
        minutes = _parse_int(
            rule.get("weekly_target_minutes"),
            label="Wochenziel in Minuten",
            minimum=1,
        )
        rules.append({"effective_from": start, "weekly_target_minutes": minutes})
    rules.sort(key=lambda rule: rule["effective_from"])
    return rules
def _normalize_vacation_periods(items: list[Any]) -> list[dict[str, Any]]:
    """Validate vacation periods, drop exact duplicates, and sort them.

    A period is a duplicate only when start, end, weekend flag, and note all
    match. Periods ending before they start are rejected.
    """
    periods: list[dict[str, Any]] = []
    seen: set[tuple[str, str, bool, str | None]] = set()
    for raw in _require_list(items, label="Urlaubszeiträume"):
        period = _require_mapping(raw, label="Urlaubszeitraum")
        first_day = _parse_date(period.get("start_date"), label="Urlaubsbeginn")
        last_day = _parse_date(period.get("end_date"), label="Urlaubsende")
        if last_day < first_day:
            raise BackupImportError("Ein Urlaubszeitraum endet vor seinem Startdatum.")
        include_weekends = _parse_bool(period.get("include_weekends", False), label="Wochenenden einschließen")
        notes = _parse_optional_text(period.get("notes"), label="Urlaubsnotiz")
        fingerprint = (first_day.isoformat(), last_day.isoformat(), include_weekends, notes)
        if fingerprint in seen:
            continue
        seen.add(fingerprint)
        periods.append(
            {
                "start_date": fingerprint[0],
                "end_date": fingerprint[1],
                "include_weekends": include_weekends,
                "notes": notes,
            }
        )
    periods.sort(key=lambda period: (period["start_date"], period["end_date"]))
    return periods
item["date"]) + return normalized + + +def _normalize_overtime_adjustments(items: list[Any]) -> list[dict[str, Any]]: + normalized: list[dict[str, Any]] = [] + seen: set[str] = set() + for item in _require_list(items, label="Stundenausgleich"): + row = _require_mapping(item, label="Stundenausgleich-Eintrag") + adjustment_date = _parse_date(row.get("date"), label="Stundenausgleich Datum").isoformat() + if adjustment_date in seen: + continue + seen.add(adjustment_date) + normalized.append( + { + "date": adjustment_date, + "minutes": _parse_int(row.get("minutes"), label="Stundenausgleich Minuten"), + "notes": _parse_optional_text(row.get("notes"), label="Stundenausgleich-Notiz"), + } + ) + normalized.sort(key=lambda item: item["date"]) + return normalized + + +def load_backup_payload_from_bytes(payload_bytes: bytes) -> dict[str, Any]: + if not payload_bytes: + raise BackupImportError("Die Backup-Datei ist leer.") + if len(payload_bytes) > MAX_BACKUP_BYTES: + raise BackupImportError("Die Backup-Datei ist zu groß.") + try: + raw = json.loads(payload_bytes.decode("utf-8")) + except (UnicodeDecodeError, json.JSONDecodeError) as exc: + raise BackupImportError("Die Backup-Datei ist kein gültiges JSON.") from exc + + payload = _require_mapping(raw, label="Backup-Datei") + version = payload.get("backup_version") + if version not in SUPPORTED_BACKUP_VERSIONS: + raise BackupImportError("Diese Backup-Version wird noch nicht unterstützt.") + + normalized = { + "backup_version": version, + "source_app_name": str(payload.get("app_name") or "Stundenfuchs"), + "source_app_version": str(payload.get("app_version") or "unbekannt"), + "exported_at": _parse_datetime(payload.get("exported_at"), label="Exportdatum"), + "settings": _normalize_settings(payload), + "weekly_target_rules": _normalize_weekly_target_rules(payload.get("weekly_target_rules")), + "time_entries": _normalize_time_entries(payload.get("time_entries")), + "vacation_periods": 
def summarize_backup_payload(payload: dict[str, Any]) -> dict[str, Any]:
    """Build a human-reviewable summary of a normalized backup payload.

    Surfaces the source metadata, a handful of key settings, and record
    counts per section so the user can confirm the import before applying.
    """
    settings = payload["settings"]
    summary_keys = (
        "entry_mode",
        "weekly_target_minutes",
        "working_days",
        "federal_state",
        "vacation_days_total",
        "workhours_counter_enabled",
    )
    count_keys = (
        "weekly_target_rules",
        "time_entries",
        "vacation_periods",
        "special_day_statuses",
        "overtime_adjustments",
    )
    return {
        "backup_version": payload["backup_version"],
        "source_app_name": payload["source_app_name"],
        "source_app_version": payload["source_app_version"],
        "exported_at": payload["exported_at"],
        "settings_summary": {key: settings[key] for key in summary_keys},
        "counts": {key: len(payload[key]) for key in count_keys},
    }
def cleanup_import_previews(*, db: Session, user_id: str | None = None) -> None:
    """Delete stored import previews older than the preview TTL.

    When *user_id* is given, only that user's expired previews are removed.
    Changes are staged on the session, not committed here.
    """
    expiry_cutoff = utc_now() - timedelta(hours=IMPORT_PREVIEW_TTL_HOURS)
    stale = delete(ImportPreview).where(ImportPreview.created_at < expiry_cutoff)
    if user_id:
        stale = stale.where(ImportPreview.user_id == user_id)
    db.execute(stale)


def _preview_created_at(value: datetime) -> datetime:
    """Return *value* as a timezone-aware datetime, assuming UTC when naive."""
    return value if value.tzinfo is not None else value.replace(tzinfo=timezone.utc)
def get_import_preview_record(*, db: Session, user: User, preview_id: str) -> ImportPreview | None:
    """Load a stored import preview for *user*, discarding it when expired.

    Returns None when no matching preview exists or it has outlived the TTL;
    expired previews are deleted eagerly so stale payloads cannot be applied.
    """
    preview = db.execute(
        select(ImportPreview).where(
            ImportPreview.id == preview_id,
            ImportPreview.user_id == user.id,
        )
    ).scalar_one_or_none()
    if preview is None:
        return None

    expiry_cutoff = utc_now() - timedelta(hours=IMPORT_PREVIEW_TTL_HOURS)
    if _preview_created_at(preview.created_at) < expiry_cutoff:
        db.delete(preview)
        db.flush()
        return None
    return preview


def parse_preview_payload(preview: ImportPreview) -> dict[str, Any]:
    """Re-validate a stored preview payload through the normal backup loader."""
    return load_backup_payload_from_bytes(preview.payload_json.encode("utf-8"))
settings_data["vacation_days_total"] + user.vacation_show_in_header = settings_data["vacation_show_in_header"] + user.workhours_counter_enabled = settings_data["workhours_counter_enabled"] + user.workhours_counter_show_in_header = settings_data["workhours_counter_show_in_header"] + user.workhours_counter_start_date = ( + date.fromisoformat(settings_data["workhours_counter_start_date"]) + if settings_data["workhours_counter_start_date"] + else None + ) + user.workhours_counter_end_date = ( + date.fromisoformat(settings_data["workhours_counter_end_date"]) + if settings_data["workhours_counter_end_date"] + else None + ) + user.workhours_counter_manual_offset_minutes = settings_data["workhours_counter_manual_offset_minutes"] + user.workhours_counter_target_minutes = settings_data["workhours_counter_target_minutes"] + user.workhours_counter_target_email_enabled = settings_data["workhours_counter_target_email_enabled"] + user.federal_state = settings_data["federal_state"] + + +def clear_importable_user_data(*, db: Session, user_id: str) -> None: + db.execute(delete(TimeEntry).where(TimeEntry.user_id == user_id)) + db.execute(delete(WeeklyTargetRule).where(WeeklyTargetRule.user_id == user_id)) + db.execute(delete(VacationPeriod).where(VacationPeriod.user_id == user_id)) + db.execute(delete(SpecialDayStatus).where(SpecialDayStatus.user_id == user_id)) + db.execute(delete(OvertimeAdjustment).where(OvertimeAdjustment.user_id == user_id)) + db.execute(delete(AutoEntrySkip).where(AutoEntrySkip.user_id == user_id)) + + +def execute_backup_import(*, db: Session, user: User, payload: dict[str, Any], mode: str) -> dict[str, Any]: + if mode not in supported_import_modes(): + raise BackupImportError("Ungültiger Importmodus.") + + created = { + "weekly_target_rules": 0, + "time_entries": 0, + "vacation_periods": 0, + "special_day_statuses": 0, + "overtime_adjustments": 0, + } + skipped = { + "weekly_target_rules": 0, + "time_entries": 0, + "vacation_periods": 0, + 
"special_day_statuses": 0, + "overtime_adjustments": 0, + } + + if mode == IMPORT_MODE_REPLACE: + clear_importable_user_data(db=db, user_id=user.id) + + _apply_settings_from_backup(user=user, settings_data=payload["settings"]) + + existing_rule_dates = set( + db.execute(select(WeeklyTargetRule.effective_from).where(WeeklyTargetRule.user_id == user.id)).scalars().all() + ) + existing_entry_dates = set(db.execute(select(TimeEntry.date).where(TimeEntry.user_id == user.id)).scalars().all()) + existing_vacation_keys = set( + db.execute( + select( + VacationPeriod.start_date, + VacationPeriod.end_date, + VacationPeriod.include_weekends, + VacationPeriod.notes, + ).where(VacationPeriod.user_id == user.id) + ).all() + ) + existing_special_dates = set( + db.execute(select(SpecialDayStatus.date).where(SpecialDayStatus.user_id == user.id)).scalars().all() + ) + existing_adjustment_dates = set( + db.execute(select(OvertimeAdjustment.date).where(OvertimeAdjustment.user_id == user.id)).scalars().all() + ) + + for row in payload["weekly_target_rules"]: + effective_from = date.fromisoformat(row["effective_from"]) + if mode == IMPORT_MODE_MERGE and effective_from in existing_rule_dates: + skipped["weekly_target_rules"] += 1 + continue + db.add( + WeeklyTargetRule( + user_id=user.id, + effective_from=effective_from, + weekly_target_minutes=row["weekly_target_minutes"], + ) + ) + existing_rule_dates.add(effective_from) + created["weekly_target_rules"] += 1 + + for row in payload["time_entries"]: + entry_date = date.fromisoformat(row["date"]) + if mode == IMPORT_MODE_MERGE and entry_date in existing_entry_dates: + skipped["time_entries"] += 1 + continue + db.add( + TimeEntry( + user_id=user.id, + date=entry_date, + start_minutes=row["start_minutes"], + end_minutes=row["end_minutes"], + break_minutes=row["break_minutes"], + break_rule_mode=row["break_rule_mode"], + notes=row["notes"], + ) + ) + existing_entry_dates.add(entry_date) + created["time_entries"] += 1 + + for row in 
payload["vacation_periods"]: + key = ( + date.fromisoformat(row["start_date"]), + date.fromisoformat(row["end_date"]), + row["include_weekends"], + row["notes"], + ) + if mode == IMPORT_MODE_MERGE and key in existing_vacation_keys: + skipped["vacation_periods"] += 1 + continue + db.add( + VacationPeriod( + user_id=user.id, + start_date=key[0], + end_date=key[1], + include_weekends=key[2], + notes=key[3], + ) + ) + existing_vacation_keys.add(key) + created["vacation_periods"] += 1 + + for row in payload["special_day_statuses"]: + status_date = date.fromisoformat(row["date"]) + if mode == IMPORT_MODE_MERGE and status_date in existing_special_dates: + skipped["special_day_statuses"] += 1 + continue + db.add( + SpecialDayStatus( + user_id=user.id, + date=status_date, + status=row["status"], + notes=row["notes"], + ) + ) + existing_special_dates.add(status_date) + created["special_day_statuses"] += 1 + + for row in payload["overtime_adjustments"]: + adjustment_date = date.fromisoformat(row["date"]) + if mode == IMPORT_MODE_MERGE and adjustment_date in existing_adjustment_dates: + skipped["overtime_adjustments"] += 1 + continue + db.add( + OvertimeAdjustment( + user_id=user.id, + date=adjustment_date, + minutes=row["minutes"], + notes=row["notes"], + ) + ) + existing_adjustment_dates.add(adjustment_date) + created["overtime_adjustments"] += 1 + + db.flush() + ensure_user_has_default_target_rule(db, user) + if user.entry_mode == ENTRY_MODE_AUTO_UNTIL_TODAY: + removed_future_auto_entries = delete_future_auto_entries(db=db, user_id=user.id, after_date=date.today()) + else: + removed_future_auto_entries = 0 + + return { + "mode": mode, + "created": created, + "skipped": skipped, + "removed_future_auto_entries": removed_future_auto_entries, + } diff --git a/app/services/legal_content.py b/app/services/legal_content.py new file mode 100644 index 0000000..178b2ab --- /dev/null +++ b/app/services/legal_content.py @@ -0,0 +1,242 @@ +from __future__ import annotations + +import 
markdown as markdown_lib +import bleach + +SITE_CONTENT_IMPRESSUM = 'impressum' +SITE_CONTENT_PRIVACY = 'datenschutz' + +DEFAULT_SITE_CONTENT_MARKDOWN = { + SITE_CONTENT_IMPRESSUM: """# Impressum + +Bitte vor dem produktiven Einsatz im Admin-Bereich vollständig ausfüllen. + +## Diensteanbieter + +Firmenname / Name +Straße und Hausnummer +PLZ Ort +Land + +## Kontakt + +E-Mail: [kontakt@example.com](mailto:kontakt@example.com) + +## Verantwortlich für den Inhalt nach § 18 Abs. 2 MStV + +Name der verantwortlichen Person +Straße und Hausnummer +PLZ Ort +Land +""", + SITE_CONTENT_PRIVACY: """# Datenschutzerklärung + +## 1. Verantwortlicher + +Bitte vor dem produktiven Einsatz im Admin-Bereich prüfen und anpassen. + +Verantwortlich für die Verarbeitung personenbezogener Daten im Zusammenhang mit dieser Website und Anwendung ist: + +Firmenname / Name +Straße und Hausnummer +PLZ Ort +Land +E-Mail: [kontakt@example.com](mailto:kontakt@example.com) + +## 2. Allgemeines zur Datenverarbeitung + +Ich verarbeite personenbezogene Daten nur, soweit dies zur Bereitstellung einer funktionsfähigen Website und Anwendung, zur Bearbeitung von Anfragen, zur Sicherheit des Dienstes sowie zur Erbringung der angebotenen Funktionen erforderlich ist. + +## 3. Aufruf der Website + +Beim Aufruf der Website werden technisch erforderliche Daten verarbeitet, um die Seite auszuliefern und die Stabilität und Sicherheit des Dienstes zu gewährleisten. + +Dabei können insbesondere folgende Daten verarbeitet werden: + +- IP-Adresse +- Datum und Uhrzeit des Abrufs +- aufgerufene Seite bzw. Ressource +- Informationen über Browser und Betriebssystem +- Referrer-Informationen +- Protokolldaten zu Sicherheits- und Fehlervorgängen + +Die Verarbeitung erfolgt zur technischen Bereitstellung, Systemsicherheit und Missbrauchserkennung. + +## 4. 
Registrierung und Benutzerkonto + +Wenn du ein Benutzerkonto anlegst, verarbeite ich die von dir angegebenen Registrierungsdaten, insbesondere: + +- E-Mail-Adresse +- Passwort in gehashter Form +- von dir hinterlegte Einstellungen innerhalb der Anwendung + +Die Verarbeitung erfolgt zum Zweck der Einrichtung und Verwaltung deines Benutzerkontos sowie zur Nutzung der Funktionen von Stundenfuchs. + +## 5. Nutzung der Anwendung + +Im Rahmen der Nutzung von Stundenfuchs verarbeite ich die von dir eingegebenen oder erzeugten Inhalte, insbesondere: + +- Arbeitszeiteinträge +- Pausenangaben +- Urlaubs-, Krankheits- und Feiertagseinträge +- Stundenausgleich +- Einstellungen zu Wochenstunden, relevanten Arbeitstagen und Auswertungen +- Backup-, Export- und Importdaten +- Angaben im Arbeitsstunden-Counter + +Diese Daten werden verarbeitet, um dir die Funktionen der Anwendung bereitzustellen. + +## 6. Anmeldung, Sitzungen und Sicherheit + +Zur Anmeldung und sicheren Nutzung der Anwendung werden technisch notwendige Sitzungsdaten verarbeitet. Außerdem können sicherheitsrelevante Daten verarbeitet werden, insbesondere zur: + +- Login-Verwaltung +- Erkennung missbräuchlicher Zugriffe +- Durchsetzung von Sicherheitsmaßnahmen +- Begrenzung fehlerhafter Login- oder Formularversuche + +## 7. E-Mail-Funktionen + +Im Zusammenhang mit der Nutzung von Stundenfuchs können E-Mails versendet werden, insbesondere für: + +- E-Mail-Bestätigung +- Passwort-Reset +- sicherheitsrelevante Hinweise +- Benachrichtigungen innerhalb der Anwendung +- Kontaktanfragen bzw. Tickets + +Dafür werden insbesondere E-Mail-Adresse und die jeweils zur Nachricht erforderlichen Metadaten verarbeitet. + +## 8. Zwei-Faktor-Authentifizierung + +Wenn du die Zwei-Faktor-Authentifizierung aktivierst, werden die dafür erforderlichen Sicherheitsdaten verarbeitet, um die zusätzliche Anmeldung per Authenticator-App zu ermöglichen. + +## 9. 
Kontaktformular und Ticketsystem + +Wenn du das Kontaktformular nutzt oder ein Ticket erstellst, verarbeite ich die von dir übermittelten Angaben, insbesondere: + +- Name +- E-Mail-Adresse +- Kategorie der Anfrage +- Betreff +- Nachricht +- technische Missbrauchsschutzdaten + +Die Verarbeitung erfolgt zur Bearbeitung deiner Anfrage, zur Kommunikation mit dir sowie zur Abwehr von Missbrauch und Spam. + +## 10. Export und Backup + +Wenn du Export- oder Backup-Funktionen nutzt, werden die von dir innerhalb der Anwendung gespeicherten Daten zusammengestellt und zum Download bereitgestellt. Diese Verarbeitung erfolgt ausschließlich zur Durchführung der von dir ausgelösten Funktion. + +## 11. Rechtsgrundlagen + +Soweit die Verarbeitung zur Bereitstellung und Durchführung der Funktionen von Stundenfuchs erforderlich ist, erfolgt sie auf Grundlage von Art. 6 Abs. 1 lit. b DSGVO. + +Soweit die Verarbeitung zur Gewährleistung der Sicherheit, Stabilität und Missbrauchsvermeidung erfolgt, beruht sie auf Art. 6 Abs. 1 lit. f DSGVO. Das berechtigte Interesse liegt in der sicheren, funktionsfähigen und wirtschaftlichen Bereitstellung des Dienstes. + +Soweit du mich kontaktierst, erfolgt die Verarbeitung je nach Inhalt deiner Anfrage auf Art. 6 Abs. 1 lit. b DSGVO oder Art. 6 Abs. 1 lit. f DSGVO. + +## 12. Empfänger von Daten + +Personenbezogene Daten werden nur insoweit weitergegeben, wie dies für den Betrieb der Anwendung technisch erforderlich ist oder eine gesetzliche Verpflichtung besteht. + +Hosting-, E-Mail- und sonstige Empfängerangaben müssen für den konkreten Produktivbetrieb ergänzt werden. + +## 13. Speicherdauer + +Personenbezogene Daten werden nur so lange gespeichert, wie dies für die jeweiligen Zwecke erforderlich ist oder gesetzliche Aufbewahrungspflichten bestehen. + +Kontodaten und in der Anwendung gespeicherte Inhalte werden grundsätzlich so lange gespeichert, wie dein Benutzerkonto besteht, sofern keine gesetzlichen Pflichten entgegenstehen. 
+ +Kontaktanfragen und Tickets werden gespeichert, soweit dies zur Bearbeitung, Dokumentation und Missbrauchsabwehr erforderlich ist. + +## 14. Deine Rechte + +Du hast nach Maßgabe der gesetzlichen Vorschriften das Recht auf: + +- Auskunft über die verarbeiteten personenbezogenen Daten +- Berichtigung unrichtiger Daten +- Löschung +- Einschränkung der Verarbeitung +- Datenübertragbarkeit +- Widerspruch gegen Verarbeitungen auf Grundlage berechtigter Interessen + +Wenn eine Verarbeitung auf einer Einwilligung beruht, kannst du diese jederzeit mit Wirkung für die Zukunft widerrufen. + +## 15. Beschwerderecht + +Du hast das Recht, dich bei einer Datenschutzaufsichtsbehörde zu beschweren. + +## 16. Pflicht zur Bereitstellung von Daten + +Soweit personenbezogene Daten für die Registrierung, Anmeldung oder Nutzung der Anwendung erforderlich sind, ist die Bereitstellung dieser Daten notwendig. Ohne diese Daten kann Stundenfuchs ganz oder teilweise nicht genutzt werden. + +## 17. Keine automatisierte Entscheidungsfindung + +Eine automatisierte Entscheidungsfindung einschließlich Profiling im Sinne von Art. 22 DSGVO findet nicht statt. + +## 18. Keine Analyse- oder Drittinhalte + +Es werden keine Analyse- oder Trackingdienste eingesetzt. + +Es werden keine externen Schriftarten, kein externes Fehlertracking und keine eingebetteten Drittinhalte verwendet. + +## 19. 
Stand + +Stand: März 2026 +""", +} + +_ALLOWED_TAGS = [ + 'a', 'blockquote', 'br', 'code', 'em', 'h1', 'h2', 'h3', 'h4', 'li', 'ol', 'p', 'pre', 'strong', 'ul' +] +_ALLOWED_ATTRIBUTES = { + 'a': ['href', 'title', 'rel', 'target'], +} +_ALLOWED_PROTOCOLS = ['http', 'https', 'mailto'] + + +def default_site_content_markdown(key: str) -> str: + return DEFAULT_SITE_CONTENT_MARKDOWN.get(key, '') + + +def render_safe_markdown(markdown_text: str) -> str: + raw_html = markdown_lib.markdown( + markdown_text or '', + extensions=['extra', 'sane_lists'], + output_format='html5', + ) + cleaned = bleach.clean( + raw_html, + tags=_ALLOWED_TAGS, + attributes=_ALLOWED_ATTRIBUTES, + protocols=_ALLOWED_PROTOCOLS, + strip=True, + ) + return bleach.linkify(cleaned) + + +def normalize_markdown_input(value: str) -> str: + return (value or '').strip() + + +def ticket_status_label(status: str) -> str: + return { + 'open': 'Offen', + 'closed': 'Geschlossen', + }.get(status, status) + + +def ticket_category_options() -> list[dict[str, str]]: + return [ + {'value': 'problem', 'label': 'Problem'}, + {'value': 'feature', 'label': 'Featurerequest'}, + {'value': 'other', 'label': 'Sonstiges'}, + ] + + +def ticket_category_label(value: str) -> str: + for item in ticket_category_options(): + if item['value'] == value: + return item['label'] + return value diff --git a/app/services/migrations.py b/app/services/migrations.py new file mode 100644 index 0000000..fa1af86 --- /dev/null +++ b/app/services/migrations.py @@ -0,0 +1,181 @@ +from sqlalchemy import text +from sqlalchemy.engine import Engine + + +def _table_columns(engine: Engine, table_name: str) -> set[str]: + with engine.connect() as conn: + rows = conn.execute(text(f"PRAGMA table_info({table_name})")).mappings().all() + return {row["name"] for row in rows} + + +def run_startup_migrations(engine: Engine) -> None: + if engine.dialect.name != "sqlite": + return + + user_columns = _table_columns(engine, "users") + + statements: list[str] = [] + 
    # --- users: display / entry-mode preferences -------------------------
    if "preferred_home_view" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN preferred_home_view VARCHAR(16) NOT NULL DEFAULT 'week'")
    if "preferred_month_view_mode" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN preferred_month_view_mode VARCHAR(16) NOT NULL DEFAULT 'flat'")
    if "entry_mode" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN entry_mode VARCHAR(16) NOT NULL DEFAULT 'manual'")
    if "working_days_csv" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN working_days_csv VARCHAR(32) NOT NULL DEFAULT '0,1,2,3,4'")
    # --- users: worktime accounting flags --------------------------------
    if "count_vacation_as_worktime" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN count_vacation_as_worktime BOOLEAN NOT NULL DEFAULT 0")
    if "count_holiday_as_worktime" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN count_holiday_as_worktime BOOLEAN NOT NULL DEFAULT 0")
    if "count_sick_as_worktime" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN count_sick_as_worktime BOOLEAN NOT NULL DEFAULT 0")
    if "automatic_break_rules_enabled" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN automatic_break_rules_enabled BOOLEAN NOT NULL DEFAULT 0")
    if "default_break_minutes" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN default_break_minutes INTEGER NOT NULL DEFAULT 0")
    # --- users: overtime configuration -----------------------------------
    if "overtime_start_date" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN overtime_start_date DATE")
    if "overtime_expiry_days" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN overtime_expiry_days INTEGER")
    if "expire_negative_overtime" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN expire_negative_overtime BOOLEAN NOT NULL DEFAULT 0")
    # --- users: vacation + workhours counter -----------------------------
    if "vacation_days_total" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN vacation_days_total INTEGER NOT NULL DEFAULT 0")
    if "vacation_show_in_header" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN vacation_show_in_header BOOLEAN NOT NULL DEFAULT 1")
    if "workhours_counter_enabled" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN workhours_counter_enabled BOOLEAN NOT NULL DEFAULT 0")
    if "workhours_counter_show_in_header" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN workhours_counter_show_in_header BOOLEAN NOT NULL DEFAULT 0")
    if "workhours_counter_start_date" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN workhours_counter_start_date DATE")
    if "workhours_counter_end_date" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN workhours_counter_end_date DATE")
    if "workhours_counter_manual_offset_minutes" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN workhours_counter_manual_offset_minutes INTEGER NOT NULL DEFAULT 0")
    if "workhours_counter_target_minutes" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN workhours_counter_target_minutes INTEGER")
    if "workhours_counter_target_email_enabled" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN workhours_counter_target_email_enabled BOOLEAN NOT NULL DEFAULT 0")
    if "workhours_counter_warning_last_sent_on" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN workhours_counter_warning_last_sent_on DATE")
    if "workhours_counter_warning_last_sent_key" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN workhours_counter_warning_last_sent_key VARCHAR(120)")
    if "federal_state" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN federal_state VARCHAR(8)")
    # --- users: e-mail verification + MFA --------------------------------
    # NOTE: existing users get email_verified = 1 by default (grandfathered in).
    if "email_verified" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN email_verified BOOLEAN NOT NULL DEFAULT 1")
    if "email_verification_token_hash" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN email_verification_token_hash VARCHAR(128)")
    if "email_verification_expires_at" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN email_verification_expires_at DATETIME")
    if "email_verification_sent_at" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN email_verification_sent_at DATETIME")
    if "mfa_method" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN mfa_method VARCHAR(16) NOT NULL DEFAULT 'none'")
    if "mfa_totp_secret_encrypted" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN mfa_totp_secret_encrypted TEXT")
    if "mfa_email_code_hash" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN mfa_email_code_hash VARCHAR(255)")
    if "mfa_email_code_expires_at" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN mfa_email_code_expires_at DATETIME")
    if "mfa_email_code_sent_at" not in user_columns:
        statements.append("ALTER TABLE users ADD COLUMN mfa_email_code_sent_at DATETIME")

    # --- email_server_config ---------------------------------------------
    email_config_columns = _table_columns(engine, "email_server_config")
    if "registration_admin_notify_enabled" not in email_config_columns:
        statements.append("ALTER TABLE email_server_config ADD COLUMN registration_admin_notify_enabled BOOLEAN NOT NULL DEFAULT 1")
    if "registration_admin_notify_admin_ids_csv" not in email_config_columns:
        statements.append("ALTER TABLE email_server_config ADD COLUMN registration_admin_notify_admin_ids_csv VARCHAR(1024)")

    # --- time_entries ------------------------------------------------------
    time_entry_columns = _table_columns(engine, "time_entries")
    if "break_rule_mode" not in time_entry_columns:
        statements.append("ALTER TABLE time_entries ADD COLUMN break_rule_mode VARCHAR(16) NOT NULL DEFAULT 'manual'")

    # NOTE(review): when no column is missing, the data fix-up and the
    # CREATE TABLE IF NOT EXISTS blocks below are skipped entirely —
    # presumably intentional (they only matter on first upgrade); confirm.
    if not statements:
        return

    # Apply everything in one transaction.
    with engine.begin() as conn:
        for statement in statements:
            conn.execute(text(statement))
        # Legacy value rename: 'auto' became 'auto_until_today'.
        conn.execute(text("UPDATE users SET entry_mode = 'auto_until_today' WHERE entry_mode = 'auto'"))
        conn.execute(
            text("CREATE INDEX IF NOT EXISTS ix_users_email_verification_token_hash ON users (email_verification_token_hash)")
        )
        # overtime_adjustments: one manual +/- minutes adjustment per user/date.
        conn.execute(
            text(
                """
                CREATE TABLE IF NOT EXISTS overtime_adjustments (
                    id VARCHAR(36) PRIMARY KEY NOT NULL,
                    user_id VARCHAR(36) NOT NULL,
                    date DATE NOT NULL,
                    minutes INTEGER NOT NULL,
                    notes TEXT,
                    created_at DATETIME NOT NULL,
                    FOREIGN KEY(user_id) REFERENCES users (id) ON DELETE CASCADE,
                    CONSTRAINT uq_user_overtime_adjustment_date UNIQUE (user_id, date)
                )
                """
            )
        )
        conn.execute(text("CREATE INDEX IF NOT EXISTS ix_overtime_adjustments_user_id ON overtime_adjustments (user_id)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS ix_overtime_adjustments_date ON overtime_adjustments (date)"))
        # auto_entry_skips: dates excluded from automatic entry generation.
        conn.execute(
            text(
                """
                CREATE TABLE IF NOT EXISTS auto_entry_skips (
                    id VARCHAR(36) PRIMARY KEY NOT NULL,
                    user_id VARCHAR(36) NOT NULL,
                    date DATE NOT NULL,
                    created_at DATETIME NOT NULL,
                    FOREIGN KEY(user_id) REFERENCES users (id) ON DELETE CASCADE,
                    CONSTRAINT uq_user_auto_entry_skip_date UNIQUE (user_id, date)
                )
                """
            )
        )
        conn.execute(text("CREATE INDEX IF NOT EXISTS ix_auto_entry_skips_user_id ON auto_entry_skips (user_id)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS ix_auto_entry_skips_date ON auto_entry_skips (date)"))
        # site_content: admin-editable markdown pages (impressum/datenschutz).
        conn.execute(
            text(
                """
                CREATE TABLE IF NOT EXISTS site_content (
                    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
                    key VARCHAR(64) NOT NULL UNIQUE,
                    markdown_text TEXT NOT NULL DEFAULT '',
                    updated_by_user_id VARCHAR(36),
                    updated_at DATETIME,
                    FOREIGN KEY(updated_by_user_id) REFERENCES users (id) ON DELETE SET NULL
                )
                """
            )
        )
        conn.execute(text("CREATE INDEX IF NOT EXISTS ix_site_content_key ON site_content (key)"))
        # support_tickets: contact-form / ticket records (user link optional).
        conn.execute(
            text(
                """
                CREATE TABLE IF NOT EXISTS support_tickets (
                    id VARCHAR(36) PRIMARY KEY NOT NULL,
                    user_id VARCHAR(36),
                    category VARCHAR(24) NOT NULL DEFAULT 'problem',
                    status VARCHAR(24) NOT NULL DEFAULT 'open',
                    name VARCHAR(255) NOT NULL DEFAULT '',
                    email VARCHAR(255) NOT NULL,
                    subject VARCHAR(255) NOT NULL,
                    message TEXT NOT NULL,
                    admin_notes TEXT,
                    source_ip_hash VARCHAR(128),
                    source_user_agent VARCHAR(512),
                    created_at DATETIME NOT NULL,
                    updated_at DATETIME,
                    closed_at DATETIME,
                    FOREIGN KEY(user_id) REFERENCES users (id) ON DELETE SET NULL
                )
                """
            )
        )
        conn.execute(text("CREATE INDEX IF NOT EXISTS ix_support_tickets_user_id ON support_tickets (user_id)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS ix_support_tickets_email ON support_tickets (email)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS ix_support_tickets_status ON support_tickets (status)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS ix_support_tickets_source_ip_hash ON support_tickets (source_ip_hash)"))
        conn.execute(text("CREATE INDEX IF NOT EXISTS ix_support_tickets_created_at ON support_tickets (created_at)"))


# --- new file in the same commit: app/services/overtime.py ---
from datetime import date, timedelta

from app.services.calculations import compute_net_minutes
from app.services.targets import monday_of, target_map_for_weeks, week_starts_between
from app.services.vacations import expand_vacation_dates
from app.services.workdays import DEFAULT_WORKING_DAYS, is_workday


def compute_effective_span_totals(
    *,
    entries: list,
    range_start: date,
    range_end: date,
    weekly_target_minutes: int,
    vacation_dates: set[date] | None,
    non_working_dates: set[date] | None,
    count_as_worktime_dates: set[date] | None,
    overtime_adjustment_minutes_by_date: dict[date, int] | None,
    overtime_start_date: date | None,
    relevant_weekdays: set[int] | None = None,
) -> dict[str, int]:
    """Compute actual/target/delta minutes for an inclusive date span.

    Body continues past this chunk; the empty-span guard and the
    overtime_start_date cut-off live here.
    """
    if range_end < range_start:
        return {
            "ist_minutes": 0,
            "soll_minutes": 0,
            "delta_minutes": 0,
            "eligible_workdays": 0,
            "vacation_workdays": 0,
        }

    # Days before overtime_start_date contribute nothing to ist/soll.
    blocked_before = overtime_start_date
    # (continuation of compute_effective_span_totals from the previous chunk)
    vacation_dates = vacation_dates or set()
    non_working_dates = non_working_dates or set()
    count_as_worktime_dates = count_as_worktime_dates or set()
    overtime_adjustment_minutes_by_date = overtime_adjustment_minutes_by_date or {}
    relevant_weekdays = relevant_weekdays or set(DEFAULT_WORKING_DAYS)
    workdays_per_week = max(1, len(relevant_weekdays))

    # Net worked minutes per day, restricted to the span.
    net_by_date: dict[date, int] = {}
    for entry in entries:
        if entry.date < range_start or entry.date > range_end:
            continue
        net_by_date[entry.date] = compute_net_minutes(
            entry.start_minutes,
            entry.end_minutes,
            entry.break_minutes,
        )

    eligible_workdays = 0
    vacation_workdays = 0
    ist_minutes = 0
    overtime_adjustment_minutes = 0
    current = range_start
    while current <= range_end:
        # Manual adjustments count even before overtime_start_date.
        overtime_adjustment_minutes += int(overtime_adjustment_minutes_by_date.get(current, 0))
        if blocked_before is None or current >= blocked_before:
            # A "counts as worktime" day (e.g. vacation/holiday/sick, per user
            # settings) is credited its per-day target instead of actual time.
            day_counts_as_worktime = current in count_as_worktime_dates and is_workday(current, relevant_weekdays)
            day_target_minutes = int(round(weekly_target_minutes / workdays_per_week)) if is_workday(current, relevant_weekdays) else 0
            if day_counts_as_worktime:
                ist_minutes += day_target_minutes
            elif current not in non_working_dates:
                ist_minutes += net_by_date.get(current, 0)
            if is_workday(current, relevant_weekdays):
                if current in vacation_dates and not day_counts_as_worktime:
                    vacation_workdays += 1
                elif current in non_working_dates and not day_counts_as_worktime:
                    pass  # day contributes to neither ist nor soll
                else:
                    eligible_workdays += 1
        current += timedelta(days=1)

    soll_minutes = int(round((weekly_target_minutes / workdays_per_week) * eligible_workdays))
    delta_minutes = ist_minutes - soll_minutes + overtime_adjustment_minutes
    return {
        "ist_minutes": ist_minutes,
        "soll_minutes": soll_minutes,
        "delta_minutes": delta_minutes,
        "eligible_workdays": eligible_workdays,
        "vacation_workdays": vacation_workdays,
        "overtime_adjustment_minutes": overtime_adjustment_minutes,
    }


def compute_effective_week_totals(
    *,
    entries: list,
    week_start: date,
    weekly_target_minutes: int,
    vacation_dates: set[date] | None,
    non_working_dates: set[date] | None,
    count_as_worktime_dates: set[date] | None,
    overtime_adjustment_minutes_by_date: dict[date, int] | None,
    overtime_start_date: date | None,
    relevant_weekdays: set[int] | None = None,
) -> dict[str, int]:
    """Span totals for the 7-day week starting at *week_start*, renamed to weekly_* keys."""
    week_end = week_start + timedelta(days=6)
    totals = compute_effective_span_totals(
        entries=entries,
        range_start=week_start,
        range_end=week_end,
        weekly_target_minutes=weekly_target_minutes,
        vacation_dates=vacation_dates,
        non_working_dates=non_working_dates,
        count_as_worktime_dates=count_as_worktime_dates,
        overtime_adjustment_minutes_by_date=overtime_adjustment_minutes_by_date,
        overtime_start_date=overtime_start_date,
        relevant_weekdays=relevant_weekdays,
    )
    return {
        "weekly_ist": totals["ist_minutes"],
        "weekly_soll": totals["soll_minutes"],
        "weekly_delta": totals["delta_minutes"],
    }


def compute_cumulative_overtime_minutes(
    *,
    entries: list,
    rules: list,
    weekly_target_fallback: int,
    vacation_periods: list,
    non_working_dates: set[date] | None,
    count_as_worktime_dates: set[date] | None,
    overtime_adjustment_minutes_by_date: dict[date, int] | None,
    selected_week_start: date,
    overtime_start_date: date | None,
    overtime_expiry_days: int | None,
    expire_negative_overtime: bool,
    relevant_weekdays: set[int] | None = None,
) -> int:
    """Cumulative overtime up to and including the week of *selected_week_start*."""
    selected_week_end = selected_week_start + timedelta(days=6)
    return compute_cumulative_overtime_until_date(
        entries=entries,
        rules=rules,
        weekly_target_fallback=weekly_target_fallback,
        vacation_periods=vacation_periods,
        non_working_dates=non_working_dates,
        count_as_worktime_dates=count_as_worktime_dates,
        overtime_adjustment_minutes_by_date=overtime_adjustment_minutes_by_date,
        as_of_date=selected_week_end,
        overtime_start_date=overtime_start_date,
        overtime_expiry_days=overtime_expiry_days,
        expire_negative_overtime=expire_negative_overtime,
        relevant_weekdays=relevant_weekdays,
    )


def compute_cumulative_overtime_until_date(
    *,
    entries: list,
    rules: list,
    weekly_target_fallback: int,
    vacation_periods: list,
    non_working_dates: set[date] | None,
    count_as_worktime_dates: set[date] | None,
    overtime_adjustment_minutes_by_date: dict[date, int] | None,
    as_of_date: date,
    overtime_start_date: date | None,
    overtime_expiry_days: int | None,
    expire_negative_overtime: bool,
    relevant_weekdays: set[int] | None = None,
) -> int:
    """Sum per-day overtime deltas from the earliest known data up to *as_of_date*.

    Per-week targets come from *rules* (fallback *weekly_target_fallback*);
    days older than *overtime_expiry_days* have positive deltas dropped
    (and negative ones too when *expire_negative_overtime* is set).
    Returns the total rounded to whole minutes.
    """
    relevant_weekdays = relevant_weekdays or set(DEFAULT_WORKING_DAYS)
    workdays_per_week = max(1, len(relevant_weekdays))
    overtime_adjustment_minutes_by_date = overtime_adjustment_minutes_by_date or {}

    # The accumulation window starts at the earliest entry or adjustment.
    earliest_entry_date = min((entry.date for entry in entries), default=None)
    earliest_adjustment_date = min(overtime_adjustment_minutes_by_date.keys(), default=None)

    range_start_candidates = [candidate for candidate in [earliest_entry_date, earliest_adjustment_date] if candidate is not None]
    if not range_start_candidates:
        return 0

    range_start = min(range_start_candidates)

    if range_start > as_of_date:
        return 0

    first_week_start = monday_of(range_start)
    relevant_weeks = week_starts_between(first_week_start, monday_of(as_of_date))
    base_target_map = target_map_for_weeks(rules, relevant_weeks, weekly_target_fallback)

    vacation_dates = expand_vacation_dates(
        vacation_periods,
        range_start,
        as_of_date,
        relevant_weekdays=relevant_weekdays,
    )
    non_working_dates = non_working_dates or set()
    count_as_worktime_dates = count_as_worktime_dates or set()

    net_by_date: dict[date, int] = {}
    for entry in entries:
        if entry.date < range_start or entry.date > as_of_date:
            continue
        net_by_date[entry.date] = compute_net_minutes(
            entry.start_minutes,
            entry.end_minutes,
            entry.break_minutes,
        )

    # Days strictly before cutoff_date are subject to overtime expiry.
    cutoff_date: date | None = None
    if overtime_expiry_days is not None and overtime_expiry_days > 0:
        cutoff_date = as_of_date - timedelta(days=overtime_expiry_days)

    # Accumulate in float; per-day targets are fractional (weekly/workdays).
    total = 0.0
    current = range_start
    while current <= as_of_date:
        week_start = monday_of(current)
        weekly_target = base_target_map.get(week_start, weekly_target_fallback)
        day_adjustment = float(overtime_adjustment_minutes_by_date.get(current, 0))

        # Before overtime_start_date only manual adjustments count.
        regular_delta_allowed = overtime_start_date is None or current >= overtime_start_date

        day_counts_as_worktime = current in count_as_worktime_dates and current.weekday() in relevant_weekdays

        if regular_delta_allowed and current.weekday() in relevant_weekdays and (current not in vacation_dates or day_counts_as_worktime):
            if current in non_working_dates and not day_counts_as_worktime:
                day_target = 0.0
            else:
                day_target = weekly_target / workdays_per_week
        else:
            day_target = 0.0

        if regular_delta_allowed:
            if day_counts_as_worktime:
                day_net = day_target  # credited exactly the target -> zero delta
            else:
                day_net = 0.0 if current in non_working_dates else float(net_by_date.get(current, 0))
        else:
            day_net = 0.0
        delta = day_net - day_target + day_adjustment

        # Expiry: drop positive deltas past the cutoff; negative ones only
        # when the user opted in via expire_negative_overtime.
        expired = cutoff_date is not None and current < cutoff_date
        if expired:
            if delta > 0:
                current += timedelta(days=1)
                continue
            if delta < 0 and expire_negative_overtime:
                current += timedelta(days=1)
                continue

        total += delta
        current += timedelta(days=1)

    return int(round(total))


# --- new file in the same commit: app/services/public_holidays.py ---
from __future__ import annotations

from datetime import date

import holidays
# NOTE(review): the GERMAN_STATE_OPTIONS constant begins here in the original
# diff and completes in the next chunk.
"Hamburg"}, + {"code": "HE", "label": "Hessen"}, + {"code": "MV", "label": "Mecklenburg-Vorpommern"}, + {"code": "NI", "label": "Niedersachsen"}, + {"code": "NW", "label": "Nordrhein-Westfalen"}, + {"code": "RP", "label": "Rheinland-Pfalz"}, + {"code": "SL", "label": "Saarland"}, + {"code": "SN", "label": "Sachsen"}, + {"code": "ST", "label": "Sachsen-Anhalt"}, + {"code": "SH", "label": "Schleswig-Holstein"}, + {"code": "TH", "label": "Thüringen"}, +] +GERMAN_STATE_CODES = {item["code"] for item in GERMAN_STATE_OPTIONS} + + +def normalize_german_state_code(value: str | None) -> str | None: + if value is None: + return None + normalized = value.strip().upper() + if not normalized: + return None + if normalized not in GERMAN_STATE_CODES: + return None + return normalized + + +def list_public_holiday_dates( + *, + federal_state: str, + from_date: date, + to_date: date, +) -> set[date]: + if to_date < from_date: + return set() + years = list(range(from_date.year, to_date.year + 1)) + holiday_map = holidays.country_holidays("DE", subdiv=federal_state, years=years) + result: set[date] = set() + for holiday_date in holiday_map.keys(): + if from_date <= holiday_date <= to_date: + result.add(holiday_date) + return result diff --git a/app/services/security.py b/app/services/security.py new file mode 100644 index 0000000..230fcc0 --- /dev/null +++ b/app/services/security.py @@ -0,0 +1,70 @@ +from __future__ import annotations + +from base64 import urlsafe_b64encode +from datetime import datetime, timezone +import hashlib +import secrets + +from cryptography.fernet import Fernet, InvalidToken +import pyotp + + +def utc_now() -> datetime: + return datetime.now(timezone.utc) + + +def _derive_fernet_key(source: str) -> bytes: + digest = hashlib.sha256(source.encode("utf-8")).digest() + return urlsafe_b64encode(digest) + + +def build_fernet(secret_source: str) -> Fernet: + return Fernet(_derive_fernet_key(secret_source)) + + +def encrypt_secret(fernet: Fernet, value: str) -> str: 
# --- app/services/security.py (continued) ---

def encrypt_secret(fernet: Fernet, value: str) -> str:
    """Encrypt a string with the given Fernet and return the token as text."""
    token = fernet.encrypt(value.encode("utf-8"))
    return token.decode("utf-8")


def decrypt_secret(fernet: Fernet, value: str | None) -> str | None:
    """Decrypt a stored token; None for empty or undecryptable input."""
    if not value:
        return None
    try:
        plaintext = fernet.decrypt(value.encode("utf-8"))
    except InvalidToken:
        # Wrong key or corrupted ciphertext: treat as "no secret stored".
        return None
    return plaintext.decode("utf-8")


def generate_numeric_code(length: int = 6) -> str:
    """Random numeric code of exactly `length` digits (no leading zero)."""
    if length <= 0:
        raise ValueError("length must be positive")
    lowest = 10 ** (length - 1)
    span = 9 * lowest  # count of distinct length-digit values
    return str(lowest + secrets.randbelow(span))


def hash_token(token: str) -> str:
    """SHA-256 hex digest of a token, for server-side token storage."""
    return hashlib.sha256(token.encode("utf-8")).hexdigest()


def generate_reset_token() -> str:
    """URL-safe password-reset token built from 48 random bytes."""
    return secrets.token_urlsafe(48)


def normalize_otp_code(code: str) -> str:
    """Keep only the digits of user-entered OTP input."""
    return "".join(filter(str.isdigit, code.strip()))


def generate_totp_secret() -> str:
    """Fresh base32 secret for TOTP enrollment."""
    return pyotp.random_base32()


def build_totp_uri(*, secret: str, account_name: str, issuer: str = "Stundenfuchs") -> str:
    """otpauth:// provisioning URI for authenticator apps."""
    totp = pyotp.TOTP(secret)
    return totp.provisioning_uri(name=account_name, issuer_name=issuer)


def verify_totp_code(*, secret: str, code: str) -> bool:
    """Verify a 6-digit TOTP code, allowing one time-step of clock drift."""
    digits = normalize_otp_code(code)
    if len(digits) != 6:
        return False
    return bool(pyotp.TOTP(secret).verify(digits, valid_window=1))


# --- app/services/targets.py (beginning) ---

DEFAULT_REFERENCE_WEEK_START = date(1970, 1, 5)  # a Monday


def monday_of(day: date) -> date:
    """Monday of the week containing `day`."""
    return day - timedelta(days=day.weekday())


def week_starts_between(start_week_start: date, end_week_start: date) -> list[date]:
    """All week-start dates from start to end inclusive, in 7-day steps."""
    span_weeks = (end_week_start - start_week_start).days // 7 + 1
    return [start_week_start + timedelta(weeks=index) for index in range(span_weeks)]
def list_rules_for_user(db: Session, user_id: str) -> list[WeeklyTargetRule]:
    """All weekly-target rules of a user, ordered by effective_from ascending."""
    query = (
        select(WeeklyTargetRule)
        .where(WeeklyTargetRule.user_id == user_id)
        .order_by(WeeklyTargetRule.effective_from.asc())
    )
    return db.execute(query).scalars().all()


def target_for_week(
    rules: list[WeeklyTargetRule],
    week_start: date,
    fallback_minutes: int,
) -> int:
    """Weekly target for the given week start.

    `rules` must be sorted by effective_from ascending; the last rule with
    effective_from <= week_start wins, otherwise `fallback_minutes`.
    """
    target = fallback_minutes
    for rule in rules:
        if rule.effective_from > week_start:
            break  # sorted input: no later rule can apply either
        target = rule.weekly_target_minutes
    return target


def target_map_for_weeks(
    rules: list[WeeklyTargetRule],
    week_starts: list[date],
    fallback_minutes: int,
) -> dict[date, int]:
    """Map each week start to its effective weekly target."""
    return {
        week_start: target_for_week(rules, week_start, fallback_minutes)
        for week_start in week_starts
    }


def upsert_rule(db: Session, user_id: str, effective_from: date, weekly_target_minutes: int) -> None:
    """Create or update the rule keyed by (user_id, effective_from)."""
    existing = db.execute(
        select(WeeklyTargetRule).where(
            WeeklyTargetRule.user_id == user_id,
            WeeklyTargetRule.effective_from == effective_from,
        )
    ).scalar_one_or_none()

    if existing is not None:
        existing.weekly_target_minutes = weekly_target_minutes
        return

    db.add(
        WeeklyTargetRule(
            user_id=user_id,
            effective_from=effective_from,
            weekly_target_minutes=weekly_target_minutes,
        )
    )


def ensure_user_has_default_target_rule(db: Session, user: User) -> None:
    """Seed a baseline rule from user.weekly_target_minutes if none exists."""
    has_rule = db.execute(
        select(WeeklyTargetRule.id).where(WeeklyTargetRule.user_id == user.id).limit(1)
    ).scalar_one_or_none()
    if has_rule:
        return

    db.add(
        WeeklyTargetRule(
            user_id=user.id,
            effective_from=DEFAULT_REFERENCE_WEEK_START,
            weekly_target_minutes=user.weekly_target_minutes,
        )
    )
def ensure_all_users_have_default_target_rules(db: Session) -> None:
    """Backfill: make sure every user has at least one weekly-target rule.

    Skips users that already have a rule; seeds the rest from
    user.weekly_target_minutes. Commits only when something was added.
    """
    users = db.execute(select(User)).scalars().all()
    changed = False
    for user in users:
        # Cheap existence probe: fetch a single rule id for this user.
        before_count = db.execute(
            select(WeeklyTargetRule.id).where(WeeklyTargetRule.user_id == user.id).limit(1)
        ).scalar_one_or_none()
        if before_count:
            continue
        ensure_user_has_default_target_rule(db, user)
        changed = True

    if changed:
        db.commit()


def apply_weekly_target_change(
    db: Session,
    *,
    user: User,
    selected_week_start: date,
    new_target_minutes: int,
    scope: str,
) -> None:
    """Apply a weekly-target change for one of three scopes.

    scope == "all_weeks": wipe the rule history, keep a single baseline rule.
    scope == "from_current_week": drop rules at/after the selected week and
        anchor the new target there.
    scope == "current_week": override only the selected week; the previously
        effective target is re-pinned onto the following week so the change
        does not leak forward.

    Raises ValueError for any other scope value. Does not commit.
    """
    rules = list_rules_for_user(db, user.id)
    fallback = user.weekly_target_minutes

    if scope == "all_weeks":
        db.execute(delete(WeeklyTargetRule).where(WeeklyTargetRule.user_id == user.id))
        db.add(
            WeeklyTargetRule(
                user_id=user.id,
                effective_from=DEFAULT_REFERENCE_WEEK_START,
                weekly_target_minutes=new_target_minutes,
            )
        )
        return

    if scope == "from_current_week":
        db.execute(
            delete(WeeklyTargetRule).where(
                WeeklyTargetRule.user_id == user.id,
                WeeklyTargetRule.effective_from >= selected_week_start,
            )
        )
        upsert_rule(db, user.id, selected_week_start, new_target_minutes)
        return

    if scope == "current_week":
        # Compute next week's target BEFORE inserting the override, from the
        # rule list as it was loaded above.
        next_week_start = selected_week_start + timedelta(days=7)
        target_next_week_before = target_for_week(rules, next_week_start, fallback)
        upsert_rule(db, user.id, selected_week_start, new_target_minutes)
        upsert_rule(db, user.id, next_week_start, target_next_week_before)
        return

    raise ValueError("Ungueltiger Scope")


# --- app/services/vacations.py (beginning) ---

from datetime import date, timedelta

from sqlalchemy import select
from sqlalchemy.orm import Session

from app.models import VacationPeriod
from app.services.workdays import DEFAULT_WORKING_DAYS


def daterange(start: date, end: date):
    """Yield every date from start to end, inclusive."""
    current = start
    while current <= end:
        yield current
        current += timedelta(days=1)
def list_vacations_for_user(
    db: Session,
    user_id: str,
    from_date: date,
    to_date: date,
) -> list[VacationPeriod]:
    """Vacation periods of a user overlapping [from_date, to_date], oldest first."""
    overlap_filter = (
        VacationPeriod.user_id == user_id,
        VacationPeriod.end_date >= from_date,
        VacationPeriod.start_date <= to_date,
    )
    stmt = (
        select(VacationPeriod)
        .where(*overlap_filter)
        .order_by(VacationPeriod.start_date.asc())
    )
    return db.execute(stmt).scalars().all()


def expand_vacation_dates(
    periods: list[VacationPeriod],
    from_date: date,
    to_date: date,
    relevant_weekdays: set[int] | None = None,
) -> set[date]:
    """Expand vacation periods into the set of individual dates they cover.

    Each period is clipped to [from_date, to_date]. Unless the period has
    include_weekends set, only relevant weekdays count (Mon-Fri when
    relevant_weekdays is None).
    """
    result: set[date] = set()
    for period in periods:
        window_start = max(period.start_date, from_date)
        window_end = min(period.end_date, to_date)

        day = window_start
        while day <= window_end:
            if period.include_weekends:
                result.add(day)
            else:
                allowed = relevant_weekdays if relevant_weekdays is not None else {0, 1, 2, 3, 4}
                if day.weekday() in allowed:
                    result.add(day)
            day += timedelta(days=1)

    return result


def collapse_dates_to_ranges(days: set[date]) -> list[tuple[date, date]]:
    """Collapse a set of dates into sorted (start, end) runs of consecutive days."""
    ordered = sorted(days)
    if not ordered:
        return []

    ranges: list[tuple[date, date]] = []
    run_start = previous = ordered[0]
    for day in ordered[1:]:
        if day != previous + timedelta(days=1):
            # Gap found: close the current run.
            ranges.append((run_start, previous))
            run_start = day
        previous = day

    ranges.append((run_start, previous))
    return ranges


def vacation_workdays_in_week(
    vacation_dates: set[date],
    week_start: date,
    relevant_weekdays: set[int] | None = None,
) -> int:
    """Number of vacation days in the week that fall on relevant weekdays."""
    weekdays = relevant_weekdays or set(DEFAULT_WORKING_DAYS)
    week_days = (week_start + timedelta(days=offset) for offset in range(7))
    return sum(1 for day in week_days if day in vacation_dates and day.weekday() in weekdays)
def effective_week_target(
    base_target_minutes: int,
    vacation_workdays: int,
    *,
    workdays_per_week: int = 5,
) -> int:
    """Weekly target reduced by one day-share per vacation workday (floor 0)."""
    if vacation_workdays <= 0:
        return base_target_minutes

    per_week = max(1, workdays_per_week)
    credited = min(vacation_workdays, per_week)  # cannot credit more than a full week
    per_day = base_target_minutes / per_week
    remaining = int(round(base_target_minutes - (per_day * credited)))
    return max(0, remaining)


def apply_vacation_to_week_targets(
    base_target_map: dict[date, int],
    vacation_dates: set[date],
    relevant_weekdays: set[int] | None = None,
) -> dict[date, int]:
    """Reduce every week's target by its vacation workdays."""
    weekdays = relevant_weekdays or set(DEFAULT_WORKING_DAYS)
    per_week = max(1, len(weekdays))
    return {
        week_start: effective_week_target(
            base_target,
            vacation_workdays_in_week(vacation_dates, week_start, weekdays),
            workdays_per_week=per_week,
        )
        for week_start, base_target in base_target_map.items()
    }


def vacation_dates_for_weeks(
    periods: list[VacationPeriod],
    week_starts: list[date],
    relevant_weekdays: set[int] | None = None,
) -> set[date]:
    """Expand vacation periods over the full span covered by the given weeks."""
    if not week_starts:
        return set()

    span_start = min(week_starts)
    span_end = max(week_starts) + timedelta(days=6)  # include the last week's Sunday
    return expand_vacation_dates(periods, span_start, span_end, relevant_weekdays=relevant_weekdays)


def week_target_map_with_vacations(
    base_target_map: dict[date, int],
    periods: list[VacationPeriod],
    relevant_weekdays: set[int] | None = None,
) -> dict[date, int]:
    """Convenience wrapper: expand vacations, then reduce the week targets."""
    week_starts = list(base_target_map.keys())
    vacation_dates = vacation_dates_for_weeks(periods, week_starts, relevant_weekdays=relevant_weekdays)
    return apply_vacation_to_week_targets(base_target_map, vacation_dates, relevant_weekdays)


def vacations_by_week(
    periods: list[VacationPeriod],
    week_starts: list[date],
    relevant_weekdays: set[int] | None = None,
) -> dict[date, int]:
    """Map each week start to its count of vacation workdays."""
    weekdays = relevant_weekdays or set(DEFAULT_WORKING_DAYS)
    vacation_dates = vacation_dates_for_weeks(periods, week_starts, relevant_weekdays=weekdays)
    return {
        week_start: vacation_workdays_in_week(vacation_dates, week_start, weekdays)
        for week_start in week_starts
    }
# Weekday indices follow date.weekday(): 0 = Monday ... 6 = Sunday.
DEFAULT_WORKING_DAYS = (0, 1, 2, 3, 4)


def normalize_working_days(days: list[int] | set[int] | tuple[int, ...]) -> list[int]:
    """Sorted, de-duplicated weekday indices restricted to 0..6.

    Falls back to Monday-Friday when nothing valid remains.
    """
    valid = sorted({int(day) for day in days} & {0, 1, 2, 3, 4, 5, 6})
    return valid if valid else list(DEFAULT_WORKING_DAYS)


def serialize_working_days(days: list[int] | set[int] | tuple[int, ...]) -> str:
    """Normalize and render as a CSV string, e.g. "0,1,2,3,4"."""
    return ",".join(map(str, normalize_working_days(days)))


def parse_working_days_csv(value: str | None) -> set[int]:
    """Parse a CSV of weekday indices; invalid tokens are ignored.

    Empty/None input (or no valid token) yields the default Monday-Friday set.
    """
    if not value:
        return set(DEFAULT_WORKING_DAYS)

    def _as_int(token: str) -> int | None:
        try:
            return int(token)
        except ValueError:
            return None

    candidates = (_as_int(part.strip()) for part in value.split(","))
    numbers = [number for number in candidates if number is not None]
    return set(normalize_working_days(numbers))


def is_workday(day: date, relevant_weekdays: set[int]) -> bool:
    """True when `day` falls on one of the relevant weekday indices."""
    return day.weekday() in relevant_weekdays
/* app.css — single stylesheet entry point.
   Import order matters: tokens first (custom properties), then base,
   layout, components, utilities. */
@import url("./tokens.css");
@import url("./base.css");
@import url("./layout.css");
@import url("./components.css");
@import url("./utilities.css");

/* base.css — element-level defaults; all colors/spacing come from
   tokens.css variables (see app/static/css/AGENTS.md). */
* {
  box-sizing: border-box;
}

html,
body {
  margin: 0;
  min-height: 100%;
}

body {
  background: var(--color-bg);
  color: var(--color-text);
  font-family: var(--font-family-base);
  font-size: var(--font-size-md);
  font-weight: var(--font-weight-normal);
  line-height: 1.4;
}

a {
  color: var(--color-link);
  text-decoration: none;
}

a:hover {
  text-decoration: underline;
}

img {
  display: block;
  height: auto;
  max-width: 100%;
}

h1,
h2,
h3,
h4,
h5,
h6,
p {
  margin: 0;
}

hr {
  border: 0;
  border-top: var(--border-width-1) solid var(--color-border);
  margin: var(--space-5) 0;
}

/* Visible keyboard-focus ring, offset so it never hugs the element edge. */
:focus-visible {
  outline: var(--border-width-1) solid var(--color-warning);
  outline-offset: var(--space-1);
}

/* components.css (beginning) — header brand block and total badges. */
.app-brand-wrap {
  align-items: center;
  display: inline-flex;
  gap: var(--space-5);
  min-height: var(--badge-height);
}

.app-brand,
.brand {
  align-items: center;
  display: inline-flex;
}

.app-logo {
  height: var(--logo-size);
  width: var(--logo-size);
}

.app-total-badges {
  align-items: center;
  display: inline-flex;
  gap: var(--space-3);
}
+ gap: var(--space-1); + justify-content: center; + min-height: 2.5rem; + min-width: 4.5rem; + padding: var(--space-3) var(--space-3) var(--space-2); + text-align: center; +} + +.app-total-badge__label { + color: var(--color-header-badge-label); + font-size: var(--font-size-xs); + font-weight: var(--font-weight-bold); + line-height: 1; +} + +.app-total-badge__value { + color: var(--color-header-badge-text); + font-size: var(--font-size-xl); + font-weight: var(--font-weight-bold); + letter-spacing: -0.02em; + line-height: 1; +} + +.settings-counter-badges { + justify-content: center; + margin-top: var(--space-2); + width: 100%; +} + +.settings-counter-badges .app-total-badge { + gap: calc(var(--space-1) * 1.5); + min-height: calc(2.5rem * 1.5); + min-width: calc(4.5rem * 1.5); + padding: calc(var(--space-3) * 1.5) calc(var(--space-3) * 1.5) calc(var(--space-2) * 1.5); +} + +.settings-counter-badges .app-total-badge__label { + font-size: var(--font-size-sm); +} + +.settings-counter-badges .app-total-badge__value { + font-size: var(--font-size-2xl); +} + +.settings-workhours-form { + margin-top: var(--space-4); +} + +.settings-workhours-inline-grid { + align-items: start; +} + +.settings-workhours-field { + align-content: start; + grid-template-rows: auto minmax(var(--input-height), auto); +} + +.settings-workhours-field-hint { + margin-top: calc(var(--space-3) * -1); +} + +.app-total-badge-vacation .app-total-badge__value { + color: var(--color-warning); +} + +.app-total-badge-workhours .app-total-badge__value { + color: var(--color-workhours); +} + +.app-total-badge-target .app-total-badge__value { + color: var(--color-text); +} + +.app-total-badge-projection .app-total-badge__value { + color: var(--color-workhours); +} + +.app-total-badge-projection.is-positive .app-total-badge__value { + color: var(--color-success-strong); +} + +.app-total-badge.is-negative .app-total-badge__value { + color: var(--color-danger-strong); +} + +.app-total-badge.is-positive 
.app-total-badge__value { + color: var(--color-success); +} + +.segmented-toggle { + align-items: stretch; + display: inline-flex; + gap: var(--space-2); +} + +.segmented-toggle__item { + align-items: center; + border: var(--border-width-1) solid transparent; + border-radius: var(--radius-sm); + color: var(--color-link); + display: inline-flex; + font-size: var(--font-size-md); + font-weight: var(--font-weight-medium); + height: var(--control-h); + justify-content: center; + line-height: 1; + min-width: calc(var(--control-h) + var(--space-3)); + padding: 0 var(--space-3); + text-decoration: none; +} + +.segmented-toggle__item:hover { + text-decoration: none; +} + +.segmented-toggle__item.is-active { + background: var(--color-badge-bg); + color: var(--color-bg); + font-weight: var(--font-weight-bold); +} + +.topbar-toggle .segmented-toggle__item { + border-radius: var(--radius-md); + font-size: var(--font-size-lg); + font-weight: var(--font-weight-bold); + height: var(--badge-height); + min-width: auto; + padding: var(--badge-pad-top) var(--space-3) var(--badge-pad-bottom); +} + +.auth-toggle .segmented-toggle__item { + font-size: var(--font-size-xs); + min-width: auto; + padding: 0 var(--space-2); +} + +.app-icon-nav { + align-items: center; + display: inline-flex; + gap: var(--space-3); +} + +.icon-button, +.app-icon-btn { + align-items: center; + background: transparent; + border: var(--border-width-1) solid transparent; + border-radius: var(--radius-md); + color: var(--color-text); + cursor: pointer; + display: inline-flex; + height: var(--control-h); + justify-content: center; + min-width: var(--control-h); + padding: 0; + text-decoration: none; +} + +.icon-button:hover, +.app-icon-btn:hover { + background: var(--color-primary); + text-decoration: none; +} + +.dash-icon { + height: var(--header-icon-size); + width: var(--header-icon-size); +} + +.dash-icon-rotate-180 { + transform: rotate(180deg); +} + +.btn, +.button { + align-items: center; + background: 
var(--color-button-primary); + border: var(--border-width-1) solid var(--color-border); + border-radius: var(--radius-md); + color: var(--color-text); + cursor: pointer; + display: inline-flex; + font-size: var(--font-size-md); + font-weight: var(--font-weight-semibold); + gap: var(--space-2); + height: var(--control-h); + justify-content: center; + min-height: var(--control-h); + padding: 0 var(--space-4); + text-decoration: none; +} + +.btn:hover, +.button:hover { + background: var(--color-button-primary-hover); + text-decoration: none; +} + +.btn:disabled, +.button:disabled { + cursor: not-allowed; + opacity: 0.55; +} + +.btn--ghost, +.button.ghost { + background: transparent; +} + +.btn--ghost:hover, +.button.ghost:hover { + background: var(--color-primary); +} + +.btn--danger, +.button.danger { + background: var(--color-danger); +} + +.btn[type="submit"]:not(.btn--ghost, .btn--danger), +.button[type="submit"]:not(.ghost, .danger) { + border-color: var(--color-button-primary-border); +} + +.card, +.auth-card, +.form-card, +.register-section, +.register-section-note, +.register-actions, +.vacation-item, +.settings-note, +.admin-user-row, +.settings-section { + background: color-mix(in srgb, var(--color-surface) 92%, transparent); + border: var(--border-width-1) solid var(--color-border); +} + +.card { + padding: var(--surface-pad); +} + +.auth-card, +.form-card { + margin: 0 auto; + width: min(42.5rem, 100%); +} + +.legal-card, +.contact-card { + display: grid; + gap: var(--space-5); +} + +.legal-content { + display: grid; + gap: var(--space-3); +} + +.legal-content h1, +.legal-content h2, +.legal-content h3, +.legal-content h4 { + color: var(--color-text); +} + +.legal-content p, +.legal-content li, +.legal-content blockquote { + color: var(--color-text); + line-height: 1.6; +} + +.legal-content ul, +.legal-content ol { + display: grid; + gap: var(--space-2); + margin: 0; + padding-inline-start: var(--space-5); +} + +.legal-content a { + color: 
var(--color-link); +} + +.form-card.full-width, +.settings-section.form-card.full-width { + max-width: none; + width: 100%; +} + +.auth-card-register, +.register-shell { + border: 0; + margin: 0; + padding: 0; + width: 100%; +} + +.auth-links { + display: grid; + gap: var(--space-2); + margin-top: var(--space-4); +} + +.auth-links p { + margin: 0; +} + +.auth-links a { + text-decoration: underline; + text-underline-offset: var(--space-1); +} + +.site-footer-nav { + display: inline-flex; + flex-wrap: wrap; + gap: var(--space-4); +} + +.site-footer-nav a { + color: var(--color-text-muted); + text-decoration: none; +} + +.site-footer-nav a:hover { + color: var(--color-text); + text-decoration: underline; +} + +.alert, +.error, +.success { + align-items: center; + border: var(--border-width-1) solid var(--color-border); + display: flex; + gap: var(--space-3); + justify-content: space-between; + margin-bottom: var(--space-2); + padding: var(--space-3) var(--space-4); +} + +.success, +.alert--success { + background: var(--color-success-bg); + color: var(--color-success); +} + +.error, +.alert--error { + background: color-mix(in srgb, var(--color-danger) 20%, var(--color-bg)); + color: var(--color-text); +} + +.alert--info { + background: color-mix(in srgb, var(--color-surface-2) 80%, transparent); +} + +.alert__close { + background: transparent; + border: 0; + color: inherit; + cursor: pointer; + font-size: var(--font-size-lg); +} + +.stack { + display: grid; + gap: var(--space-4); +} + +.inline-grid { + display: grid; + gap: var(--space-4); + grid-template-columns: repeat(2, minmax(11rem, 1fr)); +} + +.nav-row { + display: flex; + flex-wrap: wrap; + gap: var(--space-3); +} + +label, +.form-field { + display: grid; + gap: var(--space-2); + font-weight: var(--font-weight-semibold); +} + +.form-field--hint { + align-items: start; +} + +.stack-xs { + display: grid; + gap: var(--space-2); +} + +.form-field__label { + font-weight: var(--font-weight-semibold); +} + +input, 
+textarea, +select, +.input { + background: var(--color-surface-3); + border: var(--border-width-1) solid var(--color-border); + border-radius: var(--radius-md); + color: var(--color-text); + font: inherit; + min-height: var(--input-height); + padding: var(--space-3); +} + +textarea, +textarea.input { + min-height: 6rem; +} + +input:disabled, +textarea:disabled, +select:disabled { + color: var(--color-text-muted); +} + +.checkbox-row { + align-items: center; + display: flex; + gap: var(--space-2); + font-weight: var(--font-weight-medium); +} + +.checkbox-row input { + min-height: auto; +} + +.weekday-fieldset { + border: var(--border-width-1) solid var(--color-border); + margin: 0; + padding: var(--surface-pad-compact); +} + +.weekday-fieldset legend { + color: var(--color-text-muted); + font-size: var(--font-size-sm); + padding: 0 var(--space-1); +} + +.weekday-grid, +.register-weekday-grid { + display: grid; + gap: var(--space-2) var(--space-4); + grid-template-columns: repeat(4, minmax(7.5rem, 1fr)); +} + +.table-wrap { + overflow-x: auto; +} + +.table { + border-collapse: collapse; + width: 100%; +} + +.table th, +.table td { + border: var(--border-width-1) solid var(--color-border); + padding: var(--space-3); + text-align: left; +} + +.empty-state { + border: var(--border-width-1) dashed var(--color-border); + padding: var(--space-6); + text-align: center; +} + +.badge { + background: var(--color-surface-2); + border: var(--border-width-1) solid var(--color-border); + color: var(--color-text); + display: inline-flex; + font-size: var(--font-size-sm); + font-weight: var(--font-weight-bold); + padding: var(--space-1) var(--space-2); +} + +.modal { + inset: 0; + position: fixed; + z-index: var(--z-modal); +} + +.modal__backdrop { + background: color-mix(in srgb, var(--color-bg) 80%, transparent); + inset: 0; + position: absolute; +} + +.modal__dialog { + background: var(--color-surface); + border: var(--border-width-1) solid var(--color-border); + inset: 50% auto 
auto 50%; + max-width: min(40rem, 92vw); + position: absolute; + transform: translate(-50%, -50%); + width: 100%; +} + +.modal__header, +.modal__body { + padding: var(--surface-pad); +} + +.modal__header { + align-items: center; + border-bottom: var(--border-width-1) solid var(--color-border); + display: flex; + justify-content: space-between; +} + +.modal__close { + background: transparent; + border: 0; + color: var(--color-text); + cursor: pointer; + font-size: var(--font-size-lg); +} + +.pagination { + display: flex; + gap: var(--space-3); +} + +.top-row h1 { + font-size: var(--font-size-xl); + font-weight: var(--font-weight-bold); +} + +.settings-page .top-row { + margin-bottom: var(--space-3); + padding-top: var(--space-5); +} + +.settings-grid { + display: grid; + gap: var(--space-6); + margin-top: var(--space-2); +} + +.settings-tabs { + display: grid; + gap: var(--space-2); + grid-template-columns: repeat(2, minmax(0, 1fr)); + margin-bottom: var(--stack-2); +} + +.settings-tabs .segmented-toggle__item { + background: transparent; + border: var(--border-width-1) solid var(--color-border); + border-radius: var(--radius-none); + color: var(--color-text-muted); + font-size: var(--font-size-lg); + font-weight: var(--font-weight-semibold); +} + +.settings-tabs .segmented-toggle__item.is-active { + background: color-mix(in srgb, var(--color-surface) 92%, transparent); + color: var(--color-text); +} + +.site-header.is-auth-header .auth-toggle .segmented-toggle__item { + font-size: var(--font-size-xs); + height: 1.5625rem; + min-width: 4.5rem; + padding: 0 var(--space-2); +} + +.settings-section { + padding: var(--space-4); +} + +.settings-section--collapsible { + padding: 0; +} + +.settings-section__summary { + align-items: center; + cursor: pointer; + display: flex; + gap: var(--space-3); + justify-content: space-between; + list-style: none; + padding: var(--surface-pad); +} + +.settings-section__summary::-webkit-details-marker { + display: none; +} + 
+.settings-section__heading { + font-size: var(--font-size-xl); + font-weight: var(--font-weight-semibold); +} + +.settings-section__chevron { + align-items: center; + color: var(--color-text-muted); + display: inline-flex; + flex: 0 0 auto; + font-size: var(--font-size-lg); + font-weight: var(--font-weight-semibold); + justify-content: center; + line-height: 1; + width: 1rem; +} + +.settings-section__chevron::before { + content: "+"; +} + +.settings-section--collapsible[open] .settings-section__chevron::before { + content: "-"; +} + +.settings-section__content { + border-top: var(--border-width-1) solid var(--color-border); + padding: 0 var(--surface-pad) var(--surface-pad); +} + +.settings-section__content > :first-child { + margin-top: var(--space-4); +} + +.settings-section .button { + width: 100%; +} + +.landing-shell { + justify-items: center; + padding-block: var(--space-7) var(--space-8); +} + +.landing-hero { + display: grid; + gap: var(--space-5); + width: min(44rem, 100%); +} + +.landing-hero--centered { + justify-items: center; + text-align: center; +} + +.landing-hero__copy { + justify-items: center; + margin: 0 auto; + width: min(36rem, 100%); +} + +.landing-hero__logo-wrap { + display: flex; + justify-content: center; +} + +.landing-hero__logo { + height: auto; + max-width: 12rem; + width: 100%; +} + +.landing-eyebrow { + color: var(--color-text-muted); + font-size: var(--font-size-sm); + font-weight: var(--font-weight-semibold); + justify-self: center; + letter-spacing: 0.02em; + margin: 0; + text-transform: uppercase; +} + +.landing-title { + font-size: var(--font-size-2xl); + font-weight: var(--font-weight-bold); + justify-self: center; + line-height: var(--line-height-tight); + margin: 0; + max-width: 30rem; + text-wrap: balance; +} + +.landing-lead { + color: var(--color-text); + font-size: var(--font-size-lg); + justify-self: center; + margin: 0; + max-width: 36rem; + text-align: center; +} + +.landing-cta-row { + display: flex; + flex-wrap: 
wrap; + gap: var(--space-3); + justify-content: center; + justify-self: center; +} + +.landing-cta-row .btn { + min-width: 11rem; +} + +.settings-auth-row { + display: grid; + gap: var(--space-2); + grid-template-columns: repeat(2, minmax(0, 1fr)); +} + +.settings-export-actions { + display: grid; + gap: var(--space-2); +} + +.settings-import-preview { + background: color-mix(in srgb, var(--color-surface) 95%, transparent); + border: var(--border-width-1) solid var(--color-border); + padding: var(--surface-pad); +} + +.settings-import-preview__header { + align-items: start; + display: grid; + gap: var(--space-2); + grid-template-columns: minmax(0, 1fr) auto; +} + +.settings-import-preview__mode { + align-self: center; + background: var(--color-surface-2); + border-radius: var(--radius-sm); + color: var(--color-text); + display: inline-flex; + min-height: var(--control-h); + padding: var(--space-1) var(--space-3); +} + +.settings-import-grid { + display: grid; + gap: var(--space-2); + grid-template-columns: repeat(5, minmax(0, 1fr)); +} + +.settings-import-stat { + background: color-mix(in srgb, var(--color-surface-2) 92%, transparent); + display: grid; + gap: var(--space-1); + padding: var(--space-3); +} + +.settings-import-summary { + display: grid; + gap: var(--space-2); +} + +.settings-import-conflicts { + display: grid; + gap: var(--space-1); + margin: 0; + padding-inline-start: var(--space-4); +} + +.contact-grid { + display: grid; + gap: var(--space-4); + grid-template-columns: minmax(0, 1.4fr) minmax(0, 1fr); +} + +.contact-info-list { + display: grid; + gap: var(--space-2); + margin: 0; + padding-inline-start: var(--space-5); +} + +.contact-honeypot { + height: 0; + left: -9999px; + overflow: hidden; + position: absolute; + width: 0; +} + +.danger-card { + border-color: color-mix(in srgb, var(--color-danger) 24%, var(--color-border)); +} + +.admin-user-row { + align-items: center; + column-gap: var(--space-3); + display: grid; + grid-template-columns: 
minmax(14rem, 1.4fr) repeat(3, minmax(7rem, 1fr)) minmax(13rem, auto); + padding: var(--surface-pad-compact); +} + +.admin-user-actions { + display: inline-flex; + gap: var(--space-2); + justify-content: flex-end; +} + +.register-shell, +.register-form, +.vacation-list, +.admin-user-list, +.admin-recipient-picker, +.support-ticket-list { + display: grid; + gap: var(--space-2); +} + +.support-ticket-card { + background: var(--color-surface-2); + border: var(--border-width-1) solid var(--color-border); + border-radius: var(--radius-md); + display: grid; + gap: var(--space-3); + padding: var(--surface-pad); +} + +.support-ticket-card__header { + display: flex; + flex-wrap: wrap; + gap: var(--space-3); + justify-content: space-between; +} + +.support-ticket-card__title-wrap, +.support-ticket-card__meta { + display: grid; + gap: var(--space-1); +} + +.support-ticket-card__title-wrap h3 { + font-size: var(--font-size-lg); + font-weight: var(--font-weight-bold); +} + +.support-ticket-card__message { + background: var(--color-surface); + border-radius: var(--radius-md); + color: var(--color-text); + padding: var(--surface-pad-compact); + white-space: pre-wrap; +} + +.support-ticket-card__controls { + align-items: start; +} + +.admin-recipient-grid { + display: grid; + gap: var(--space-2) var(--space-4); + grid-template-columns: repeat(2, minmax(12rem, 1fr)); +} + +.vacation-list { + margin-top: var(--space-5); +} + +.vacation-item { + align-items: center; + display: grid; + gap: var(--space-3); + grid-template-columns: minmax(0, 1fr) auto; + padding: var(--surface-pad-compact); +} + +.settings-note { + padding: var(--surface-pad-compact); +} + +.settings-adjustment-meta { + display: grid; + gap: var(--space-2); + place-items: end; +} + +.help-intro, +.help-callout, +.help-section, +.help-guides, +.help-guide-card { + background: color-mix(in srgb, var(--color-surface) 92%, transparent); + border: var(--border-width-1) solid var(--color-border); + display: grid; + gap: 
var(--space-3); + padding: var(--surface-pad); +} + +.help-grid { + display: grid; + gap: var(--space-4); + grid-template-columns: repeat(2, minmax(0, 1fr)); +} + +.help-guides { + margin-top: var(--space-4); +} + +.help-guides__header { + display: grid; + gap: var(--space-2); +} + +.help-guides__grid { + display: grid; + gap: var(--space-4); + grid-template-columns: repeat(2, minmax(0, 1fr)); +} + +.help-guide-card h3 { + font-size: var(--font-size-lg); + font-weight: var(--font-weight-bold); +} + +.help-section__header, +.help-section__body { + display: grid; + gap: var(--space-3); +} + +.help-section h2, +.help-callout h2 { + font-size: var(--font-size-lg); + font-weight: var(--font-weight-bold); +} + +.help-intro p, +.help-callout p, +.help-section p, +.help-guide-card p, +.help-section li { + color: var(--color-text); + line-height: 1.5; +} + +.help-list, +.help-steps { + display: grid; + gap: var(--space-3); + margin: 0; + padding-inline-start: var(--space-5); +} + +.help-list strong, +.help-steps strong { + font-weight: var(--font-weight-bold); +} + +.register-title { + font-size: var(--font-size-xl); + margin: var(--space-2) 0; +} + +.register-section, +.register-actions, +.register-section-note { + padding: var(--surface-pad); +} + +.register-section h2 { + font-size: var(--font-size-xl); + margin-bottom: var(--space-1); +} + +.register-subtitle, +.register-email-note, +.register-footer, +.register-section-note { + color: var(--color-text-muted); + font-size: var(--font-size-sm); +} + +.register-grid-2 { + display: grid; + gap: var(--space-4); + grid-template-columns: repeat(2, minmax(10rem, 1fr)); +} + +.register-checkbox-row { + display: grid; + gap: var(--space-2); +} + +.register-actions .btn, +.register-submit { + width: 100%; +} + +.register-footer { + padding: 0 var(--space-2); +} + +.week-view-shell, +.month-view-shell { + display: grid; + gap: var(--space-1); +} + +.week-group-list, +.day-list { + display: grid; + gap: 0; +} + +.period-header { + 
align-items: center; + background: var(--color-surface-2); + border: 0; + display: grid; + grid-template-columns: 1fr auto 1fr; + min-height: var(--period-height); + padding: var(--space-1) var(--space-3); +} + +.period-header__nav { + align-items: center; + display: inline-flex; + gap: var(--space-2); + grid-column: 2; + justify-self: center; +} + +.period-header__title { + font-size: var(--font-size-md); + font-weight: var(--font-weight-medium); + line-height: 1; + max-width: 100%; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} + +.period-header__actions { + align-items: center; + display: inline-flex; + gap: var(--space-2); + grid-column: 3; + justify-self: end; +} + +.week-vacation-button { + align-items: center; + background: transparent; + border: var(--border-width-1) solid var(--color-border); + border-radius: var(--radius-sm); + color: var(--color-text); + cursor: pointer; + display: inline-flex; + font-size: var(--font-size-md); + font-weight: var(--font-weight-bold); + min-height: var(--control-h); + padding: 0 var(--space-3); +} + +.week-vacation-button.is-active { + background: var(--color-accent); + border-color: var(--color-warning); +} + +.kpi-bar { + display: grid; + gap: var(--space-2); + grid-template-columns: repeat(4, minmax(0, 1fr)); +} + +.kpi-bar__item { + align-items: center; + background: color-mix(in srgb, var(--color-surface) 92%, transparent); + border: 0; + display: flex; + gap: var(--space-2); + min-height: var(--kpi-height); + padding: 0 var(--space-3); +} + +.kpi-bar__label { + font-size: var(--font-size-sm); + font-weight: var(--font-weight-medium); +} + +.kpi-bar__value { + font-size: var(--font-size-lg); + font-weight: var(--font-weight-bold); +} + +.kpi-bar__item--editable { + justify-content: flex-start; +} + +.kpi-bar__edit { + align-items: center; + background: transparent; + border: 0; + cursor: pointer; + display: inline-flex; + margin-left: auto; + padding: 0; +} + +.kpi-bar__edit .dash-icon { + 
height: var(--icon-size-sm); + width: var(--icon-size-sm); +} + +.workhours-warning-banner { + align-items: center; + background: var(--color-danger-strong); + color: var(--color-text); + display: flex; + justify-content: center; + min-height: 2.25rem; + padding: 0 var(--space-5); + position: relative; +} + +.workhours-warning-text { + font-size: var(--font-size-lg); + font-weight: var(--font-weight-bold); + line-height: 1.1; + text-align: center; +} + +.workhours-warning-close { + background: transparent; + border: 0; + color: var(--color-text); + cursor: pointer; + font-size: var(--font-size-lg); + position: absolute; + right: var(--space-3); +} + +.dash-target-editor { + background: color-mix(in srgb, var(--color-surface) 92%, transparent); + border: var(--border-width-1) solid var(--color-border); + padding: var(--space-5); +} + +.day-row { + align-items: center; + background: color-mix(in srgb, var(--color-surface-4) 92%, transparent); + border: 0; + display: grid; + gap: var(--space-2); + grid-template-columns: var(--day-col) 1fr var(--actions-col); + min-height: var(--row-height-compact); + padding: 0 var(--space-3); +} + +.day-row--weekend { + background: color-mix(in srgb, var(--color-weekend) 96%, transparent); +} + +.day-row--today { + background: var(--color-day-today); +} + +.day-row__label { + font-size: var(--font-size-xl); + font-weight: var(--font-weight-bold); + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} + +.day-row__cells { + display: grid; + gap: var(--space-2); + grid-auto-columns: minmax(0, var(--chip-col)); + grid-auto-flow: column; + justify-content: start; +} + +.ui-chip { + align-items: center; + background: var(--color-chip-bg); + border: var(--border-width-1) solid var(--color-border-soft); + color: var(--color-text); + display: inline-flex; + font-size: var(--font-size-sm); + font-weight: var(--font-weight-medium); + min-height: var(--chip-h); + padding: 0 var(--space-3); + white-space: nowrap; +} + 
+.ui-chip--empty { + color: var(--color-text-muted); +} + +.day-row__actions { + align-items: center; + display: inline-flex; + gap: var(--space-2); + justify-content: flex-end; + line-height: 0; + padding-right: var(--space-2); +} + +.day-row__actions .dash-icon, +.day-row__add-menu .dash-icon, +.week-group-card-mobile .day-list .dash-icon { + height: var(--list-icon-size); + width: var(--list-icon-size); +} + +.day-row__actions .dash-icon[src$="/edit.svg"] { + transform: translateY(0.0625rem); +} + +.day-row__actions .dash-icon[src$="/delete.svg"] { + transform: translateY(0); +} + +.day-status-badge { + align-items: center; + background: transparent; + border: var(--border-width-1) solid var(--color-border); + border-radius: var(--radius-sm); + color: var(--color-text); + cursor: pointer; + display: inline-flex; + font-size: var(--font-size-md); + font-weight: var(--font-weight-bold); + justify-content: center; + min-height: var(--control-h); + min-width: var(--control-h); + padding: 0 var(--space-2); +} + +.day-status-badge.is-vacation { + background: var(--color-accent); + border-color: var(--color-warning); +} + +.day-status-badge.is-holiday { + background: color-mix(in srgb, var(--color-primary) 90%, transparent); +} + +.day-status-badge.is-sick { + background: color-mix(in srgb, var(--color-danger) 55%, var(--color-surface)); +} + +.day-status-badge.is-overtime { + background: color-mix(in srgb, var(--color-primary) 65%, var(--color-surface)); + border-color: var(--color-primary); +} + +.day-row__add-menu { + position: relative; +} + +.day-row__add-menu summary { + list-style: none; +} + +.day-row__add-menu summary::-webkit-details-marker { + display: none; +} + +.day-row__add-menu-panel { + box-shadow: var(--shadow-md); + background: var(--color-surface-2); + border: var(--border-width-1) solid var(--color-border); + display: grid; + gap: var(--space-2); + min-width: 13rem; + padding: var(--space-2); + position: absolute; + right: 0; + top: calc(100% + 
var(--space-1)); + z-index: var(--z-header); +} + +.day-row__add-menu-item { + align-items: center; + background: transparent; + border: 0; + color: var(--color-text); + cursor: pointer; + display: flex; + font-family: inherit; + font-size: var(--font-size-md); + justify-content: flex-start; + line-height: var(--line-height-tight); + min-height: var(--control-h); + padding: var(--space-2) var(--space-3); + text-decoration: none; + text-align: left; + width: 100%; + white-space: normal; +} + +.day-row__add-menu-item:hover { + background: var(--color-primary); + text-decoration: none; +} + +.week-group-list { + gap: var(--stack-3); +} + +.week-group-card-mobile { + background: color-mix(in srgb, var(--color-surface) 92%, transparent); + border: 0; + display: grid; + gap: var(--stack-2); + padding: var(--space-2); +} + +.week-group-header { + align-items: center; + background: color-mix(in srgb, var(--color-surface-2) 92%, transparent); + border: 0; + display: grid; + gap: var(--space-2); + grid-template-columns: 1fr auto auto; + min-height: var(--row-height-compact); + padding: var(--space-2) var(--space-3); +} + +.week-group-header__left { + align-items: center; + display: inline-flex; + font-size: var(--font-size-md); + gap: var(--space-2); + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} + +.week-group-header__meta { + font-size: var(--font-size-sm); + line-height: 1.2; + white-space: nowrap; +} + +.week-group-header__action { + justify-self: end; +} + +.week-group-card-mobile .day-list { + gap: var(--stack-1); + padding-inline-start: var(--space-2); +} + +.positive { + color: var(--color-success); +} + +.negative { + color: var(--color-danger-strong); +} + +.inline-form { + align-items: center; + display: inline-flex; + margin: 0; +} + +.day-row__add-menu-panel .inline-form { + display: block; + width: 100%; +} + +@media (max-width: 51.25em) { + :root { + --control-h: var(--control-h-mobile); + --header-icon-size: var(--list-icon-size); + 
--logo-size: 2rem; + } + + .landing-shell { + padding-block: var(--space-5) var(--space-6); + } + + .landing-hero__logo { + max-width: 10rem; + } + + .app-total-badge { + font-size: var(--font-size-sm); + } + + .topbar-toggle .segmented-toggle__item { + font-size: var(--font-size-md); + } + + .inline-grid, + .register-grid-2, + .settings-auth-row, + .settings-import-preview__header, + .settings-import-grid, + .admin-user-row, + .admin-recipient-grid, + .vacation-item { + grid-template-columns: 1fr; + } + + .settings-adjustment-meta { + place-items: start; + } + + .help-grid { + grid-template-columns: 1fr; + } + + .help-guides__grid { + grid-template-columns: 1fr; + } + + .contact-grid { + grid-template-columns: 1fr; + } + + .period-header { + margin-inline: calc(var(--gutter-tablet) * -1); + grid-template-columns: 1fr; + row-gap: var(--space-2); + padding: var(--space-2); + } + + .period-header__nav { + grid-column: 1; + } + + .period-header__actions { + grid-column: 1; + justify-self: center; + } + + .kpi-bar { + margin-inline: calc(var(--gutter-tablet) * -1); + grid-template-columns: repeat(2, minmax(0, 1fr)); + } + + .dashboard-page .day-list--week, + .month-page .week-group-list { + margin-inline: calc(var(--gutter-tablet) * -1); + } + + .month-page .week-group-card-mobile { + background: transparent; + padding: 0; + } + + .day-row { + grid-template-columns: 1fr auto; + row-gap: var(--space-2); + padding: var(--space-2) var(--space-3); + } + + .day-row__label { + grid-column: 1; + grid-row: 1; + } + + .day-row__actions { + grid-column: 2; + grid-row: 1; + padding-right: 0; + } + + .day-row__cells { + grid-column: 1 / 3; + grid-row: 2; + grid-template-columns: repeat(3, minmax(0, 1fr)); + } + + .day-row__cells.day-row__cells--empty { + grid-template-columns: 1fr; + } + + .week-group-header { + grid-template-columns: 1fr auto; + row-gap: var(--space-1); + } + + .week-group-header__left { + grid-column: 1; + grid-row: 1; + } + + .week-group-header__action { + 
grid-column: 2; + grid-row: 1; + } + + .week-group-header__meta { + grid-column: 1 / 3; + grid-row: 2; + font-size: var(--font-size-xs); + line-height: 1.2; + white-space: normal; + } + + .week-group-card-mobile .day-list { + padding-inline-start: 0; + } +} + +@media (max-width: 32.5em) { + .period-header, + .kpi-bar { + margin-inline: calc(var(--gutter-mobile) * -1); + } + + .dashboard-page .day-list--week, + .month-page .week-group-list { + margin-inline: calc(var(--gutter-mobile) * -1); + } + + .landing-title { + font-size: var(--font-size-xl); + } + + .landing-lead { + font-size: var(--font-size-md); + } + + .landing-cta-row .btn { + width: 100%; + } + + .weekday-grid, + .register-weekday-grid { + grid-template-columns: 1fr 1fr; + } + + .period-header__title { + font-size: var(--font-size-md); + text-align: center; + } + + .day-row__label { + font-size: var(--font-size-lg); + } +} + +@media (min-width: 51.26em) { + .month-page .week-group-list { + gap: var(--space-3); + } + + .month-page .week-group-card-mobile { + background: transparent; + border: 0; + gap: var(--space-2); + padding: 0; + } + + .month-page .week-group-header { + border: 0; + grid-template-columns: auto 1fr auto; + min-height: 2.625rem; + } + + .month-page .week-group-header__meta { + justify-self: end; + text-align: right; + } + + .month-page .day-list--month { + margin-left: 0; + padding-left: 0; + } +} diff --git a/app/static/css/layout.css b/app/static/css/layout.css new file mode 100644 index 0000000..2e76489 --- /dev/null +++ b/app/static/css/layout.css @@ -0,0 +1,183 @@ +.container { + margin: 0 auto; + padding: 0 var(--gutter-desktop); + width: min(var(--container-max), 100%); +} + +.page { + padding: var(--stack-2) 0 var(--space-7); +} + +.dashboard-page, +.month-page, +.settings-page, +.register-page, +.legal-page, +.contact-page { + padding-top: var(--space-0); +} + +.site-header { + background: var(--color-bg); + border-bottom: var(--border-width-1) solid var(--color-border); + 
position: sticky; + top: var(--space-0); + z-index: var(--z-header); +} + +.site-header .container { + padding-top: var(--header-pad-y); + padding-bottom: var(--header-pad-y); +} + +.app-topbar-inner { + align-items: center; + column-gap: var(--space-4); + display: grid; + grid-template-columns: auto 1fr; + min-height: calc(var(--control-h) + var(--space-1)); +} + +.site-header.is-auth-header .container { + padding-top: var(--header-pad-y-auth); + padding-bottom: var(--header-pad-y-auth); +} + +.app-topbar-inner.is-guest { + grid-template-columns: auto 1fr; +} + +.app-topbar-inner.is-guest .app-auth-nav { + justify-self: end; +} + +.site-header.is-auth-header .app-topbar-inner.is-guest { + min-height: 2.25rem; +} + +.app-topbar-inner.is-user .app-user-nav { + align-items: center; + display: inline-flex; + gap: var(--space-4); + justify-self: end; +} + +.site-footer { + border-top: var(--border-width-1) solid var(--color-border); + margin-top: var(--space-6); +} + +.site-footer-inner { + align-items: center; + display: flex; + gap: var(--space-4); + justify-content: space-between; + padding: var(--space-4) var(--page-content-inset) var(--space-5); +} + +.admin-version-badge { + background: color-mix(in srgb, var(--color-surface-2) 94%, transparent); + border: var(--border-width-1) solid var(--color-border); + bottom: var(--space-3); + color: var(--color-text-muted); + font-size: var(--font-size-xs); + font-weight: var(--font-weight-semibold); + padding: var(--space-1) var(--space-2); + position: fixed; + right: var(--space-3); + z-index: var(--z-header); +} + +.page-header { + display: grid; + gap: var(--space-2); + margin-bottom: var(--space-4); + padding-inline: var(--page-content-inset); +} + +.page-header__title { + font-size: var(--font-size-2xl); + font-weight: var(--font-weight-bold); +} + +.page-header__subtitle { + color: var(--color-text-muted); +} + +.app-page-actions-wrap { + padding-bottom: var(--space-2); +} + +.app-page-actions-wrap:empty { + display: 
none; +} + +.top-row { + margin-bottom: var(--space-2); + padding-inline: var(--page-content-inset); +} + +@media (max-width: 51.25em) { + .container { + padding: 0 var(--gutter-tablet); + } + + .app-topbar-inner.is-user { + gap: var(--header-pad-y); + grid-template-columns: 1fr; + grid-template-rows: auto auto; + } + + .app-topbar-inner.is-user .app-brand-wrap { + align-items: center; + display: flex; + grid-column: 1; + grid-row: 1; + justify-content: space-between; + width: 100%; + } + + .app-topbar-inner.is-user .app-user-nav { + align-items: center; + display: grid; + gap: var(--space-2); + grid-column: 1; + grid-row: 2; + grid-template-columns: 1fr auto 1fr; + justify-self: stretch; + width: 100%; + } + + .app-topbar-inner.is-user .app-user-nav .app-main-nav { + grid-column: 2; + justify-self: center; + } + + .app-topbar-inner.is-user .app-user-nav .app-icon-nav { + grid-column: 3; + justify-self: end; + } + + .app-topbar-inner.is-guest { + column-gap: var(--space-2); + grid-template-columns: auto 1fr; + } + + .admin-version-badge { + bottom: var(--space-2); + left: 50%; + right: auto; + transform: translateX(-50%); + } + + .site-footer-inner { + align-items: flex-start; + flex-direction: column; + } +} + +@media (max-width: 32.5em) { + .container { + padding: 0 var(--gutter-mobile); + } +} diff --git a/app/static/css/tokens.css b/app/static/css/tokens.css new file mode 100644 index 0000000..d1f794c --- /dev/null +++ b/app/static/css/tokens.css @@ -0,0 +1,101 @@ +:root { + --color-bg: #2c2d2f; + --color-surface: #27282a; + --color-surface-2: #1f2022; + --color-surface-3: #232426; + --color-surface-4: #2a2b2e; + --color-border: #34363a; + --color-border-soft: #2f3135; + --color-text: #f5f5f5; + --color-text-muted: #8f9298; + --color-link: #f5f5f5; + --color-primary: #3a3c40; + --color-primary-hover: #46494e; + --color-button-primary: #355d3a; + --color-button-primary-hover: #3f6f45; + --color-button-primary-border: #4d7f53; + --color-danger: #bc5252; + 
--color-danger-strong: #ff3b3b; + --color-success: #9ed7a7; + --color-success-strong: #2cd600; + --color-success-bg: #243427; + --color-warning: #c98f13; + --color-warning-bg: #6d5500; + --color-chip-bg: #202124; + --color-weekend: #22252b; + --color-day-today: #1a3f4a; + --color-accent: #9e7700; + --color-badge-bg: #f3f3f3; + --color-badge-text: #222326; + --color-workhours: #7f53d9; + --color-header-badge-bg: #1d1d1f; + --color-header-badge-label: #f5f5f5; + --color-header-badge-text: #f5f5f5; + + --font-family-base: "Atkinson Hyperlegible", "Segoe UI", sans-serif; + --font-size-xs: 0.75rem; + --font-size-sm: 0.875rem; + --font-size-md: 1rem; + --font-size-lg: 1.125rem; + --font-size-xl: 1.5rem; + --font-size-2xl: 2rem; + --font-weight-normal: 400; + --font-weight-medium: 500; + --font-weight-semibold: 600; + --font-weight-bold: 700; + --line-height-tight: 1.2; + + --space-0: 0; + --space-1: 0.125rem; + --space-2: 0.25rem; + --space-3: 0.5rem; + --space-4: 0.75rem; + --space-5: 1rem; + --space-6: 1.25rem; + --space-7: 1.5rem; + --space-8: 2rem; + --stack-1: 0.5rem; + --stack-2: 0.75rem; + --stack-3: 1rem; + + --radius-none: 0; + --radius-sm: 0.375rem; + --radius-md: 0.5rem; + + --border-width-1: 1px; + + --container-max: 100%; + --gutter-desktop: 1rem; + --gutter-tablet: 1rem; + --gutter-mobile: 0.875rem; + --page-content-inset: 1rem; + --surface-pad: 1rem; + --surface-pad-compact: 0.875rem; + --control-h-desktop: 2rem; + --control-h-mobile: 2rem; + --control-h: var(--control-h-desktop); + --list-icon-size: 2rem; + --icon-size-sm: var(--list-icon-size); + --icon-size: var(--list-icon-size); + --header-icon-size: var(--list-icon-size); + --logo-size: 1.375rem; + --header-pad-y: 0.625rem; + --header-pad-y-auth: 0.625rem; + --badge-height: 2rem; + --badge-pad-top: 0.2rem; + --badge-pad-bottom: 0.05rem; + --input-height: 2.5rem; + --chip-h: 2rem; + --row-height-compact: 2.625rem; + --kpi-height: 2.75rem; + --period-height: 3.25rem; + --day-col: 16rem; + --chip-col: 
8.5rem; + --actions-col: 7.5rem; + + --z-header: 20; + --z-modal: 40; + + --bp-md: 51.25em; + --bp-sm: 32.5em; +} diff --git a/app/static/css/utilities.css b/app/static/css/utilities.css new file mode 100644 index 0000000..97732b1 --- /dev/null +++ b/app/static/css/utilities.css @@ -0,0 +1,19 @@ +.muted { + color: var(--color-text-muted); +} + +.is-hidden, +.u-hidden, +[hidden] { + display: none !important; +} + +.u-stack-sm { + display: grid; + gap: var(--space-3); +} + +.u-stack-md { + display: grid; + gap: var(--space-4); +} diff --git a/app/static/dashboard.js b/app/static/dashboard.js new file mode 100644 index 0000000..85dce64 --- /dev/null +++ b/app/static/dashboard.js @@ -0,0 +1 @@ +// legacy script deprecated. Use /static/js/app.js only. diff --git a/app/static/icons/add.svg b/app/static/icons/add.svg new file mode 100644 index 0000000..48fe4d2 --- /dev/null +++ b/app/static/icons/add.svg @@ -0,0 +1,3 @@ + + + diff --git a/app/static/icons/apple-touch-icon-stage.png b/app/static/icons/apple-touch-icon-stage.png new file mode 100644 index 0000000..82230a2 Binary files /dev/null and b/app/static/icons/apple-touch-icon-stage.png differ diff --git a/app/static/icons/apple-touch-icon.png b/app/static/icons/apple-touch-icon.png new file mode 100644 index 0000000..e5e635e Binary files /dev/null and b/app/static/icons/apple-touch-icon.png differ diff --git a/app/static/icons/arrow.svg b/app/static/icons/arrow.svg new file mode 100644 index 0000000..5c2ae84 --- /dev/null +++ b/app/static/icons/arrow.svg @@ -0,0 +1,3 @@ + + + diff --git a/app/static/icons/batch.svg b/app/static/icons/batch.svg new file mode 100644 index 0000000..e925aca --- /dev/null +++ b/app/static/icons/batch.svg @@ -0,0 +1,3 @@ + + + diff --git a/app/static/icons/column.svg b/app/static/icons/column.svg new file mode 100644 index 0000000..e83dd29 --- /dev/null +++ b/app/static/icons/column.svg @@ -0,0 +1,3 @@ + + + diff --git a/app/static/icons/dark-mode.svg b/app/static/icons/dark-mode.svg new 
file mode 100644 index 0000000..4b5ee8b --- /dev/null +++ b/app/static/icons/dark-mode.svg @@ -0,0 +1,3 @@ + + + diff --git a/app/static/icons/delete.svg b/app/static/icons/delete.svg new file mode 100644 index 0000000..bd455e1 --- /dev/null +++ b/app/static/icons/delete.svg @@ -0,0 +1,3 @@ + + + diff --git a/app/static/icons/edit.svg b/app/static/icons/edit.svg new file mode 100644 index 0000000..7bb858f --- /dev/null +++ b/app/static/icons/edit.svg @@ -0,0 +1,3 @@ + + + diff --git a/app/static/icons/export.svg b/app/static/icons/export.svg new file mode 100644 index 0000000..b4d5beb --- /dev/null +++ b/app/static/icons/export.svg @@ -0,0 +1,3 @@ + + + diff --git a/app/static/icons/favicon-16.png b/app/static/icons/favicon-16.png new file mode 100644 index 0000000..be1a9a9 Binary files /dev/null and b/app/static/icons/favicon-16.png differ diff --git a/app/static/icons/favicon-32.png b/app/static/icons/favicon-32.png new file mode 100644 index 0000000..4a33b7b Binary files /dev/null and b/app/static/icons/favicon-32.png differ diff --git a/app/static/icons/favicon.ico b/app/static/icons/favicon.ico new file mode 100644 index 0000000..f1df61b Binary files /dev/null and b/app/static/icons/favicon.ico differ diff --git a/app/static/icons/help.svg b/app/static/icons/help.svg new file mode 100644 index 0000000..3aa4c4b --- /dev/null +++ b/app/static/icons/help.svg @@ -0,0 +1,11 @@ + + + + + diff --git a/app/static/icons/light-mode.svg b/app/static/icons/light-mode.svg new file mode 100644 index 0000000..9138a5b --- /dev/null +++ b/app/static/icons/light-mode.svg @@ -0,0 +1,3 @@ + + + diff --git a/app/static/icons/logo-mark.svg b/app/static/icons/logo-mark.svg new file mode 100644 index 0000000..a1c6f71 --- /dev/null +++ b/app/static/icons/logo-mark.svg @@ -0,0 +1,8 @@ + + + diff --git a/app/static/icons/logout.svg b/app/static/icons/logout.svg new file mode 100644 index 0000000..4587562 --- /dev/null +++ b/app/static/icons/logout.svg @@ -0,0 +1,3 @@ + + + diff --git 
a/app/static/icons/pwa-192.png b/app/static/icons/pwa-192.png new file mode 100644 index 0000000..f59522a Binary files /dev/null and b/app/static/icons/pwa-192.png differ diff --git a/app/static/icons/pwa-512.png b/app/static/icons/pwa-512.png new file mode 100644 index 0000000..8ddc950 Binary files /dev/null and b/app/static/icons/pwa-512.png differ diff --git a/app/static/icons/pwa-stage-192.png b/app/static/icons/pwa-stage-192.png new file mode 100644 index 0000000..255a758 Binary files /dev/null and b/app/static/icons/pwa-stage-192.png differ diff --git a/app/static/icons/pwa-stage-512.png b/app/static/icons/pwa-stage-512.png new file mode 100644 index 0000000..ca7ad15 Binary files /dev/null and b/app/static/icons/pwa-stage-512.png differ diff --git a/app/static/icons/settings.svg b/app/static/icons/settings.svg new file mode 100644 index 0000000..14d2752 --- /dev/null +++ b/app/static/icons/settings.svg @@ -0,0 +1,3 @@ + + + diff --git a/app/static/js/app.js b/app/static/js/app.js new file mode 100644 index 0000000..390fda6 --- /dev/null +++ b/app/static/js/app.js @@ -0,0 +1,23 @@ +import { initCsrf } from './components/csrf.js'; +import { initFlash } from './components/flash.js'; +import { initForms } from './components/forms.js?v=20260322a'; +import { initModal } from './components/modal.js'; +import { initDashboard } from './components/dashboard.js'; +import { initSettingsSections } from './components/settings-sections.js'; + +function initApp() { + initCsrf(); + initFlash(); + initForms(); + initModal(); + initDashboard(); + initSettingsSections(); +} + +window.__stundenfuchsInitApp = initApp; + +if (document.readyState === 'loading') { + document.addEventListener('DOMContentLoaded', initApp); +} else { + initApp(); +} diff --git a/app/static/js/components/csrf.js b/app/static/js/components/csrf.js new file mode 100644 index 0000000..122e467 --- /dev/null +++ b/app/static/js/components/csrf.js @@ -0,0 +1,17 @@ +export function initCsrf() { + const 
// csrf.js — propagate the server-rendered CSRF token to every POST form
// that is missing one, so all forms on the page stay submittable.
export function initCsrf() {
  const seed = document.querySelector('input[name="csrf_token"]');
  if (!seed || !seed.value) {
    return;
  }
  const token = seed.value;

  for (const form of document.querySelectorAll('form[method="post"]')) {
    if (form.querySelector('input[name="csrf_token"]')) {
      continue;
    }
    const hidden = document.createElement('input');
    hidden.type = 'hidden';
    hidden.name = 'csrf_token';
    hidden.value = token;
    form.appendChild(hidden);
  }
}

// dashboard.js — touch/swipe helpers.

// True when a touch landed on (or inside) an element with its own
// interaction; swipe handling must not hijack taps on those.
function isInteractiveTouchTarget(target) {
  if (!target || typeof target.closest !== 'function') {
    return false;
  }
  return Boolean(target.closest('a, button, input, select, textarea, summary, details, label, form'));
}

// Wire horizontal swipe gestures on `target`: swiping left navigates to
// nextUrl, swiping right to prevUrl. Vertical scrolls, slow drags, and
// taps on interactive children are ignored.
function attachSwipeNavigation(target, prevUrl, nextUrl) {
  if (!target || !prevUrl || !nextUrl) {
    return;
  }

  const MIN_DISTANCE = 60;          // minimum horizontal travel in px
  const MAX_VERTICAL_RATIO = 1.25;  // horizontal must exceed vertical * ratio
  const MAX_DURATION_MS = 900;      // slower drags are not swipes

  let originX = 0;
  let originY = 0;
  let startedAt = 0;
  let tracking = false;
  let navigating = false;

  target.addEventListener('touchstart', (event) => {
    if (event.touches.length !== 1 || isInteractiveTouchTarget(event.target)) {
      tracking = false;
      return;
    }
    const touch = event.touches[0];
    originX = touch.clientX;
    originY = touch.clientY;
    startedAt = Date.now();
    tracking = true;
  }, { passive: true });

  target.addEventListener('touchend', (event) => {
    if (!tracking || navigating || event.changedTouches.length !== 1) {
      tracking = false;
      return;
    }
    tracking = false;

    const touch = event.changedTouches[0];
    const deltaX = touch.clientX - originX;
    const deltaY = touch.clientY - originY;
    const horizontal = Math.abs(deltaX);
    const vertical = Math.abs(deltaY);
    const elapsed = Date.now() - startedAt;

    const tooSlow = elapsed > MAX_DURATION_MS;
    const tooShort = horizontal < MIN_DISTANCE;
    const tooVertical = horizontal <= vertical * MAX_VERTICAL_RATIO;
    if (tooSlow || tooShort || tooVertical) {
      return;
    }

    navigating = true;
    window.location.assign(deltaX < 0 ? nextUrl : prevUrl);
  }, { passive: true });
}
// Enable swipe navigation only on touch-capable devices.
function initSwipeNavigation() {
  const coarsePointer = window.matchMedia('(pointer: coarse)').matches;
  const touchCapable = 'ontouchstart' in window;
  if (!coarsePointer && !touchCapable) {
    return;
  }
  for (const node of document.querySelectorAll('[data-component="swipe-nav"]')) {
    attachSwipeNavigation(node, node.dataset.prevUrl, node.dataset.nextUrl);
  }
}

// Show the work-hours warning banner unless this exact warning was
// dismissed before; dismissal is keyed per warning and kept in localStorage.
function initWarningBanner() {
  const banner = document.querySelector('[data-component="workhours-warning"]');
  if (!banner) {
    return;
  }

  const warningKey = banner.getAttribute('data-workhours-warning') || '';
  const storageKey = warningKey ? `workhours-warning-dismissed:${warningKey}` : '';

  if (storageKey && window.localStorage.getItem(storageKey) === '1') {
    banner.remove();
    return;
  }

  const closeButton = banner.querySelector('[data-action="warning-close"]');
  if (!closeButton) {
    return;
  }
  closeButton.addEventListener('click', () => {
    banner.remove();
    if (storageKey) {
      window.localStorage.setItem(storageKey, '1');
    }
  });
}

// Toggle the weekly-target editor and guard its form submit with a
// confirm dialog spelling out the new value and its scope.
function initWeeklyTargetEditor() {
  const form = document.querySelector('.weekly-target-form');
  const editor = document.querySelector('[data-component="weekly-target-editor"]');
  const toggles = document.querySelectorAll('.js-toggle-weekly-target-editor');

  if (form && toggles.length && editor) {
    toggles.forEach((toggle) => {
      toggle.addEventListener('click', () => editor.classList.toggle('is-hidden'));
    });
  }

  if (!form) {
    return;
  }

  // Map avoids prototype-chain surprises a plain object lookup could hit.
  const scopeLabels = new Map([
    ['current_week', 'Nur die aktuell ausgewählte Woche'],
    ['all_weeks', 'Alle Wochen (Vergangenheit und Zukunft)'],
    ['from_current_week', 'Aktuelle Woche und alle zukünftigen Wochen'],
  ]);

  form.addEventListener('submit', (event) => {
    const scopeSelect = form.querySelector("select[name='scope']");
    const hoursInput = form.querySelector("input[name='weekly_target_hours']");
    if (!scopeSelect || !hoursInput) {
      return;
    }

    const scopeText = scopeLabels.get(scopeSelect.value) || '';
    if (!scopeText) {
      return;
    }

    const question = `Wochen-Soll wirklich ändern?\nNeuer Wert: ${hoursInput.value} h\nGültigkeit: ${scopeText}`;
    if (!window.confirm(question)) {
      event.preventDefault();
      return;
    }
    if (editor) {
      editor.classList.add('is-hidden');
    }
  });
}
// Entry point for all dashboard behaviours (called from app.js).
export function initDashboard() {
  initSwipeNavigation();
  initWarningBanner();
  initWeeklyTargetEditor();
}

// flash.js — dismissible flash messages via click delegation: a child
// carrying data-action="flash-close" removes the whole flash element.
export function initFlash() {
  for (const flash of document.querySelectorAll('[data-component="flash"]')) {
    flash.addEventListener('click', ({ target }) => {
      if (target instanceof HTMLElement && target.dataset.action === 'flash-close') {
        flash.remove();
      }
    });
  }
}

// forms.js — re-fetch the current URL and swap in the refreshed header,
// page-action bar, and main content while preserving scroll position.
// Re-runs the global init hook so the freshly inserted DOM gets re-bound.
async function refreshCurrentViewPreservingScroll() {
  const { scrollX, scrollY } = window;
  const response = await fetch(window.location.href, {
    credentials: 'same-origin',
    headers: { 'X-Requested-With': 'fetch' },
  });
  if (!response.ok) {
    throw new Error(`refresh_failed_${response.status}`);
  }

  const nextDocument = new DOMParser().parseFromString(await response.text(), 'text/html');
  for (const selector of ['.site-header', '.app-page-actions-wrap', 'main.page']) {
    const currentNode = document.querySelector(selector);
    const nextNode = nextDocument.querySelector(selector);
    if (currentNode && nextNode) {
      currentNode.replaceWith(nextNode);
    }
  }

  window.scrollTo({ left: scrollX, top: scrollY });
  if (typeof window.__stundenfuchsInitApp === 'function') {
    window.__stundenfuchsInitApp();
  }
}
// --- time helpers -----------------------------------------------------------

// Parse "HH:MM" (exactly two digits each) into minutes since midnight;
// returns null for anything else. NOTE(review): no range check — "29:99"
// parses to 1839; presumably upstream <input type="time"> guarantees range.
function parseTimeToMinutes(value) {
  const match = /^(\d{2}):(\d{2})$/.exec(value || '');
  return match ? Number(match[1]) * 60 + Number(match[2]) : null;
}

// Render minutes-since-midnight as "HH:MM", clamped into [00:00, 23:59].
// Non-numeric input is treated as 0.
function formatMinutesToTime(value) {
  const clamped = Math.max(0, Math.min(24 * 60 - 1, Number(value) || 0));
  const hours = String(Math.floor(clamped / 60)).padStart(2, '0');
  const mins = String(clamped % 60).padStart(2, '0');
  return `${hours}:${mins}`;
}

// Minimum break (minutes) suggested for a gross presence span:
// over 9h -> 45, over 6h -> 30, otherwise 0.
function requiredBreakMinutesForSpan(spanMinutes) {
  if (spanMinutes > 9 * 60) {
    return 45;
  }
  return spanMinutes > 6 * 60 ? 30 : 0;
}

// Same thresholds expressed in net working minutes (span minus the break
// the threshold itself mandates): over 8h15 -> 45, over 5h30 -> 30, else 0.
function requiredBreakMinutesForNetMinutes(netMinutes) {
  if (netMinutes > 9 * 60 - 45) {
    return 45;
  }
  return netMinutes > 6 * 60 - 30 ? 30 : 0;
}

// --- form behaviours --------------------------------------------------------

// "Full day" buttons: fill start/end so the entry nets the configured
// full-day minutes, adding the applicable break on top.
function initFullDayButtons() {
  document.querySelectorAll('[data-action="entry-apply-full-day"]').forEach((button) => {
    if (!(button instanceof HTMLButtonElement) || button.dataset.fullDayBound === 'true') {
      return;
    }
    button.dataset.fullDayBound = 'true';

    const form = button.closest('form[data-component="break-rules-form"]');
    if (!(form instanceof HTMLFormElement)) {
      return;
    }
    const startInput = form.querySelector('[data-break-input="start"]');
    const endInput = form.querySelector('[data-break-input="end"]');
    if (!(startInput instanceof HTMLInputElement) || !(endInput instanceof HTMLInputElement)) {
      return;
    }

    button.addEventListener('click', () => {
      const netMinutes = Number(form.dataset.fullDayNetMinutes || '');
      if (!Number.isFinite(netMinutes) || netMinutes <= 0) {
        return;
      }

      const fallbackStart = form.dataset.defaultStartTime || '08:30';
      const startMinutes = parseTimeToMinutes(startInput.value) ?? parseTimeToMinutes(fallbackStart);
      if (startMinutes === null) {
        return;
      }

      const autoBreak = form.dataset.autoBreakEnabled === 'true';
      const configured = Number(form.dataset.defaultBreakMinutes || '0');
      const breakMinutes = autoBreak
        ? requiredBreakMinutesForNetMinutes(netMinutes)
        : Math.max(0, configured);

      startInput.value = formatMinutesToTime(startMinutes);
      endInput.value = formatMinutesToTime(startMinutes + netMinutes + breakMinutes);
    });
  });
}

// Auto/manual break handling: in auto mode the break field tracks the
// statutory minimum for the entered span; any manual edit of the break
// field switches to manual mode and stops auto-updates.
function initBreakRuleForms() {
  document.querySelectorAll('form[data-component="break-rules-form"]').forEach((form) => {
    if (form.dataset.breakBound === 'true') {
      return;
    }
    form.dataset.breakBound = 'true';

    const autoBreakEnabled = form.dataset.autoBreakEnabled === 'true';
    const modeInput = form.querySelector('[data-break-mode]');
    const startInput = form.querySelector('[data-break-input="start"]');
    const endInput = form.querySelector('[data-break-input="end"]');
    const breakInput = form.querySelector('[data-break-input="minutes"]');
    const statusNode = form.querySelector('[data-break-status]');
    const resetButton = form.querySelector('[data-action="break-reset-auto"]');

    const wired = modeInput instanceof HTMLInputElement
      && startInput instanceof HTMLInputElement
      && endInput instanceof HTMLInputElement;
    if (!wired) {
      return;
    }

    const updateStatus = () => {
      if (!statusNode) {
        return;
      }
      statusNode.textContent = modeInput.value === 'manual'
        ? 'Pause manuell gesetzt. Gesetzliche Mindestpause wird nicht automatisch überschrieben.'
        : 'Gesetzliche Mindestpause nach deutschem Arbeitsrecht wird automatisch vorgeschlagen.';
    };

    const applyAutoBreak = () => {
      if (!(breakInput instanceof HTMLInputElement)) {
        return;
      }
      const start = parseTimeToMinutes(startInput.value);
      const end = parseTimeToMinutes(endInput.value);
      if (start === null || end === null || end <= start) {
        return;
      }
      modeInput.value = 'auto';
      breakInput.value = String(requiredBreakMinutesForSpan(end - start));
      updateStatus();
    };

    const onTimeInput = () => {
      if (modeInput.value === 'auto') {
        applyAutoBreak();
      }
    };
    startInput.addEventListener('input', onTimeInput);
    endInput.addEventListener('input', onTimeInput);

    if (breakInput instanceof HTMLInputElement) {
      breakInput.addEventListener('input', () => {
        modeInput.value = 'manual';
        updateStatus();
      });
    }
    if (resetButton) {
      resetButton.addEventListener('click', applyAutoBreak);
    }

    if (!autoBreakEnabled) {
      return;
    }
    if (!modeInput.value) {
      modeInput.value = 'auto';
    }
    if (modeInput.value === 'auto') {
      applyAutoBreak();
    } else {
      updateStatus();
    }
  });
}

// Submit marked forms via fetch and refresh the view in place; on any
// failure, fall back to a full page reload.
function initAsyncRefreshForms() {
  document.querySelectorAll('form[data-async-refresh="view"]').forEach((form) => {
    if (form.dataset.asyncBound === 'true') {
      return;
    }
    form.dataset.asyncBound = 'true';

    form.addEventListener('submit', async (event) => {
      event.preventDefault();
      const submitter = event.submitter instanceof HTMLElement ? event.submitter : null;
      if (submitter) {
        submitter.setAttribute('disabled', 'disabled');
      }
      try {
        const response = await fetch(form.action, {
          method: 'POST',
          body: new FormData(form),
          credentials: 'same-origin',
          headers: { 'X-Requested-With': 'fetch' },
        });
        if (!response.ok) {
          throw new Error(`submit_failed_${response.status}`);
        }
        await refreshCurrentViewPreservingScroll();
      } catch (error) {
        // Best-effort fallback: reload the whole page.
        window.location.assign(window.location.href);
      } finally {
        if (submitter) {
          submitter.removeAttribute('disabled');
        }
      }
    });
  });
}

// When the auto-break toggle is on, the fixed-minutes field is irrelevant
// and gets disabled.
function initBreakSettingsForms() {
  document.querySelectorAll('form[data-component="break-settings-form"]').forEach((form) => {
    if (form.dataset.breakSettingsBound === 'true') {
      return;
    }
    form.dataset.breakSettingsBound = 'true';

    const toggle = form.querySelector('[data-break-settings-toggle]');
    const minutesInput = form.querySelector('[data-break-settings-minutes]');
    if (!(toggle instanceof HTMLInputElement) || !(minutesInput instanceof HTMLInputElement)) {
      return;
    }

    const syncDisabledState = () => {
      minutesInput.disabled = toggle.checked;
    };
    toggle.addEventListener('change', syncDisabledState);
    syncDisabledState();
  });
}

// Entry point for all form behaviours (called from app.js).
export function initForms() {
  document.querySelectorAll('form[data-confirm]').forEach((form) => {
    form.addEventListener('submit', (event) => {
      const message = form.getAttribute('data-confirm') || 'Aktion wirklich ausführen?';
      if (!window.confirm(message)) {
        event.preventDefault();
      }
    });
  });
  initAsyncRefreshForms();
  initFullDayButtons();
  initBreakRuleForms();
  initBreakSettingsForms();
}
// modal.js — open/close handling for [data-component="modal"] dialogs.
export function initModal() {
  document.querySelectorAll('[data-component="modal"]').forEach((modal) => {
    // Delegated close: any descendant with data-action="modal-close".
    modal.addEventListener('click', (event) => {
      const target = event.target;
      if (target instanceof HTMLElement && target.dataset.action === 'modal-close') {
        modal.setAttribute('hidden', 'hidden');
      }
    });
  });

  document.querySelectorAll('[data-action="modal-open"]').forEach((trigger) => {
    trigger.addEventListener('click', () => {
      const id = trigger.getAttribute('data-target');
      const modal = id ? document.getElementById(id) : null;
      if (modal) {
        modal.removeAttribute('hidden');
      }
    });
  });
}

// settings-sections.js — keep grouped <details> sections in sync on
// desktop-sized viewports and reopen the last-used section after a form
// submit (via sessionStorage) or hash navigation.
const STORAGE_KEY = 'stundenfuchs:settingsSection';
const DESKTOP_SYNC_MEDIA_QUERY = '(min-width: 51.26em)';

// Group sync only applies on desktop; on mobile each section is independent.
function shouldSyncGroups() {
  return window.matchMedia(DESKTOP_SYNC_MEDIA_QUERY).matches;
}

// Mirror one section's open state onto every peer sharing its sync group.
function syncGroupState(section, isOpen) {
  const groupName = section.dataset.syncGroup || '';
  if (!groupName || !shouldSyncGroups()) {
    return;
  }
  const selector = `[data-component="settings-section"][data-sync-group="${groupName}"]`;
  for (const peer of document.querySelectorAll(selector)) {
    if (peer instanceof HTMLDetailsElement && peer !== section) {
      peer.open = isOpen;
    }
  }
}

// Open the <details> with the given id and propagate to its sync group.
function openSectionById(sectionId) {
  if (!sectionId) {
    return;
  }
  const target = document.getElementById(sectionId);
  if (!(target instanceof HTMLDetailsElement)) {
    return;
  }
  target.open = true;
  syncGroupState(target, true);
}

// Entry point (called from app.js).
export function initSettingsSections() {
  const sections = Array.from(document.querySelectorAll('[data-component="settings-section"]'));
  if (!sections.length) {
    return;
  }

  const hashTarget = window.location.hash ? window.location.hash.slice(1) : '';
  const storedTarget = window.sessionStorage.getItem(STORAGE_KEY) || '';
  openSectionById(hashTarget || storedTarget);
  if (storedTarget) {
    window.sessionStorage.removeItem(STORAGE_KEY);
  }

  for (const section of sections) {
    if (!(section instanceof HTMLDetailsElement) || !section.id) {
      continue;
    }
    section.addEventListener('toggle', () => syncGroupState(section, section.open));
    section.querySelectorAll('form').forEach((form) => {
      form.addEventListener('submit', () => {
        window.sessionStorage.setItem(STORAGE_KEY, section.id);
      });
    });
  }
}
window.location.hash.slice(1) : ''; + const storedTarget = window.sessionStorage.getItem(STORAGE_KEY) || ''; + openSectionById(hashTarget || storedTarget); + if (storedTarget) { + window.sessionStorage.removeItem(STORAGE_KEY); + } + + sections.forEach((section) => { + if (!(section instanceof HTMLDetailsElement) || !section.id) { + return; + } + section.addEventListener('toggle', () => { + syncGroupState(section, section.open); + }); + section.querySelectorAll('form').forEach((form) => { + form.addEventListener('submit', () => { + window.sessionStorage.setItem(STORAGE_KEY, section.id); + }); + }); + }); +} diff --git a/app/static/manifest.webmanifest b/app/static/manifest.webmanifest new file mode 100644 index 0000000..e451dce --- /dev/null +++ b/app/static/manifest.webmanifest @@ -0,0 +1,23 @@ +{ + "name": "Stundenfuchs", + "short_name": "Stundenfuchs", + "id": "/", + "start_url": "/dashboard", + "scope": "/", + "display": "standalone", + "background_color": "#2c2d2f", + "theme_color": "#2c2d2f", + "lang": "de-DE", + "icons": [ + { + "src": "/static/icons/pwa-192.png", + "sizes": "192x192", + "type": "image/png" + }, + { + "src": "/static/icons/pwa-512.png", + "sizes": "512x512", + "type": "image/png" + } + ] +} diff --git a/app/static/styles.css b/app/static/styles.css new file mode 100644 index 0000000..2903e8c --- /dev/null +++ b/app/static/styles.css @@ -0,0 +1 @@ +/* legacy stylesheet deprecated. Use /static/css/app.css only. */ diff --git a/app/static/theme.js b/app/static/theme.js new file mode 100644 index 0000000..85dce64 --- /dev/null +++ b/app/static/theme.js @@ -0,0 +1 @@ +// legacy script deprecated. Use /static/js/app.js only. 
diff --git a/app/templates/base.html b/app/templates/base.html new file mode 100644 index 0000000..be38f71 --- /dev/null +++ b/app/templates/base.html @@ -0,0 +1,46 @@ + + + + + + + + + + + {% set __page_title %}{% block title %}{{ app_name }}{% endblock %}{% endset %} + + {% if __page_title | trim == app_name %}{{ app_title }}{% else %}{{ app_title }} - {{ __page_title | trim }}{% endif %} + + + + + + + + + {% block head_extra %}{% endblock %} + + + {% include "partials/header.html" %} +
{%- block page_actions -%}{%- endblock -%}
+
+ {% include "partials/flash.html" %} + {% block content %}{% endblock %} +
+ {% include "partials/footer.html" %} + {% include "partials/version_badge.html" %} + + {% block scripts %}{% endblock %} + + diff --git a/app/templates/pages/bulk_entry.html b/app/templates/pages/bulk_entry.html new file mode 100644 index 0000000..01f7f88 --- /dev/null +++ b/app/templates/pages/bulk_entry.html @@ -0,0 +1,65 @@ +{% extends "base.html" %} +{% from "ui/card.html" import card %} +{% from "ui/button.html" import button, link_button %} +{% from "ui/form_field.html" import input_field, select_field, textarea_field %} +{% from "ui/page_header.html" import page_header %} +{% block title %}Mehrfacheingabe{% endblock %} +{% block content %} + {{ page_header('Mehrere Tage / Wochen bearbeiten', 'Zeitraum und Wochentage auswählen, dann Zeiten gesammelt für alle passenden Tage setzen.') }} + {% call card('form-card full-width') %} +
+ + +
+ {{ input_field('Von', 'from_date', type='date', value=from_date, required=true) }} + {{ input_field('Bis', 'to_date', type='date', value=to_date, required=true) }} +
+
+ Wochentage +
+ {% for option in weekday_options %} + + {% endfor %} +
+
+
+ {{ input_field('Beginn', 'start_time', type='time', value=start_time, required=true, attrs='data-break-input=\"start\"') }} + {{ input_field('Ende', 'end_time', type='time', value=end_time, required=true, attrs='data-break-input=\"end\"') }} +
+
+ {{ input_field('Pause (Minuten)', 'break_minutes', type='number', value=break_minutes, required=true, attrs='min="0" step="1" data-break-input=\"minutes\"') }} + {{ select_field('Modus', 'mode', [ + {'value': 'only_missing', 'label': 'Nur leere Tage anlegen'}, + {'value': 'upsert', 'label': 'Bestehende Einträge aktualisieren + fehlende anlegen'} + ], bulk_mode, required=true) }} +
+ {% if user.automatic_break_rules_enabled %} +
+

+ {% if break_mode == 'manual' %} + Pause manuell gesetzt. Gesetzliche Mindestpause wird nicht automatisch überschrieben. + {% else %} + Gesetzliche Mindestpause nach deutschem Arbeitsrecht wird automatisch vorgeschlagen. + {% endif %} +

+ +
+ {% endif %} + {{ textarea_field('Notiz (optional)', 'notes', notes, 3) }} + +
+ {% endcall %} +{% endblock %} diff --git a/app/templates/pages/contact.html b/app/templates/pages/contact.html new file mode 100644 index 0000000..1533c09 --- /dev/null +++ b/app/templates/pages/contact.html @@ -0,0 +1,76 @@ +{% extends "base.html" %} +{% from "ui/button.html" import button %} +{% from "ui/card.html" import card %} +{% from "ui/page_header.html" import page_header %} + +{% block title %}Kontakt{% endblock %} +{% block page_class %}contact-page{% endblock %} + +{% block content %} + {{ page_header("Kontakt", "Schicke eine Nachricht bei Problemen, Fehlermeldungen oder Funktionswünschen. Antworten erfolgen per E-Mail.") }} + +
+ {% call card('contact-card') %} +
+ + + + +
+ + +
+ + + + + + + +

+ Bitte keine sensiblen Passwörter oder Zugangsdaten mitsenden. Anhänge sind in dieser ersten Version noch nicht möglich. +

+ + {{ button("Nachricht senden", type="submit") }} +
+ {% endcall %} + + {% call card('contact-card contact-info-card') %} +

Wofür ist das gedacht?

+ +

+ Nachrichten werden intern als Ticket gespeichert. So gehen Rückmeldungen nicht verloren und können strukturiert bearbeitet werden. +

+

+ Hinweise zu Anbieter und Datenschutz findest du ebenfalls unten im Footer über Impressum und Datenschutz. +

+ {% endcall %} +
+{% endblock %} diff --git a/app/templates/pages/dashboard.html b/app/templates/pages/dashboard.html new file mode 100644 index 0000000..1c9c84d --- /dev/null +++ b/app/templates/pages/dashboard.html @@ -0,0 +1,45 @@ +{% extends "base.html" %} +{% from "ui/day_row.html" import day_row with context %} +{% from "ui/kpi_bar.html" import kpi_bar with context %} +{% from "ui/week_header_bar.html" import week_header_bar with context %} +{% from "ui/icon_button.html" import icon_link with context %} +{% from "ui/warning_components.html" import workhours_target_warning_banner with context %} +{% block title %}Wochenansicht{% endblock %} +{% block body_class %}dashboard-theme{% endblock %} +{% block page_class %}dashboard-page{% endblock %} +{% block content %} + {% set return_to = request.url.path ~ ('?' ~ request.url.query if request.url.query else '') %} + +
+ {% call week_header_bar('/dashboard?date=' ~ previous_week.isoformat(), '/dashboard?date=' ~ next_week.isoformat(), 'KW ' ~ week.week_start.isocalendar()[1] ~ ' (' ~ week.week_start.strftime('%d.%m.') ~ ' - ' ~ week.week_end.strftime('%d.%m.%Y') ~ ')') %} +
+ + + + + +
+ {{ icon_link('/entry/new?date=' ~ selected_date.isoformat(), '/static/icons/add.svg', 'Tag hinzufügen') }} + {{ icon_link('/bulk-entry?from=' ~ week.week_start.isoformat() ~ '&to=' ~ week.week_end.isoformat(), '/static/icons/batch.svg', 'Mehrere Tage bearbeiten') }} + {{ icon_link('/export', '/static/icons/export.svg', 'Export') }} + {% endcall %} + + {{ kpi_bar([ + {'label': 'IST', 'value': ('%.2f'|format(week.weekly_ist / 60) )|replace('.00', '')}, + {'label': 'SOLL', 'value': ('%.2f'|format(week.weekly_soll / 60))|replace('.00', '')}, + {'label': 'DELTA', 'value': ('%.2f'|format(week.weekly_delta / 60))|replace('.00', ''), 'value_class': 'negative' if week.weekly_delta < 0 else 'positive'}, + {'label': 'KUMULIERT', 'value': ('%.2f'|format(week.cumulative_delta / 60))|replace('.00', ''), 'value_class': 'negative' if week.cumulative_delta < 0 else 'positive'} + ], 'kpi-bar--week') }} + + {{ workhours_target_warning_banner(workhours_target_warning) }} + +
+ {% for day in week.days %} + {{ day_row(day, csrf_token, weekday_name_de(day.date) ~ ', ' ~ day.date.strftime('%d.%m.%Y'), return_to, 'week') }} + {% endfor %} +
+
+{% endblock %} diff --git a/app/templates/pages/day_status_form.html b/app/templates/pages/day_status_form.html new file mode 100644 index 0000000..9096c29 --- /dev/null +++ b/app/templates/pages/day_status_form.html @@ -0,0 +1,49 @@ +{% extends "base.html" %} +{% from "ui/card.html" import card %} +{% from "ui/button.html" import link_button %} +{% from "ui/page_header.html" import page_header %} +{% block title %}{{ title }}{% endblock %} +{% block content %} + {{ page_header(title, selected_date.strftime('%d.%m.%Y')) }} + {% call card('form-card') %} +
+ {% if has_entry %} +

An diesem Tag ist bereits reguläre Arbeitszeit eingetragen. Bitte bearbeite zuerst den Zeiteintrag.

+ + {% else %} +

+ {% if is_active %} + {{ title }} ist aktuell gesetzt. Mit dem Speichern entfernst du diesen Status wieder. + {% else %} + {% if current_status_label %} + Aktuell ist {{ current_status_label }} gesetzt. Mit dem Speichern wird dieser Status ersetzt. + {% else %} + Hier kannst du diesen Status direkt für den ausgewählten Tag setzen oder wieder entfernen. + {% endif %} + {% endif %} +

+ {% if day_overtime_adjustment_minutes %} +

+ Zusätzlicher Stundenausgleich aktiv: + {{ '+' if day_overtime_adjustment_minutes > 0 else '' }}{{ minutes_to_hhmm(day_overtime_adjustment_minutes) }} +

+ {% endif %} +
+ + + + {% if status_key != 'vacation' %} + + {% endif %} + +
+ {% endif %} +
+ {% endcall %} +{% endblock %} diff --git a/app/templates/pages/email_verification_resend.html b/app/templates/pages/email_verification_resend.html new file mode 100644 index 0000000..2141e85 --- /dev/null +++ b/app/templates/pages/email_verification_resend.html @@ -0,0 +1,19 @@ +{% extends "base.html" %} +{% from "ui/card.html" import card %} +{% from "ui/button.html" import button %} +{% from "ui/form_field.html" import input_field %} +{% from "ui/page_header.html" import page_header %} +{% block title %}E-Mail bestätigen{% endblock %} +{% block content %} + {{ page_header("Bestätigungslink anfordern") }} + {% call card('auth-card') %} +
+ + {{ input_field('E-Mail-Adresse', 'email', type='email', required=true, autocomplete='username') }} + {{ button('Link senden', type='submit') }} +
+

+ Zur Anmeldung +

+ {% endcall %} +{% endblock %} diff --git a/app/templates/pages/email_verification_result.html b/app/templates/pages/email_verification_result.html new file mode 100644 index 0000000..987158f --- /dev/null +++ b/app/templates/pages/email_verification_result.html @@ -0,0 +1,21 @@ +{% extends "base.html" %} +{% from "ui/card.html" import card %} +{% from "ui/page_header.html" import page_header %} +{% from "ui/flash.html" import alert %} +{% block title %}E-Mail bestätigen{% endblock %} +{% block content %} + {{ page_header("E-Mail-Bestätigung") }} + {% call card('auth-card') %} + {% if success %} + {{ alert(message, 'success') }} + {% else %} + {{ alert(message, 'error') }} + {% endif %} +

+ Neuen Bestätigungslink anfordern +

+

+ Zur Anmeldung +

+ {% endcall %} +{% endblock %} diff --git a/app/templates/pages/entry_form.html b/app/templates/pages/entry_form.html new file mode 100644 index 0000000..ccb620d --- /dev/null +++ b/app/templates/pages/entry_form.html @@ -0,0 +1,50 @@ +{% extends "base.html" %} +{% from "ui/card.html" import card %} +{% from "ui/button.html" import button, link_button %} +{% from "ui/form_field.html" import input_field, textarea_field %} +{% from "ui/page_header.html" import page_header %} +{% block title %}{{ title }}{% endblock %} +{% block content %} + {{ page_header(title, "Nur fuer regulaere Arbeitszeit." ~ (" Gesetzliche Pausen koennen automatisch beruecksichtigt werden." if user.automatic_break_rules_enabled else "")) }} + {% call card('form-card') %} +
+ + + + {{ input_field('Datum', 'date', type='date', value=(entry.date if entry and entry.date else selected_date.isoformat()), required=true) }} + {{ input_field('Arbeitsbeginn', 'start_time', type='time', value=(entry.start_time if entry else ''), required=true, attrs='data-break-input=\"start\"') }} + {{ input_field('Arbeitsende', 'end_time', type='time', value=(entry.end_time if entry else ''), required=true, attrs='data-break-input=\"end\"') }} + {% if full_day_net_minutes is not none %} + + {% endif %} + {{ input_field('Pause in Minuten', 'break_minutes', type='number', value=(entry.break_minutes if entry and entry.break_minutes is not none else 0), required=true, attrs='min=\"0\" step=\"1\" data-break-input=\"minutes\"') }} + {% if user.automatic_break_rules_enabled %} +
+

+ {% if entry and entry.break_mode == 'manual' %} + Pause manuell gesetzt. Gesetzliche Mindestpause wird nicht automatisch überschrieben. + {% else %} + Gesetzliche Mindestpause nach deutschem Arbeitsrecht wird automatisch vorgeschlagen. + {% endif %} +

+ +
+ {% endif %} + {{ textarea_field('Notiz (optional)', 'notes', (entry.notes if entry else ''), 3) }} +

Mit gespeicherter Arbeitszeit werden Urlaub, Feiertag, Krankheit und Stundenausgleich für diesen Tag entfernt.

+ +
+ {% endcall %} +{% endblock %} diff --git a/app/templates/pages/export.html b/app/templates/pages/export.html new file mode 100644 index 0000000..03ed1a4 --- /dev/null +++ b/app/templates/pages/export.html @@ -0,0 +1,26 @@ +{% extends "base.html" %} +{% from "ui/card.html" import card %} +{% from "ui/button.html" import button, link_button %} +{% from "ui/form_field.html" import input_field, select_field %} +{% from "ui/page_header.html" import page_header %} +{% block title %}Export{% endblock %} +{% block content %} + {{ page_header('Export', 'Zeitraum auf den Tag genau wählen und als Excel oder PDF herunterladen.') }} + {% call card('form-card') %} +
+ +
+ {{ input_field('Von', 'from_date', type='date', value=from_date, required=true) }} + {{ input_field('Bis', 'to_date', type='date', value=to_date, required=true) }} +
+ {{ select_field('Format', 'format', [ + {'value': 'xlsx', 'label': 'Excel (.xlsx)'}, + {'value': 'pdf', 'label': 'PDF (.pdf)'} + ], 'xlsx', required=true) }} + +
+ {% endcall %} +{% endblock %} diff --git a/app/templates/pages/help.html b/app/templates/pages/help.html new file mode 100644 index 0000000..0ce11e9 --- /dev/null +++ b/app/templates/pages/help.html @@ -0,0 +1,206 @@ +{% extends "base.html" %} +{% from "ui/page_header.html" import page_header %} +{% from "ui/help_section.html" import help_section %} + +{% block title %}Hilfe{% endblock %} +{% block page_class %}help-page{% endblock %} + +{% block content %} + {{ page_header("Hilfe", "Hier findest du alle wichtigen Funktionen des Stundenfuchs verständlich erklärt.") }} + +
+

+ Stundenfuchs hilft dir dabei, Arbeitszeiten, Urlaub, Feiertage, Krankheitstage und Überstunden an einem Ort zu verwalten. + Die App ist so aufgebaut, dass du im Alltag schnell arbeiten kannst, ohne jede Berechnung selbst im Kopf machen zu müssen. +

+

+ Wenn du neu startest, beginne am besten mit der Wochenansicht. Dort kannst du Tage eintragen, bearbeiten und direkt sehen, + wie sich deine Stunden verändern. +

+
+ +
+
+

Schritt-für-Schritt-Anleitungen

+

Die wichtigsten Abläufe einmal komplett erklärt. Du kannst diese Anleitungen direkt nacheinander durchgehen.

+
+
+
+

Einen normalen Arbeitstag eintragen

+
    +
  1. Gehe in die Wochen- oder Monatsansicht.
  2. +
  3. Klicke beim gewünschten Tag auf + oder auf Bearbeiten.
  4. +
  5. Wähle Zeit, wenn du einen normalen Arbeitstag erfassen möchtest.
  6. +
  7. Trage Arbeitsbeginn und Arbeitsende ein. Wenn du die automatische Pausenregel aktiviert hast, schlägt Stundenfuchs die gesetzliche Mindestpause automatisch vor.
  8. +
  9. Du kannst die Pause trotzdem jederzeit manuell ändern. Dann bleibt dein eigener Wert maßgeblich.
  10. +
  11. Speichere den Eintrag. Die App berechnet Nettozeit, Ist-Stunden und Delta automatisch.
  12. +
+
+
+

Urlaub, Feiertag oder Krankheit eintragen

+
    +
  1. Klicke beim gewünschten Tag auf +.
  2. +
  3. Wähle Urlaub, Feiertag oder Krankheit.
  4. +
  5. Der Status wird sofort gesetzt. Danach erscheint der Tag direkt in der Liste mit dem passenden Kürzel.
  6. +
  7. Wenn nötig, kannst du den Status später wieder ändern oder entfernen.
  8. +
+
+
+

Stundenausgleich buchen

+
    +
  1. Klicke am gewünschten Tag auf + und wähle Stundenausgleich.
  2. +
  3. Entscheide dich für eine der drei Varianten: Stunden, Von-Bis Uhrzeit oder Ganzer Tag.
  4. +
  5. Wähle, ob der Ausgleich positiv oder negativ sein soll.
  6. +
  7. Speichere den Eintrag. In der Liste erscheint der Tag danach mit dem S-Symbol.
  8. +
  9. Der Ausgleich verändert direkt deinen Überstundenstand, ohne als normale Arbeitszeit zu zählen.
  10. +
+
+
+

Arbeitsstunden-Counter einrichten

+
    +
  1. Öffne die Einstellungen.
  2. +
  3. Gehe zum Bereich Arbeitsstunden-Counter.
  4. +
  5. Aktiviere den Counter und trage Start- und Enddatum ein.
  6. +
  7. Optional kannst du Zusatzstunden, ein Ziel und die Anzeige im Header aktivieren.
  8. +
  9. Speichere die Einstellungen. Danach siehst du deinen Stand direkt im Einstellungsbereich und auf Wunsch oben im Header.
  10. +
+
+
+

Backup importieren

+
    +
  1. Öffne die Einstellungen oder nutze den Backup-Upload direkt in der Registrierung.
  2. +
  3. Wähle deine Backup-Datei aus.
  4. +
  5. Entscheide, ob du deine Daten zusammenführen oder vollständig ersetzen möchtest.
  6. +
  7. Prüfe die Vorschau mit Datensatzanzahl und Konflikten.
  8. +
  9. Starte erst danach den eigentlichen Import.
  10. +
+
+
+
+ +
+ {% call help_section("Schnellstart", "So kommst du am schnellsten zu einem sauberen Stundenstand.") %} +
    +
  1. Lege in den Einstellungen deine Wochenstunden und relevanten Arbeitstage fest.
  2. +
  3. Trage in der Wochenansicht deine regulären Arbeitstage ein oder markiere Urlaub, Feiertag oder Krankheit.
  4. +
  5. Nutze den Stundenausgleich, wenn du Plus- oder Minusstunden ohne normale Arbeitszeit buchen möchtest.
  6. +
  7. Kontrolliere oben in den Kacheln dein aktuelles Delta und den kumulierten Stand.
  8. +
+ {% endcall %} + + {% call help_section("Die Kopfzeile", "Die Leiste oben zeigt dir jederzeit die wichtigsten Werte.") %} + + {% endcall %} + + {% call help_section("Wochenansicht", "Die Wochenansicht ist der schnellste Weg für den Alltag.") %} + + {% endcall %} + + {% call help_section("Monatsansicht", "Die Monatsansicht eignet sich gut für Rückblicke und längere Zeiträume.") %} + + {% endcall %} + + {% call help_section("Arbeitszeit eintragen", "Für normale Arbeitstage nutzt du immer den Zeiteintrag.") %} + + {% endcall %} + + {% call help_section("Urlaub, Feiertag und Krankheit", "Diese Tagesarten beeinflussen deine Berechnungen anders als normale Arbeitszeit.") %} + + {% endcall %} + + {% call help_section("Stundenausgleich (S)", "Damit kannst du Überstunden oder Minusstunden direkt verändern, ohne normale Arbeitszeit einzutragen.") %} + + {% endcall %} + + {% call help_section("Arbeitsstunden-Counter", "Dieser Bereich ist unabhängig von deinem Überstundenkonto.") %} + + {% endcall %} + + {% call help_section("Automatischer Modus", "Wenn du nicht jeden Standard-Arbeitstag einzeln eintragen möchtest.") %} + + {% endcall %} + + {% call help_section("Einstellungen", "Hier steuerst du die Regeln, nach denen Stunden berechnet werden.") %} + + {% endcall %} + + {% call help_section("Backup und Wiederherstellung", "So sicherst du deine Daten und spielst sie später wieder ein.") %} + + {% endcall %} + + {% call help_section("Praktische Tipps", "Diese Hinweise vermeiden typische Fehler im Alltag.") %} + + {% endcall %} +
+ +
+

Wenn etwas nicht passt

+

+ Die meisten Abweichungen entstehen durch falsche Wochenstunden, unpassende relevante Arbeitstage oder einen gesetzten Sonderstatus. + Prüfe in diesem Fall zuerst die Tageszeile und danach die Einstellungen. Wenn der Fehler bleibt, kannst du über den Footer die Kontaktseite nutzen. +

+
+{% endblock %} diff --git a/app/templates/pages/landing.html b/app/templates/pages/landing.html new file mode 100644 index 0000000..54bf5cb --- /dev/null +++ b/app/templates/pages/landing.html @@ -0,0 +1,24 @@ +{% extends "base.html" %} +{% from "ui/button.html" import link_button %} +{% block title %}Stundenfuchs{% endblock %} +{% block page_class %}landing-page{% endblock %} +{% block content %} +
+
+ +
+

Arbeitszeit, Urlaub und Überstunden an einem Ort

+

Stundenfuchs bringt Ordnung in deinen Arbeitsalltag.

+

+ Dokumentiere deine Arbeitszeit übersichtlich, behalte Urlaub und Fehlzeiten im Blick und lass dir Sollstunden, Saldo und wichtige Auswertungen automatisch berechnen. +

+
+ {{ link_button('Jetzt registrieren', '/register', extra_class='landing-cta-primary') }} + {{ link_button('Einloggen', '/login', variant='ghost', extra_class='landing-cta-secondary') }} +
+
+
+
+{% endblock %} diff --git a/app/templates/pages/legal_page.html b/app/templates/pages/legal_page.html new file mode 100644 index 0000000..cd14db2 --- /dev/null +++ b/app/templates/pages/legal_page.html @@ -0,0 +1,13 @@ +{% extends "base.html" %} +{% from "ui/card.html" import card %} +{% from "ui/page_header.html" import page_header %} + +{% block title %}{{ title }}{% endblock %} +{% block page_class %}legal-page{% endblock %} + +{% block content %} + {{ page_header(title, subtitle or '') }} + {% call card('legal-card') %} + + {% endcall %} +{% endblock %} diff --git a/app/templates/pages/login.html b/app/templates/pages/login.html new file mode 100644 index 0000000..cfae38a --- /dev/null +++ b/app/templates/pages/login.html @@ -0,0 +1,28 @@ +{% extends "base.html" %} +{% from "ui/card.html" import card %} +{% from "ui/button.html" import button %} +{% from "ui/form_field.html" import input_field %} +{% from "ui/page_header.html" import page_header %} +{% block title %}Anmeldung{% endblock %} +{% block content %} + {{ page_header("Anmeldung") }} + {% call card('auth-card') %} +
+ + {{ input_field('E-Mail', 'email', type='email', required=true, autocomplete='username') }} + {{ input_field('Passwort', 'password', type='password', required=true, autocomplete='current-password') }} + {{ button('Einloggen', type='submit') }} +
+ + {% endcall %} +{% endblock %} diff --git a/app/templates/pages/mfa_challenge.html b/app/templates/pages/mfa_challenge.html new file mode 100644 index 0000000..0e3aa4d --- /dev/null +++ b/app/templates/pages/mfa_challenge.html @@ -0,0 +1,27 @@ +{% extends "base.html" %} +{% from "ui/card.html" import card %} +{% from "ui/button.html" import button %} +{% from "ui/form_field.html" import input_field %} +{% from "ui/page_header.html" import page_header %} +{% block title %}Zwei-Faktor-Anmeldung{% endblock %} +{% block content %} + {{ page_header("Zwei-Faktor-Anmeldung", "Methode: " ~ mfa_method_label) }} + {% call card('auth-card') %} +
+ + {{ input_field('6-stelliger Code', 'code', type='text', required=true, attrs='inputmode="numeric" pattern="[0-9]{6}" minlength="6" maxlength="6"') }} + {{ button('Code prüfen', type='submit') }} +
+ {% if mfa_is_email %} +
+ + {{ button('Neuen Code senden', type='submit', variant='ghost') }} +
+ {% endif %} +

+ Zurück zur Anmeldung +

+ {% endcall %} +{% endblock %} diff --git a/app/templates/pages/month.html b/app/templates/pages/month.html new file mode 100644 index 0000000..9b81476 --- /dev/null +++ b/app/templates/pages/month.html @@ -0,0 +1,46 @@ +{% extends "base.html" %} +{% from "ui/day_row.html" import day_row with context %} +{% from "ui/kpi_bar.html" import kpi_bar with context %} +{% from "ui/month_header_bar.html" import month_header_bar with context %} +{% from "ui/week_group_header.html" import week_group_header with context %} +{% from "ui/week_group_card_mobile.html" import week_group_card_mobile with context %} +{% from "ui/icon_button.html" import icon_link with context %} +{% from "ui/warning_components.html" import workhours_target_warning_banner with context %} +{% block title %}Monatsansicht{% endblock %} +{% block page_class %}month-page{% endblock %} +{% block content %} + {% set month_names = ["Januar", "Februar", "März", "April", "Mai", "Juni", "Juli", "August", "September", "Oktober", "November", "Dezember"] %} + {% set return_to = request.url.path ~ ('?' ~ request.url.query if request.url.query else '') %} + {% set month_prev_url = '/month?month=' ~ previous_month.strftime('%Y-%m') ~ '&view=' ~ view_mode %} + {% set month_next_url = '/month?month=' ~ next_month.strftime('%Y-%m') ~ '&view=' ~ view_mode %} + +
+ {% call month_header_bar(month_prev_url, month_next_url, month_names[month_start.month - 1] ~ ' ' ~ month_start.year) %} + {{ icon_link('/entry/new?date=' ~ month_start.isoformat(), '/static/icons/add.svg', 'Tag hinzufügen') }} + {{ icon_link('/bulk-entry?from=' ~ month_start.isoformat() ~ '&to=' ~ month_end.isoformat(), '/static/icons/batch.svg', 'Mehrfacheingabe') }} + {{ icon_link('/export?from=' ~ month_start.isoformat() ~ '&to=' ~ month_end.isoformat(), '/static/icons/export.svg', 'Export') }} + {% endcall %} + + {{ kpi_bar([ + {'label': 'IST', 'value': ('%.2f'|format(month_ist / 60) )|replace('.00', '')}, + {'label': 'SOLL', 'value': ('%.2f'|format(month_soll / 60))|replace('.00', '')}, + {'label': 'DELTA', 'value': ('%.2f'|format(month_delta / 60))|replace('.00', ''), 'value_class': 'negative' if month_delta < 0 else 'positive'}, + {'label': 'KUMULIERT', 'value': ('%.2f'|format(header_cumulative_minutes / 60))|replace('.00', ''), 'value_class': 'negative' if header_cumulative_minutes < 0 else 'positive'} + ], 'kpi-bar--month') }} + + {{ workhours_target_warning_banner(workhours_target_warning) }} + +
+ {% for week in weeks %} + {% call week_group_card_mobile(week, csrf_token, return_to) %} + {{ week_group_header(week, csrf_token, return_to) }} +
+ {% for day in week.days %} + {{ day_row(day, csrf_token, weekday_name_de(day.date) ~ ', ' ~ day.date.strftime('%d.%m.%Y'), return_to, 'month') }} + {% endfor %} +
+ {% endcall %} + {% endfor %} +
+
+{% endblock %} diff --git a/app/templates/pages/overtime_adjustment_form.html b/app/templates/pages/overtime_adjustment_form.html new file mode 100644 index 0000000..065edc0 --- /dev/null +++ b/app/templates/pages/overtime_adjustment_form.html @@ -0,0 +1,101 @@ +{% extends "base.html" %} +{% from "ui/card.html" import card %} +{% from "ui/button.html" import button, link_button %} +{% from "ui/form_field.html" import input_field %} +{% from "ui/page_header.html" import page_header %} +{% block title %}{{ title }}{% endblock %} +{% block content %} + {{ page_header(title, selected_date.strftime('%d.%m.%Y')) }} + {% call card('form-card') %} +
+ {% if has_entry %} +

An diesem Tag ist bereits reguläre Arbeitszeit eingetragen. Stundenausgleich ist dann nicht verfügbar.

+ + {% else %} +

+ Baut Überstunden direkt auf oder ab. Der Eintrag wirkt nur auf den Saldo, nicht auf Ist-Stunden oder den Arbeitsstunden-Counter. +

+ {% if day_is_vacation %} +

Für diesen Tag ist zusätzlich Urlaub gesetzt.

+ {% elif day_special_status == 'holiday' %} +

Für diesen Tag ist zusätzlich Feiertag gesetzt.

+ {% elif day_special_status == 'sick' %} +

Für diesen Tag ist zusätzlich Krankheit gesetzt.

+ {% endif %} + {% if overtime_adjustment_error %} +

{{ overtime_adjustment_error }}

+ {% endif %} + {% if day_overtime_adjustment_minutes %} +

+ Aktuell gesetzt: + {{ '+' if day_overtime_adjustment_minutes > 0 else '' }}{{ minutes_to_hhmm(day_overtime_adjustment_minutes) }} +

+ {% endif %} + +
+ +
+ + + + +

Stunden

+ {{ input_field('Manuell (+HH:MM oder -HH:MM)', 'adjustment_value', type='text', value='', attrs='placeholder=\"+02:30\"') }} + +
+ +
+ +
+ + + + +

Von-Bis Uhrzeit

+
+ {{ input_field('Von', 'interval_start_time', type='time', value='', required=true) }} + {{ input_field('Bis', 'interval_end_time', type='time', value='', required=true) }} +
+ +
+ +
+ +
+ + + + +

Ganzer Tag

+

Der Tageswert wird aus Wochenstunden geteilt durch die relevanten Arbeitstage berechnet.

+ +
+ +
+ + + {% endif %} +
+ {% endcall %} +{% endblock %} diff --git a/app/templates/pages/password_reset_confirm.html b/app/templates/pages/password_reset_confirm.html new file mode 100644 index 0000000..886b1cb --- /dev/null +++ b/app/templates/pages/password_reset_confirm.html @@ -0,0 +1,23 @@ +{% extends "base.html" %} +{% from "ui/card.html" import card %} +{% from "ui/button.html" import button %} +{% from "ui/form_field.html" import input_field %} +{% from "ui/page_header.html" import page_header %} +{% block title %}Neues Passwort setzen{% endblock %} +{% block content %} + {{ page_header("Neues Passwort setzen") }} + {% call card('auth-card') %} + {% if token %} +
+ + + {{ input_field('Neues Passwort (mindestens 10 Zeichen)', 'new_password', type='password', required=true, autocomplete='new-password', attrs='minlength="10"') }} + {{ input_field('Neues Passwort wiederholen', 'new_password_repeat', type='password', required=true, autocomplete='new-password', attrs='minlength="10"') }} + {{ button('Passwort speichern', type='submit') }} +
+ {% endif %} +

+ Zur Anmeldung +

+ {% endcall %} +{% endblock %} diff --git a/app/templates/pages/password_reset_request.html b/app/templates/pages/password_reset_request.html new file mode 100644 index 0000000..ebf2017 --- /dev/null +++ b/app/templates/pages/password_reset_request.html @@ -0,0 +1,19 @@ +{% extends "base.html" %} +{% from "ui/card.html" import card %} +{% from "ui/button.html" import button %} +{% from "ui/form_field.html" import input_field %} +{% from "ui/page_header.html" import page_header %} +{% block title %}Passwort zurücksetzen{% endblock %} +{% block content %} + {{ page_header('Passwort zurücksetzen', 'Gib deine E-Mail ein. Du erhältst einen Link zum Setzen eines neuen Passworts.') }} + {% call card('auth-card') %} +
+ + {{ input_field('E-Mail', 'email', type='email', required=true, autocomplete='username') }} + {{ button('Reset-Link senden', type='submit') }} +
+

+ Zur Anmeldung +

+ {% endcall %} +{% endblock %} diff --git a/app/templates/pages/register.html b/app/templates/pages/register.html new file mode 100644 index 0000000..d02ccf3 --- /dev/null +++ b/app/templates/pages/register.html @@ -0,0 +1,171 @@ +{% extends "base.html" %} +{% from "ui/button.html" import button %} +{% from "ui/form_field.html" import input_field %} +{% block title %}Registrierung{% endblock %} +{% block body_class %}register-theme{% endblock %} +{% block page_class %}register-page{% endblock %} +{% block content %} +
+

Registrierung

+ +
+ + +
+ {{ input_field('E-Mail', 'email', type='email', required=true, autocomplete='username') }} + {{ input_field('Passwort (mindestens 10 Zeichen)', 'password', type='password', required=true, autocomplete='new-password', attrs='minlength="10"') }} +

Nach dem Anlegen bestätigst du deine E-Mail-Adresse über einen Link.

+
+ +
+

Backup importieren

+

Wenn du bereits eine Sicherung aus Stundenfuchs hast, kannst du sie direkt bei der Registrierung einspielen.

+ +

+ Deine E-Mail-Adresse, dein Passwort und deine gewählte Zwei-Faktor-Anmeldung bleiben erhalten. + Arbeitsdaten und fachliche Einstellungen werden aus dem Backup übernommen. +

+
+ +

+ Alle folgenden Einstellungen sind optional und können auch später in den Einstellungen geändert werden. +

+ +
+

Bundesland

+

für das automatische Festlegen von Feiertagen

+ +
+ +
+

Urlaub

+

Wie viele Urlaubstage pro Jahr stehen dir zur Verfügung

+ {{ input_field('', 'vacation_days_total', type='number', placeholder='z. B. 30', attrs='min="0" max="365" step="1"') }} + {{ input_field('Wochenstunden (Standard)', 'weekly_target_hours', type='number', value='25', attrs='min="0.25" step="0.25"') }} +
+ +
+

Erfassungsmodus

+

Lege fest, ob du deine Arbeitstage komplett selbst pflegst oder ob Stundenfuchs fehlende Arbeitstage bis heute automatisch ergänzt.

+ +
+ +
+

Überstunden

+
+ {{ input_field('Startdatum für Überstundenberechnung', 'overtime_start_date', type='date', value=today_iso) }} + {{ input_field('Nach welchem Zeitraum verfallen Überstunden (Tage)', 'overtime_expiry_days', type='number', placeholder='optional', attrs='min="1" step="1"') }} +
+ +
+ +
+

Gesamtarbeitsstunden

+

z. B. für die Übersicht zu geleisteten Praxisstunden im Anerkennungsjahr der Erzieherausbildung.

+ +
+ +
+

Gesamtarbeitsstunden

+

z. B. für die Übersicht zu geleisteten Praxisstunden im Anerkennungsjahr der Erzieherausbildung.

+
+ {{ input_field('Counter Startdatum', 'workhours_counter_start_date', type='date') }} + {{ input_field('Counter Enddatum', 'workhours_counter_end_date', type='date') }} +
+ {{ input_field('Bereits geleistete Zusatzstunden (optional)', 'workhours_counter_manual_offset_hours', type='number', placeholder='z. B. 80', attrs='min="0" step="0.25"') }} +

Zusätzlich geleistete Stunden, z. B. aus Praktika

+ {{ input_field('Gesamtstundenziel (in Stunden)', 'workhours_counter_target_hours', type='number', placeholder='z. B. 1200', attrs='min="0.25" step="0.25"') }} +
+ + +
+
+ +
+

Relevante Arbeitstage

+

+ Diese Tage steuern Soll und Urlaubslogik. Beispiel: Wenn du nur Montag bis Donnerstag arbeitest, werden Sollstunden auf diese vier Tage verteilt. +

+
+
+ {% for weekday in weekday_options %} + + {% endfor %} +
+
+
+ + + + +
+
+ +
+

Zwei-Faktor-Anmeldung

+

Zur Erhöhung der Sicherheit

+ + {% if not email_mfa_available %} +

Hinweis: E-Mail-2FA ist aktuell nicht verfügbar, da kein Mailserver konfiguriert ist.

+ {% endif %} +
+ +
+ {{ button('Konto anlegen', type='submit', extra_class='register-submit') }} +
+
+ + +
+{% endblock %} diff --git a/app/templates/pages/settings.html b/app/templates/pages/settings.html new file mode 100644 index 0000000..ef36ef1 --- /dev/null +++ b/app/templates/pages/settings.html @@ -0,0 +1,812 @@ +{% extends "base.html" %} +{% from "ui/segmented_toggle.html" import segmented_toggle %} +{% from "ui/collapsible_section.html" import collapsible_section %} +{% block title %}Einstellungen{% endblock %} +{% block body_class %}settings-theme{% endblock %} +{% block page_class %}settings-page{% endblock %} +{% block content %} +
+

Einstellungen

+
+ {% if is_admin %} + {{ segmented_toggle([ + {'href': '/settings?tab=settings', 'label': 'Einstellungen', 'active': active_settings_tab != 'admin'}, + {'href': '/settings?tab=admin', 'label': 'Admin', 'active': active_settings_tab == 'admin'} + ], 'Einstellungsbereiche', 'settings-tabs') }} + {% endif %} +
+ {% if not is_admin or active_settings_tab != 'admin' %} + {% call collapsible_section('Urlaub', 'settings-vacation') %} +

Lege hier deine Gesamturlaubstage pro Kalenderjahr fest. Im Header siehst du danach Resturlaub/Gesamturlaub.

+
+ + +
+ + +
+ + +
+

Definierte Urlaubstage reduzieren automatisch das Wochen-Soll für die betroffenen Wochen.

+
+ +
+ + +
+ + + +
+
+ {% for vacation in vacation_ranges %} +
+
+ {{ vacation.start_date.strftime("%d.%m.%Y") }} - {{ vacation.end_date.strftime("%d.%m.%Y") }} +

Effektive Urlaubstage unter Berücksichtigung deiner Arbeitstage.

+
+
+ + + + +
+
+ {% else %} +

Noch keine Urlaubszeiträume angelegt.

+ {% endfor %} +
+ {% endcall %} + {% call collapsible_section('Wochenstunden', 'settings-weekly-target') %} +

Lege fest, wie viele Stunden du generell pro Woche arbeiten möchtest (Standard-Soll).

+
+ + + + +

+ Dieser Wert wird für neue reguläre Arbeitszeiteinträge und automatische Einträge verwendet, solange die gesetzliche Pausenregel nicht aktiv ist. +

+ +
+ {% endcall %} + {% call collapsible_section('Standardansicht', 'settings-preferences') %} +
+ + + + +

+ Im automatischen Modus werden fehlende Einträge für deine Arbeitstage bis einschließlich heute automatisch angelegt. Abweichungen kannst du danach einzeln anpassen. +

+ +
+ {% endcall %} + {% call collapsible_section('Überstunden-Regeln', 'settings-overtime') %} +

Optionales Startdatum und Verfall für die kumulierte Überstunden-Berechnung.

+
+ +
+ + +
+ + +
+
+
+
+ Saldoaufbau gesamt +

{{ minutes_to_hhmm(overtime_adjustment_total_positive) }}

+
+
+ Saldoabbau gesamt +

{{ minutes_to_hhmm(overtime_adjustment_total_negative) }}

+
+
+ Ganze Tage +

{{ overtime_adjustment_full_day_count }}

+
+
+ {% for adjustment in overtime_adjustments %} +
+
+ {{ adjustment.date.strftime("%d.%m.%Y") }} +

{{ adjustment.notes or "Stundenausgleich" }}

+
+
+ + {{ '+' if adjustment.minutes > 0 else '' }}{{ minutes_to_hhmm(adjustment.minutes) }} + + Bearbeiten +
+
+ {% else %} +

Noch keine Ausgleichsstunden eingetragen.

+ {% endfor %} +
+ {% endcall %} + {% call collapsible_section('Arbeitsstunden-Counter', 'settings-workhours-counter') %} +
+

Hier kannst du einen Zeitraum festlegen und sehen, wie viele Stunden du darin insgesamt gesammelt hast. Das ist zum Beispiel hilfreich für Praxisstunden im Anerkennungsjahr.

+

Urlaub, Feiertage und Krankheit werden nur dann mitgezählt, wenn du das unter „Relevante Arbeitstage“ aktiviert hast.

+
+
+ + + +
+ + +
+
+ + +
+

Zum Beispiel bereits geleistete Praxis- oder Praktikumsstunden, die nicht im Tracker erfasst wurden.

+ + {% if not mail_settings_available %} +

Diese Funktion ist erst verfügbar, wenn ein E-Mail-Server eingerichtet wurde.

+ {% endif %} +

Beispiel: So kannst du deine Praxisstunden im Anerkennungsjahr im Blick behalten.

+ {% if user.workhours_counter_enabled %} +

+ {% if workhours_counter_minutes is not none %} + Aktueller Stand im gewählten Zeitraum: + {% else %} + Bitte gültigen Zeitraum setzen, um den Counter zu berechnen. + {% endif %} +

+ {% endif %} + {% if workhours_counter_warning %} +
+ + Bisher + + {% if workhours_counter_minutes is not none %} + {{ minutes_to_hhmm(workhours_counter_minutes) }} + {% else %} + -- + {% endif %} + + + + Ziel + {{ minutes_to_hhmm(workhours_counter_warning.target_minutes) }} + + + Prognose + {{ minutes_to_hhmm(workhours_counter_warning.projected_minutes) }} + +
+ {% if workhours_counter_warning.at_risk %} +

+ Bis zum Ziel fehlen voraussichtlich noch {{ minutes_to_hhmm(workhours_counter_warning.missing_minutes) }} +

+ {% endif %} + {% endif %} + +
+ {% endcall %} + {% call collapsible_section('Relevante Arbeitstage', 'settings-workdays') %} +

Diese Tage werden für Soll-/Delta-Berechnung verwendet (z. B. 4-Tage-Woche Mo-Do).

+
+ +
+ Arbeitstage +
+ {% for weekday in weekday_options %} + + {% endfor %} +
+
+
+ + + +
+ +
+ {% endcall %} + {% call collapsible_section('Sicherheit (2FA)', 'settings-mfa') %} + {% set mfa_totp_pending = mfa_setup_secret and user.mfa_method == 'none' %} + {% set mfa_selected_method = 'totp' if mfa_totp_pending else user.mfa_method %} +

+ Status: + + {% if mfa_totp_pending %} + TOTP-Einrichtung läuft + {% else %} + {{ mfa_method_labels.get(user.mfa_method, 'Unbekannt') }} + {% endif %} + +

+ {% if mfa_totp_pending %} +

2FA wird aktiviert, sobald du den aktuellen 6-stelligen Code aus deiner Authenticator-App bestätigst.

+ {% endif %} +
+ + + + + + {% if mfa_setup_secret %} +
+

+ TOTP-Setup aktiv: Hinterlege den folgenden Schlüssel oder die URI in deiner Authenticator-App und bestätige danach den Code. +

+ + +
+ {% endif %} + +
+ {% endcall %} +
+ {% call collapsible_section('Account', 'settings-account', 'settings-auth-card', 'account-security') %} +
+ + + +

+ Gesetzliche Feiertage deines Bundeslands werden automatisch als Feiertag markiert, sofern an diesen Tagen keine Arbeitszeit eingetragen ist. +

+ + +
+ {% endcall %} + {% call collapsible_section('Passwort ändern', 'settings-password', 'settings-auth-card', 'account-security') %} +
+ + + + + +
+ {% endcall %} +
+ {% call collapsible_section('Datenexport', 'settings-export') %} +

Lade hier alle bisher eingetragenen Daten herunter. Für Excel und PDF wird dein kompletter erfasster Zeitraum exportiert. Die Backup-Datei ist für Sicherung und späteren Import gedacht.

+
+ +
+ + + +
+
+

Die Backup-Datei enthält deine Einstellungen, Arbeitszeiteinträge, Urlaub, Sondertage, Soll-Historie und Stundenausgleich in einem strukturierten Format. Sicherheits- und Kontodaten sind nicht enthalten.

+ {% endcall %} + {% call collapsible_section('Backup importieren', 'settings-import') %} +

Du kannst eine zuvor exportierte Backup-Datei wieder einspielen. Dein Konto, dein Passwort und deine Sicherheitsdaten bleiben dabei unverändert.

+
+ + +

+ Zusammenführen behält bestehende Tagesdaten bei und ergänzt nur konfliktfreie Inhalte. Ersetzen löscht zuerst alle importierbaren Arbeits- und Einstellungsdaten deines Kontos. +

+ + +
+ {% if import_preview %} +
+
+
+

Importvorschau

+

+ Backup v{{ import_preview.backup_version }} + {% if import_preview.source_app_version %}• exportiert mit {{ import_preview.source_app_version }}{% endif %} + {% if import_preview.exported_at %}• {{ import_preview.exported_at }}{% endif %} +

+
+ {{ import_preview.mode_label }} +
+
+
+ {{ import_preview.counts.time_entries }} + Arbeitszeiteinträge +
+
+ {{ import_preview.counts.weekly_target_rules }} + Wochenziele +
+
+ {{ import_preview.counts.vacation_periods }} + Urlaubszeiträume +
+
+ {{ import_preview.counts.special_day_statuses }} + Sondertage +
+
+ {{ import_preview.counts.overtime_adjustments }} + Stundenausgleich +
+
+
+

+ Übernommen werden auch fachliche Einstellungen wie Wochenstunden, relevante Arbeitstage, Überstunden-Regeln, Arbeitsstunden-Counter und das Bundesland. +

+
    +
  • Konflikte Arbeitszeiteinträge: {{ import_preview.conflicts.time_entries }}
  • +
  • Konflikte Wochenziele: {{ import_preview.conflicts.weekly_target_rules }}
  • +
  • Konflikte Urlaubszeiträume: {{ import_preview.conflicts.vacation_periods }}
  • +
  • Konflikte Sondertage: {{ import_preview.conflicts.special_day_statuses }}
  • +
  • Konflikte Stundenausgleich: {{ import_preview.conflicts.overtime_adjustments }}
  • +
+
+
+ + + {% if import_preview.mode == 'replace_user_data' %} + + {% endif %} + +
+
+ {% endif %} + {% endcall %} + {% call collapsible_section('Konto löschen', 'settings-delete-account', 'danger-card') %} +

Wenn du dein Konto löschst, werden alle zugehörigen Daten dauerhaft entfernt: Arbeitszeiten, Urlaub, Sondertage, Stundenausgleich, Soll-Historie und persönliche Einstellungen.

+
+ + + + + +
+ {% endcall %} + {% endif %} + {% if is_admin and active_settings_tab == 'admin' %} + {% call collapsible_section('Benutzerverwaltung', 'settings-admin-users', 'admin-card') %} +

Aktive Admins: {{ admin_user_count }}

+
+ {% for managed in managed_users %} + {% set disable_delete = managed.id == user.id or (managed.role == 'admin' and managed.is_active and admin_user_count <= 1) %} +
+ +
+ {{ managed.email }} + erstellt: {{ managed.created_at.strftime("%d.%m.%Y") }} +
+ + + + +
+ {% endfor %} +
+ {% endcall %} + {% call collapsible_section('E-Mail-Server', 'settings-admin-email', 'admin-card') %} +

Wird für Passwort-Reset, E-Mail-MFA und Registrierungsmails verwendet.

+
+ +
+ + + + + + +
+
+ + + + + + +
+
+

Empfänger für Registrierungs-Infomails (aktive Admins)

+ {% if admin_recipients %} +
+ {% for admin_recipient in admin_recipients %} + + {% endfor %} +
+ {% else %} +

Keine aktiven Admins gefunden.

+ {% endif %} + {% if email_server.registration_notify_fallback_email %} +

+ Wenn keine Admins ausgewählt sind, wird die Fallback-Adresse + {{ email_server.registration_notify_fallback_email }} genutzt. +

+ {% endif %} +
+ +
+
+ + +
+ {% endcall %} + {% call collapsible_section('Rechtliches', 'settings-admin-legal', 'admin-card') %} +

Diese Inhalte werden öffentlich über den Footer unter Impressum und Datenschutz angezeigt. Markdown ist erlaubt und wird beim Anzeigen sicher bereinigt.

+
+ + + + +
+ {% endcall %} + {% call collapsible_section('Kontakt & Tickets', 'settings-admin-tickets', 'admin-card') %} +

Neue Nachrichten aus dem Kontaktformular werden hier als Tickets gesammelt. Für Benachrichtigungen werden dieselben Admin-Empfänger wie bei Registrierungs-Infomails verwendet.

+
+ {% for ticket in support_tickets %} +
+
+
+

{{ ticket.subject }}

+

+ {{ ticket_category_label(ticket.category) }} · {{ ticket_status_label(ticket.status) }} · {{ ticket.created_at.strftime("%d.%m.%Y %H:%M") }} UTC +

+
+
+ {{ ticket.email }} + {{ ticket.name or 'Ohne Namen' }} + {% if ticket.user_id %}Angemeldeter Nutzer{% endif %} +
+
+
+ {{ ticket.message }} +
+
+ +
+ + +
+ +
+
+ {% else %} +

Aktuell liegen keine Kontakt-Tickets vor.

+ {% endfor %} +
+ {% endcall %} + {% endif %} +
+{% endblock %} diff --git a/app/templates/partials/flash.html b/app/templates/partials/flash.html new file mode 100644 index 0000000..6313187 --- /dev/null +++ b/app/templates/partials/flash.html @@ -0,0 +1,8 @@ +{% from "ui/flash.html" import alert %} +{% set error_message = error if error is defined else None %} +{% if success_message %}{{ alert(success_message, "success") }}{% endif %} +{% if error_message %}{{ alert(error_message, "error") }}{% endif %} +{% if info_message %}{{ alert(info_message, "info") }}{% endif %} +{% if flash_messages %} + {% for flash in flash_messages %}{{ alert(flash.message, flash.level or "info") }}{% endfor %} +{% endif %} diff --git a/app/templates/partials/footer.html b/app/templates/partials/footer.html new file mode 100644 index 0000000..068e8fa --- /dev/null +++ b/app/templates/partials/footer.html @@ -0,0 +1,10 @@ + diff --git a/app/templates/partials/header.html b/app/templates/partials/header.html new file mode 100644 index 0000000..efebea2 --- /dev/null +++ b/app/templates/partials/header.html @@ -0,0 +1,38 @@ +{% set is_auth_header = (not user) and (request.url.path.startswith('/login') or request.url.path.startswith('/register')) %} + diff --git a/app/templates/partials/nav.html b/app/templates/partials/nav.html new file mode 100644 index 0000000..50c35df --- /dev/null +++ b/app/templates/partials/nav.html @@ -0,0 +1,38 @@ +{% from "ui/segmented_toggle.html" import segmented_toggle %} + +{% if user %} +
+ {{ segmented_toggle([ + {'href': main_nav_week_url, 'label': 'Woche', 'active': request.url.path.startswith('/dashboard')}, + {'href': main_nav_month_url, 'label': 'Monat', 'active': request.url.path.startswith('/month')} + ], 'Hauptnavigation', 'topbar-toggle app-main-nav') }} +
+ + + + + + +
+ + +
+
+
+{% else %} + {{ segmented_toggle([ + {'href': '/login', 'label': 'Login', 'active': request.url.path.startswith('/login')}, + {'href': '/register', 'label': 'Registrierung', 'active': request.url.path.startswith('/register')} + ], 'Authentifizierung', 'auth-toggle app-auth-nav') }} +{% endif %} diff --git a/app/templates/partials/version_badge.html b/app/templates/partials/version_badge.html new file mode 100644 index 0000000..4f68e4b --- /dev/null +++ b/app/templates/partials/version_badge.html @@ -0,0 +1,5 @@ +{% if user and user.role == 'admin' %} + + v{{ app_version }}{% if app_env != 'production' %} {{ app_env|capitalize }}{% endif %} + +{% endif %} diff --git a/app/templates/ui/badge.html b/app/templates/ui/badge.html new file mode 100644 index 0000000..5b9d0c6 --- /dev/null +++ b/app/templates/ui/badge.html @@ -0,0 +1,3 @@ +{% macro badge(text, tone='default', class_name='') -%} + {{ text }} +{%- endmacro %} diff --git a/app/templates/ui/button.html b/app/templates/ui/button.html new file mode 100644 index 0000000..87d2379 --- /dev/null +++ b/app/templates/ui/button.html @@ -0,0 +1,9 @@ +{% macro button(label, type='button', variant='primary', extra_class='', icon=None) -%} + +{%- endmacro %} +{% macro link_button(label, href, variant='primary', extra_class='') -%} + {{ label }} +{%- endmacro %} diff --git a/app/templates/ui/card.html b/app/templates/ui/card.html new file mode 100644 index 0000000..70b15df --- /dev/null +++ b/app/templates/ui/card.html @@ -0,0 +1,5 @@ +{% macro card(class_name='') -%} +
+ {{ caller() }} +
+{%- endmacro %} diff --git a/app/templates/ui/chip.html b/app/templates/ui/chip.html new file mode 100644 index 0000000..bc615c6 --- /dev/null +++ b/app/templates/ui/chip.html @@ -0,0 +1,3 @@ +{% macro chip(text, kind='default', extra_class='') -%} + {{ text }} +{%- endmacro %} diff --git a/app/templates/ui/collapsible_section.html b/app/templates/ui/collapsible_section.html new file mode 100644 index 0000000..c3309a4 --- /dev/null +++ b/app/templates/ui/collapsible_section.html @@ -0,0 +1,12 @@ +{% macro collapsible_section(title, section_id, classes='', sync_group='') -%} +
+ + {{ title }} + + +
{{ caller() }}
+
+{%- endmacro %} diff --git a/app/templates/ui/day_row.html b/app/templates/ui/day_row.html new file mode 100644 index 0000000..1be4fee --- /dev/null +++ b/app/templates/ui/day_row.html @@ -0,0 +1,127 @@ +{% from "ui/chip.html" import chip %} + +{% macro status_badge_form(csrf_token, day, return_to, kind) -%} + {% if kind == 'vacation' %} +
+ + + + +
+ {% elif kind == 'holiday' %} +
+ + + + + +
+ {% elif kind == 'sick' %} +
+ + + + + +
+ {% elif kind == 'overtime' %} +
+ + + + +
+ {% endif %} +{%- endmacro %} + +{% macro add_menu_status_form(action_url, csrf_token, day, return_to, label, status='') -%} +
+ + + + {% if status %}{% endif %} + +
+{%- endmacro %} + +{% macro day_row(day, csrf_token, date_label, return_to='/dashboard', mode='week') -%} + {% set is_vacation = day.is_vacation if day.is_vacation is defined else false %} + {% set special_status = day.special_status if day.special_status is defined else None %} + {% set overtime_adjustment_minutes = day.overtime_adjustment_minutes if day.overtime_adjustment_minutes is defined else 0 %} + {% set has_status = is_vacation or special_status in ['holiday', 'sick'] or overtime_adjustment_minutes %} + {% set is_weekend = day.is_weekend if day.is_weekend is defined else day.date.weekday() >= 5 %} + {% set is_today = today_date is defined and day.date == today_date %} + {% set status_edit_url = None %} + {% if overtime_adjustment_minutes %} + {% set status_edit_url = '/overtime-adjustment/edit?date=' ~ day.date.isoformat() %} + {% elif is_vacation %} + {% set status_edit_url = '/day-status/edit?date=' ~ day.date.isoformat() ~ '&status=vacation' %} + {% elif special_status == 'holiday' %} + {% set status_edit_url = '/day-status/edit?date=' ~ day.date.isoformat() ~ '&status=holiday' %} + {% elif special_status == 'sick' %} + {% set status_edit_url = '/day-status/edit?date=' ~ day.date.isoformat() ~ '&status=sick' %} + {% endif %} + +
+
{{ date_label }}
+ +
+ {% if day.entry %} + {{ chip(minutes_to_hhmm(day.entry.start_minutes) ~ ' → ' ~ minutes_to_hhmm(day.entry.end_minutes), 'time') }} + {{ chip('Pause: ' ~ day.entry.break_minutes ~ ' min', 'break') }} + {{ chip('Netto: ' ~ minutes_to_hhmm(day.net_minutes), 'net') }} + {% else %} + {{ chip('Keinen Eintrag', 'empty') }} + {% endif %} +
+ +
+ {% if is_vacation %}{{ status_badge_form(csrf_token, day, return_to, 'vacation') }}{% endif %} + {% if special_status == 'holiday' %}{{ status_badge_form(csrf_token, day, return_to, 'holiday') }}{% endif %} + {% if special_status == 'sick' %}{{ status_badge_form(csrf_token, day, return_to, 'sick') }}{% endif %} + {% if overtime_adjustment_minutes %}{{ status_badge_form(csrf_token, day, return_to, 'overtime') }}{% endif %} + + {% if day.entry %} + + + +
+ + +
+ {% else %} + {% if has_status and status_edit_url %} + + + + {% endif %} +
+ + + +
+ Zeit + {{ add_menu_status_form('/vacation/day/toggle', csrf_token, day, return_to, 'Urlaub (U)') }} + {{ add_menu_status_form('/special-day/toggle', csrf_token, day, return_to, 'Feiertag (F)', 'holiday') }} + {{ add_menu_status_form('/special-day/toggle', csrf_token, day, return_to, 'Krankheit (K)', 'sick') }} + Stundenausgleich (S) +
+
+ {% endif %} +
+
+{%- endmacro %} diff --git a/app/templates/ui/empty_state.html b/app/templates/ui/empty_state.html new file mode 100644 index 0000000..7141786 --- /dev/null +++ b/app/templates/ui/empty_state.html @@ -0,0 +1,6 @@ +{% macro empty_state(title, text='') -%} +
+

{{ title }}

+ {% if text %}

{{ text }}

{% endif %} +
+{%- endmacro %} diff --git a/app/templates/ui/flash.html b/app/templates/ui/flash.html new file mode 100644 index 0000000..75dab33 --- /dev/null +++ b/app/templates/ui/flash.html @@ -0,0 +1,12 @@ +{% macro alert(message, level='info') -%} +
+ {{ message }} + +
+{%- endmacro %} diff --git a/app/templates/ui/form_field.html b/app/templates/ui/form_field.html new file mode 100644 index 0000000..9259334 --- /dev/null +++ b/app/templates/ui/form_field.html @@ -0,0 +1,37 @@ +{% macro input_field(label, name, type='text', value='', required=false, placeholder='', autocomplete='', extra_class='', attrs='') -%} + +{%- endmacro %} +{% macro textarea_field(label, name, value='', rows=3, placeholder='', extra_class='', attrs='') -%} + +{%- endmacro %} +{% macro select_field(label, name, options, selected='', required=false, extra_class='', attrs='') -%} + +{%- endmacro %} diff --git a/app/templates/ui/help_section.html b/app/templates/ui/help_section.html new file mode 100644 index 0000000..8bd8057 --- /dev/null +++ b/app/templates/ui/help_section.html @@ -0,0 +1,9 @@ +{% macro help_section(title, subtitle='') -%} +
+
+

{{ title }}

+ {% if subtitle %}

{{ subtitle }}

{% endif %} +
+
{{ caller() }}
+
+{%- endmacro %} diff --git a/app/templates/ui/icon_button.html b/app/templates/ui/icon_button.html new file mode 100644 index 0000000..c3e7ce8 --- /dev/null +++ b/app/templates/ui/icon_button.html @@ -0,0 +1,17 @@ +{% macro icon_link(href, icon, label, title='', extra_class='') -%} + + + +{%- endmacro %} + +{% macro icon_submit(icon, label, title='', extra_class='') -%} + +{%- endmacro %} diff --git a/app/templates/ui/kpi_bar.html b/app/templates/ui/kpi_bar.html new file mode 100644 index 0000000..53eb314 --- /dev/null +++ b/app/templates/ui/kpi_bar.html @@ -0,0 +1,18 @@ +{% macro kpi_bar(items, extra_class='') -%} +
+ {% for item in items %} +

+ {{ item.label }}: + {{ item.value }} + {% if item.get('show_edit') %} + + {% endif %} +

+ {% endfor %} +
+{%- endmacro %} diff --git a/app/templates/ui/modal.html b/app/templates/ui/modal.html new file mode 100644 index 0000000..09c0a50 --- /dev/null +++ b/app/templates/ui/modal.html @@ -0,0 +1,18 @@ +{% macro modal(id, title, close_label='Schließen') -%} + +{%- endmacro %} diff --git a/app/templates/ui/month_components.html b/app/templates/ui/month_components.html new file mode 100644 index 0000000..a747bf3 --- /dev/null +++ b/app/templates/ui/month_components.html @@ -0,0 +1,5 @@ +{% from "ui/week_group_header.html" import week_group_header %} + +{% macro month_week_divider(week, csrf_token, return_to) -%} + {{ week_group_header(week, csrf_token, return_to) }} +{%- endmacro %} diff --git a/app/templates/ui/month_header_bar.html b/app/templates/ui/month_header_bar.html new file mode 100644 index 0000000..f89216d --- /dev/null +++ b/app/templates/ui/month_header_bar.html @@ -0,0 +1,23 @@ +{% macro month_header_bar(prev_url, next_url, title) -%} +
+
+ + + +

{{ title }}

+ + + +
+
{{ caller() }}
+
+{%- endmacro %} diff --git a/app/templates/ui/page_header.html b/app/templates/ui/page_header.html new file mode 100644 index 0000000..df42675 --- /dev/null +++ b/app/templates/ui/page_header.html @@ -0,0 +1,6 @@ +{% macro page_header(title, subtitle='') -%} + +{%- endmacro %} diff --git a/app/templates/ui/pagination.html b/app/templates/ui/pagination.html new file mode 100644 index 0000000..a4a3c0d --- /dev/null +++ b/app/templates/ui/pagination.html @@ -0,0 +1,6 @@ +{% macro pagination(prev_href='', next_href='', class_name='') -%} + +{%- endmacro %} diff --git a/app/templates/ui/segmented_toggle.html b/app/templates/ui/segmented_toggle.html new file mode 100644 index 0000000..d03ca9f --- /dev/null +++ b/app/templates/ui/segmented_toggle.html @@ -0,0 +1,8 @@ +{% macro segmented_toggle(items, aria_label='Umschalter', extra_class='') -%} + +{%- endmacro %} diff --git a/app/templates/ui/summary_components.html b/app/templates/ui/summary_components.html new file mode 100644 index 0000000..372d54f --- /dev/null +++ b/app/templates/ui/summary_components.html @@ -0,0 +1,5 @@ +{% from "ui/kpi_bar.html" import kpi_bar %} + +{% macro status_strip(items, columns='4', extra_class='') -%} + {{ kpi_bar(items, extra_class) }} +{%- endmacro %} diff --git a/app/templates/ui/table.html b/app/templates/ui/table.html new file mode 100644 index 0000000..eb2808f --- /dev/null +++ b/app/templates/ui/table.html @@ -0,0 +1,14 @@ +{% macro table(headers, class_name='') -%} +
+ + + + {% for header in headers %}{% endfor %} + + + + {{ caller() }} + +
{{ header }}
+
+{%- endmacro %} diff --git a/app/templates/ui/time_components.html b/app/templates/ui/time_components.html new file mode 100644 index 0000000..a52f9cb --- /dev/null +++ b/app/templates/ui/time_components.html @@ -0,0 +1,5 @@ +{% from "ui/day_row.html" import day_row %} + +{% macro time_day_row(day, csrf_token, date_label, return_to='/dashboard') -%} + {{ day_row(day, csrf_token, date_label, return_to, 'week') }} +{%- endmacro %} diff --git a/app/templates/ui/warning_components.html b/app/templates/ui/warning_components.html new file mode 100644 index 0000000..c2eefa8 --- /dev/null +++ b/app/templates/ui/warning_components.html @@ -0,0 +1,13 @@ +{% macro workhours_target_warning_banner(warning) -%} + {% if warning and warning.at_risk %} +
+

Achtung: Arbeitsstundenziel wird ggf. nicht erreicht

+ +
+ {% endif %} +{%- endmacro %} diff --git a/app/templates/ui/week_group_card_mobile.html b/app/templates/ui/week_group_card_mobile.html new file mode 100644 index 0000000..fc555b5 --- /dev/null +++ b/app/templates/ui/week_group_card_mobile.html @@ -0,0 +1,5 @@ +{% macro week_group_card_mobile(week, csrf_token, return_to) -%} +
+ {{ caller() }} +
+{%- endmacro %} diff --git a/app/templates/ui/week_group_header.html b/app/templates/ui/week_group_header.html new file mode 100644 index 0000000..5da36a3 --- /dev/null +++ b/app/templates/ui/week_group_header.html @@ -0,0 +1,27 @@ +{% macro week_group_header(week, csrf_token, return_to) -%} +
+
+ KW{{ week.iso_week }} + + {{ week.week_start.strftime("%d.%m.") }} - {{ week.week_end.strftime("%d.%m.") }} +
+

+ Ist {{ minutes_to_hhmm(week.weekly_ist) }} | Soll {{ minutes_to_hhmm(week.weekly_soll) }} | Delta + {{ minutes_to_hhmm(week.weekly_delta) }} + | Urlaubstage {{ week.vacation_days }} +

+
+ + + + + +
+
+{%- endmacro %} diff --git a/app/templates/ui/week_header_bar.html b/app/templates/ui/week_header_bar.html new file mode 100644 index 0000000..1776100 --- /dev/null +++ b/app/templates/ui/week_header_bar.html @@ -0,0 +1,23 @@ +{% macro week_header_bar(prev_url, next_url, title) -%} +
+
+ + + +

{{ title }}

+ + + +
+
{{ caller() }}
+
+{%- endmacro %} diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..0e54f3f --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,18 @@ +services: + stundenfuchs: + build: . + container_name: stundenfuchs + restart: unless-stopped + env_file: + - .env + environment: + PORT: 8000 + DB_URL: sqlite:////app/data/stundentracker.db + COOKIE_SECURE: "false" + COOKIE_SAMESITE: lax + LOGIN_RATE_LIMIT_ATTEMPTS: 5 + LOGIN_RATE_LIMIT_WINDOW_MINUTES: 15 + volumes: + - ./data:/app/data + ports: + - "8000:8000" diff --git a/docs/DEVELOPMENT.md b/docs/DEVELOPMENT.md new file mode 100644 index 0000000..b5af1b1 --- /dev/null +++ b/docs/DEVELOPMENT.md @@ -0,0 +1,34 @@ +# Development + +## Voraussetzungen + +- Python 3.12+ +- Node.js 20+ + +## Installation + +```bash +python3 -m venv .venv +source .venv/bin/activate +pip install -r requirements.txt djlint pytest pre-commit +npm ci +pre-commit install +``` + +## Wichtige ENV-Variablen + +- `SESSION_SECRET`: starkes Secret für Sessions +- `DATA_ENCRYPTION_KEY`: separates Secret für verschlüsselte Betriebsgeheimnisse +- `BOOTSTRAP_ADMIN_EMAIL`: optionaler Initial-Admin +- `FORWARDED_ALLOW_IPS`: vertrauenswürdige Proxy-Adressen + +Beispielwerte stehen in `.env.example`. + +## Befehle + +```bash +make policy +make lint +make test +make ci +``` diff --git a/docs/SECURITY.md b/docs/SECURITY.md new file mode 100644 index 0000000..d8f15bd --- /dev/null +++ b/docs/SECURITY.md @@ -0,0 +1,22 @@ +# Security Notes + +## Umgesetzte Maßnahmen + +- Passwörter werden mit Argon2 gehasht. +- Zustandsändernde Requests sind CSRF-geschützt. +- Benutzerzugriffe werden serverseitig an die eigene Identität gebunden. +- Admin-Endpunkte sind rollenbasiert abgesichert. +- Passwort-Reset-Tokens werden nur gehasht gespeichert. +- MFA unterstützt TOTP und E-Mail-Codes. +- Besonders sensible Betriebsgeheimnisse können verschlüsselt gespeichert werden. 
+ +## Betriebsrichtlinien + +- `SESSION_SECRET` und `DATA_ENCRYPTION_KEY` getrennt und stark setzen. +- Reverse-Proxy und TLS aktuell halten. +- Logins und Admin-Aktionen überwachen. +- Backups schützen und regelmäßig testen. + +## Meldung von Sicherheitsproblemen + +Bitte veröffentliche Sicherheitsprobleme nicht direkt als öffentliches Issue. Nutze einen privaten Kontaktkanal des Betreibers oder maintainerspezifische Security-Kontaktdaten. diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..bc2bcb5 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,1547 @@ +{ + "name": "stundentracker-ui-guardrails", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "stundentracker-ui-guardrails", + "version": "1.0.0", + "devDependencies": { + "stylelint": "^16.15.0", + "stylelint-config-standard": "^37.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@cacheable/memory": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/@cacheable/memory/-/memory-2.0.8.tgz", + "integrity": 
"sha512-FvEb29x5wVwu/Kf93IWwsOOEuhHh6dYCJF3vcKLzXc0KXIW181AOzv6ceT4ZpBHDvAfG60eqb+ekmrnLHIy+jw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cacheable/utils": "^2.4.0", + "@keyv/bigmap": "^1.3.1", + "hookified": "^1.15.1", + "keyv": "^5.6.0" + } + }, + "node_modules/@cacheable/utils": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@cacheable/utils/-/utils-2.4.0.tgz", + "integrity": "sha512-PeMMsqjVq+bF0WBsxFBxr/WozBJiZKY0rUojuaCoIaKnEl3Ju1wfEwS+SV1DU/cSe8fqHIPiYJFif8T3MVt4cQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "hashery": "^1.5.0", + "keyv": "^5.6.0" + } + }, + "node_modules/@csstools/css-parser-algorithms": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz", + "integrity": "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT", + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@csstools/css-tokenizer": "^3.0.4" + } + }, + "node_modules/@csstools/css-syntax-patches-for-csstree": { + "version": "1.0.29", + "resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.29.tgz", + "integrity": "sha512-jx9GjkkP5YHuTmko2eWAvpPnb0mB4mGRr2U7XwVNwevm8nlpobZEVk+GNmiYMk2VuA75v+plfXWyroWKmICZXg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT-0" + }, + "node_modules/@csstools/css-tokenizer": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz", + "integrity": 
"sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/@csstools/media-query-list-parser": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@csstools/media-query-list-parser/-/media-query-list-parser-4.0.3.tgz", + "integrity": "sha512-HAYH7d3TLRHDOUQK4mZKf9k9Ph/m8Akstg66ywKR4SFAigjs3yBiUeZtFxywiTm5moZMAp/5W/ZuFnNXXYLuuQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT", + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@csstools/css-parser-algorithms": "^3.0.5", + "@csstools/css-tokenizer": "^3.0.4" + } + }, + "node_modules/@csstools/selector-specificity": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@csstools/selector-specificity/-/selector-specificity-5.0.0.tgz", + "integrity": "sha512-PCqQV3c4CoVm3kdPhyeZ07VmBRdH2EpMFA/pd9OASpOEC3aXNGoqPDAZ80D0cLpMBxnmk0+yNhGsEx31hq7Gtw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT-0", + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "postcss-selector-parser": "^7.0.0" + } + }, + "node_modules/@dual-bundle/import-meta-resolve": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@dual-bundle/import-meta-resolve/-/import-meta-resolve-4.2.1.tgz", + "integrity": "sha512-id+7YRUgoUX6CgV0DtuhirQWodeeA7Lf4i2x71JS/vtA5pRb/hIGWlw+G6MeXvsM+MXrz0VAydTGElX1rAfgPg==", + "dev": true, + "license": "MIT", + "funding": { + 
"type": "github", + "url": "https://github.com/sponsors/JounQin" + } + }, + "node_modules/@keyv/bigmap": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@keyv/bigmap/-/bigmap-1.3.1.tgz", + "integrity": "sha512-WbzE9sdmQtKy8vrNPa9BRnwZh5UF4s1KTmSK0KUVLo3eff5BlQNNWDnFOouNpKfPKDnms9xynJjsMYjMaT/aFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "hashery": "^1.4.0", + "hookified": "^1.15.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "keyv": "^5.6.0" + } + }, + "node_modules/@keyv/serialize": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@keyv/serialize/-/serialize-1.1.1.tgz", + "integrity": "sha512-dXn3FZhPv0US+7dtJsIi2R+c7qWYiReoEh5zUntWCf4oSpMNib8FDhSoed6m3QyZdx5hK7iLFkYk3rNxwt8vTA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/ajv": { + 
"version": "8.18.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.18.0.tgz", + "integrity": "sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", 
+ "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/balanced-match": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-2.0.0.tgz", + "integrity": "sha512-1ugUSr8BHXRnK23KfuYS+gVMC3LB8QGH9W1iGtDPsNWoQbgtXSExkBu2aDR4epiGWZOjZsj6lDl/N/AqqTC3UA==", + "dev": true, + "license": "MIT" + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cacheable": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/cacheable/-/cacheable-2.3.3.tgz", + "integrity": "sha512-iffYMX4zxKp54evOH27fm92hs+DeC1DhXmNVN8Tr94M/iZIV42dqTHSR2Ik4TOSPyOAwKr7Yu3rN9ALoLkbWyQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cacheable/memory": "^2.0.8", + "@cacheable/utils": "^2.4.0", + "hookified": "^1.15.0", + "keyv": "^5.6.0", + "qified": "^0.6.0" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + 
"node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/colord": { + "version": "2.9.3", + "resolved": "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz", + "integrity": "sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==", + "dev": true, + "license": "MIT" + }, + "node_modules/cosmiconfig": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-9.0.1.tgz", + "integrity": "sha512-hr4ihw+DBqcvrsEDioRO31Z17x71pUYoNe/4h6Z0wB72p7MU7/9gH8Q3s12NFhHPfYBBOV3qyfUxmr/Yn3shnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "env-paths": "^2.2.1", + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/css-functions-list": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/css-functions-list/-/css-functions-list-3.3.3.tgz", + "integrity": "sha512-8HFEBPKhOpJPEPu70wJJetjKta86Gw9+CCyCnB3sui2qQfOvRyqBy4IKLKKAwdMpWb2lHXWk9Wb4Z6AmaUT1Pg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/css-tree": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-3.1.0.tgz", + "integrity": "sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "mdn-data": "2.12.2", + "source-map-js": "^1.0.1" + }, + "engines": { + "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" + } + }, + "node_modules/cssesc": 
{ + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/error-ex": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", + "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", + "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/fastest-levenshtein": { + "version": "1.0.16", + "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz", + "integrity": "sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4.9.1" + } + }, + "node_modules/fastq": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", + "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + 
"node_modules/file-entry-cache": { + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-11.1.2.tgz", + "integrity": "sha512-N2WFfK12gmrK1c1GXOqiAJ1tc5YE+R53zvQ+t5P8S5XhnmKYVB5eZEiLNZKDSmoG8wqqbF9EXYBBW/nef19log==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^6.1.20" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/flat-cache": { + "version": "6.1.20", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-6.1.20.tgz", + "integrity": "sha512-AhHYqwvN62NVLp4lObVXGVluiABTHapoB57EyegZVmazN+hhGhLTn3uZbOofoTw4DSDvVCadzzyChXhOAvy8uQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "cacheable": "^2.3.2", + "flatted": "^3.3.3", + "hookified": "^1.15.0" + } + }, + "node_modules/flatted": { + "version": "3.3.4", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.4.tgz", + "integrity": "sha512-3+mMldrTAPdta5kjX2G2J7iX4zxtnwpdA8Tr2ZSjkyPSanvbZAcy6flmtnXbEybHrDcU9641lxrMfFuUxVz9vA==", + "dev": true, + "license": "ISC" + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/global-modules": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz", + "integrity": 
"sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "global-prefix": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/global-prefix": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-3.0.0.tgz", + "integrity": "sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "ini": "^1.3.5", + "kind-of": "^6.0.2", + "which": "^1.3.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby/node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/globjoin": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/globjoin/-/globjoin-0.1.4.tgz", + "integrity": "sha512-xYfnw62CKG8nLkZBfWbhWwDw02CHty86jfPcc2cr3ZfeuK9ysoVPPEUxf21bAD/rWAgk52SuBrLJlefNy8mvFg==", + "dev": true, + "license": "MIT" + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/hashery": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/hashery/-/hashery-1.5.0.tgz", + "integrity": "sha512-nhQ6ExaOIqti2FDWoEMWARUqIKyjr2VcZzXShrI+A3zpeiuPWzx6iPftt44LhP74E5sW36B75N6VHbvRtpvO6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "hookified": "^1.14.0" + }, + "engines": { + "node": ">=20" + } + }, + "node_modules/hookified": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/hookified/-/hookified-1.15.1.tgz", + "integrity": "sha512-MvG/clsADq1GPM2KGo2nyfaWVyn9naPiXrqIe4jYjXNZQt238kWyOGrsyc/DmRAQ+Re6yeo6yX/yoNCG5KAEVg==", + "dev": true, + "license": "MIT" + }, + "node_modules/html-tags": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.3.1.tgz", + "integrity": "sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/import-fresh/node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true, + "license": "ISC" + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": 
"https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": 
"https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/keyv": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-5.6.0.tgz", + "integrity": "sha512-CYDD3SOtsHtyXeEORYRx2qBtpDJFjRTGXUtmNEMGyzYOKj1TE3tycdlho7kA1Ufx9OYWZzg52QFBGALTirzDSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@keyv/serialize": "^1.1.1" + } + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/known-css-properties": { + "version": "0.37.0", + "resolved": "https://registry.npmjs.org/known-css-properties/-/known-css-properties-0.37.0.tgz", + "integrity": "sha512-JCDrsP4Z1Sb9JwG0aJ8Eo2r7k4Ou5MwmThS/6lcIe1ICyb7UBJKGRIUUdqc2ASdE/42lgz6zFUnzAIhtXnBVrQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.truncate": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", + "integrity": 
"sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==", + "dev": true, + "license": "MIT" + }, + "node_modules/mathml-tag-names": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/mathml-tag-names/-/mathml-tag-names-2.1.3.tgz", + "integrity": "sha512-APMBEanjybaPzUrfqU0IMU5I0AswKMH7k8OTLs0vvV4KZpExkTkY87nR/zpbuTPj+gARop7aGUbl11pnDfW6xg==", + "dev": true, + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdn-data": { + "version": "2.12.2", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz", + "integrity": "sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/meow": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-13.2.0.tgz", + "integrity": "sha512-pxQJQzB6djGPXh08dacEloMFopsOqGVRKFPYvPOt9XDZ1HasbgDZA74CJGreSU4G3Ak7EFJGoiH2auq+yXISgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + 
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.8", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz", + "integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-resolve-nested-selector": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/postcss-resolve-nested-selector/-/postcss-resolve-nested-selector-0.1.6.tgz", + "integrity": "sha512-0sglIs9Wmkzbr8lQwEyIzlDOOC9bGmfVKcJTaxv3vMmd3uo4o4DerC3En0bnmgceeql9BfC8hRkp7cg0fjdVqw==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/postcss-safe-parser": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/postcss-safe-parser/-/postcss-safe-parser-7.0.1.tgz", + "integrity": "sha512-0AioNCJZ2DPYz5ABT6bddIqlhgwhpHZ/l65YAYo0BCIn0xiDpsnTHz0gnoTGk0OXZW0JRs+cDwL8u/teRdz+8A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss-safe-parser" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "engines": { + "node": ">=18.0" + }, + "peerDependencies": { + "postcss": "^8.4.31" + } + }, + "node_modules/postcss-selector-parser": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", + "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/qified": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/qified/-/qified-0.6.0.tgz", + "integrity": "sha512-tsSGN1x3h569ZSU1u6diwhltLyfUWDp3YbFHedapTmpBl0B3P6U3+Qptg7xu+v+1io1EwhdPyyRHYbEw0KN2FA==", + "dev": true, + "license": "MIT", + "dependencies": { + "hookified": "^1.14.0" + }, + "engines": { + "node": ">=20" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": 
"sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + 
"node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/stylelint": { + "version": "16.26.1", + "resolved": "https://registry.npmjs.org/stylelint/-/stylelint-16.26.1.tgz", + "integrity": "sha512-v20V59/crfc8sVTAtge0mdafI3AdnzQ2KsWe6v523L4OA1bJO02S7MO2oyXDCS6iWb9ckIPnqAFVItqSBQr7jw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/stylelint" + }, + { + "type": "github", + "url": "https://github.com/sponsors/stylelint" + } + ], + "license": "MIT", + "dependencies": { + "@csstools/css-parser-algorithms": "^3.0.5", + "@csstools/css-syntax-patches-for-csstree": "^1.0.19", + "@csstools/css-tokenizer": "^3.0.4", + "@csstools/media-query-list-parser": "^4.0.3", + "@csstools/selector-specificity": "^5.0.0", + "@dual-bundle/import-meta-resolve": "^4.2.1", + "balanced-match": "^2.0.0", + "colord": "^2.9.3", + "cosmiconfig": "^9.0.0", + "css-functions-list": "^3.2.3", + "css-tree": "^3.1.0", + "debug": "^4.4.3", + "fast-glob": "^3.3.3", + "fastest-levenshtein": "^1.0.16", + "file-entry-cache": "^11.1.1", + "global-modules": "^2.0.0", + "globby": "^11.1.0", + "globjoin": "^0.1.4", + "html-tags": "^3.3.1", + "ignore": "^7.0.5", + "imurmurhash": "^0.1.4", + "is-plain-object": "^5.0.0", + "known-css-properties": "^0.37.0", + "mathml-tag-names": "^2.1.3", + "meow": "^13.2.0", + "micromatch": "^4.0.8", + "normalize-path": "^3.0.0", + "picocolors": "^1.1.1", + "postcss": "^8.5.6", + "postcss-resolve-nested-selector": "^0.1.6", + "postcss-safe-parser": "^7.0.1", + "postcss-selector-parser": "^7.1.0", + "postcss-value-parser": "^4.2.0", + "resolve-from": "^5.0.0", + "string-width": "^4.2.3", + 
"supports-hyperlinks": "^3.2.0", + "svg-tags": "^1.0.0", + "table": "^6.9.0", + "write-file-atomic": "^5.0.1" + }, + "bin": { + "stylelint": "bin/stylelint.mjs" + }, + "engines": { + "node": ">=18.12.0" + } + }, + "node_modules/stylelint-config-recommended": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/stylelint-config-recommended/-/stylelint-config-recommended-15.0.0.tgz", + "integrity": "sha512-9LejMFsat7L+NXttdHdTq94byn25TD+82bzGRiV1Pgasl99pWnwipXS5DguTpp3nP1XjvLXVnEJIuYBfsRjRkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/stylelint" + }, + { + "type": "github", + "url": "https://github.com/sponsors/stylelint" + } + ], + "license": "MIT", + "engines": { + "node": ">=18.12.0" + }, + "peerDependencies": { + "stylelint": "^16.13.0" + } + }, + "node_modules/stylelint-config-standard": { + "version": "37.0.0", + "resolved": "https://registry.npmjs.org/stylelint-config-standard/-/stylelint-config-standard-37.0.0.tgz", + "integrity": "sha512-+6eBlbSTrOn/il2RlV0zYGQwRTkr+WtzuVSs1reaWGObxnxLpbcspCUYajVQHonVfxVw2U+h42azGhrBvcg8OA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/stylelint" + }, + { + "type": "github", + "url": "https://github.com/sponsors/stylelint" + } + ], + "license": "MIT", + "dependencies": { + "stylelint-config-recommended": "^15.0.0" + }, + "engines": { + "node": ">=18.12.0" + }, + "peerDependencies": { + "stylelint": "^16.13.0" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-hyperlinks": { + "version": "3.2.0", + "resolved": 
"https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-3.2.0.tgz", + "integrity": "sha512-zFObLMyZeEwzAoKCyu1B91U79K2t7ApXuQfo8OuxwXLDgcKxuwM+YvcbIhm6QWqz7mHUH1TVytR1PwVVjEuMig==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0", + "supports-color": "^7.0.0" + }, + "engines": { + "node": ">=14.18" + }, + "funding": { + "url": "https://github.com/chalk/supports-hyperlinks?sponsor=1" + } + }, + "node_modules/svg-tags": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/svg-tags/-/svg-tags-1.0.0.tgz", + "integrity": "sha512-ovssysQTa+luh7A5Weu3Rta6FJlFBBbInjOh722LIt6klpU2/HtdUbszju/G4devcvk8PGt7FCLv5wftu3THUA==", + "dev": true + }, + "node_modules/table": { + "version": "6.9.0", + "resolved": "https://registry.npmjs.org/table/-/table-6.9.0.tgz", + "integrity": "sha512-9kY+CygyYM6j02t5YFHbNz2FN5QmYGv9zAjVp4lCDjlCw7amdckXlEt/bjMhUIfj4ThGRE4gCUH5+yGnNuPo5A==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "ajv": "^8.0.1", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": 
"sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/write-file-atomic": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..e2a5f7d --- /dev/null +++ b/package.json @@ -0,0 +1,12 @@ +{ + "name": "stundentracker-ui-guardrails", + "private": true, + "version": "1.0.0", + "scripts": { + "lint:css": "stylelint \"app/static/css/**/*.css\"" + }, + "devDependencies": { + "stylelint": "^16.15.0", + "stylelint-config-standard": "^37.0.0" + } +} diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..59c07d7 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,9 @@ +[tool.djlint] +profile = "jinja" +indent = 2 +max_line_length = 140 +ignore = "H006,H013" +extension = "html" + +[tool.djlint.css] +indent_size = 2 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..99979b0 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,19 @@ +fastapi==0.116.1 +uvicorn[standard]==0.35.0 +jinja2==3.1.6 +sqlalchemy==2.0.43 +pydantic==2.11.7 +pydantic-settings==2.10.1 +email-validator==2.2.0 +passlib[argon2]==1.7.4 +python-multipart==0.0.20 +itsdangerous==2.2.0 +openpyxl==3.1.5 +reportlab==4.2.2 +pyotp==2.9.0 +cryptography==44.0.2 +holidays==0.66 +markdown==3.7 +bleach==6.2.0 +pytest==8.4.1 +httpx==0.28.1 diff --git a/scripts/ui_guardrails.py b/scripts/ui_guardrails.py new file mode 100755 index 0000000..d3f2c60 --- /dev/null +++ 
b/scripts/ui_guardrails.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 +"""Backward compatible wrapper. Use tools/policy_checks.py.""" +from tools.policy_checks import main + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..c5b87cc --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,30 @@ +from pathlib import Path +import sys + +import pytest + +ROOT_DIR = Path(__file__).resolve().parents[1] +if str(ROOT_DIR) not in sys.path: + sys.path.insert(0, str(ROOT_DIR)) + +from app.config import Settings +from app.main import create_app + + +def make_settings(db_url: str) -> Settings: + return Settings( + APP_ENV="test", + DB_URL=db_url, + SESSION_SECRET="test-secret", + COOKIE_SECURE=False, + COOKIE_SAMESITE="lax", + LOGIN_RATE_LIMIT_ATTEMPTS=5, + LOGIN_RATE_LIMIT_WINDOW_MINUTES=15, + ) + + +@pytest.fixture() +def app(tmp_path): + db_path = tmp_path / "test.db" + settings = make_settings(f"sqlite:///{db_path}") + return create_app(settings) diff --git a/tests/test_admin_and_mfa.py b/tests/test_admin_and_mfa.py new file mode 100644 index 0000000..e79a347 --- /dev/null +++ b/tests/test_admin_and_mfa.py @@ -0,0 +1,196 @@ +from fastapi.testclient import TestClient +import pyotp + +from app.config import Settings +from app.main import create_app + + +def _build_settings(db_url: str, *, bootstrap_admin_email: str | None = None) -> Settings: + return Settings( + APP_ENV="test", + DB_URL=db_url, + SESSION_SECRET="test-secret", + COOKIE_SECURE=False, + COOKIE_SAMESITE="lax", + LOGIN_RATE_LIMIT_ATTEMPTS=5, + LOGIN_RATE_LIMIT_WINDOW_MINUTES=15, + BOOTSTRAP_ADMIN_EMAIL=bootstrap_admin_email, + ) + + +def test_bootstrap_admin_can_manage_users(tmp_path): + db_path = tmp_path / "admin.db" + app = create_app(_build_settings(f"sqlite:///{db_path}", bootstrap_admin_email="admin@example.com")) + + with TestClient(app) as admin_client, TestClient(app) as user_client: + admin_register = admin_client.post( + 
"/auth/register", + json={"email": "admin@example.com", "password": "verystrongPass123"}, + ) + assert admin_register.status_code == 200 + admin_payload = admin_register.json() + assert admin_payload["role"] == "admin" + admin_csrf = admin_payload["csrf_token"] + + user_register = user_client.post( + "/auth/register", + json={"email": "normal@example.com", "password": "verystrongPass123"}, + ) + assert user_register.status_code == 200 + user_payload = user_register.json() + user_id = user_payload["id"] + user_csrf = user_payload["csrf_token"] + + forbidden = user_client.post( + "/settings/admin/users/" + user_id, + data={"csrf_token": user_csrf, "role": "admin", "is_active": "on"}, + follow_redirects=False, + ) + assert forbidden.status_code == 403 + + updated = admin_client.post( + "/settings/admin/users/" + user_id, + data={"csrf_token": admin_csrf, "role": "admin", "is_active": "on"}, + follow_redirects=False, + ) + assert updated.status_code == 303 + assert updated.headers["location"].startswith("/settings?tab=admin&msg=admin_user_updated") + + me_after = user_client.get("/me") + assert me_after.status_code == 200 + assert me_after.json()["role"] == "admin" + + +def test_special_case_email_no_longer_becomes_admin(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "special-case@example.net", "password": "verystrongPass123"}, + ) + assert register.status_code == 200 + assert register.json()["role"] == "user" + + +def test_totp_mfa_login_flow(app): + password = "verystrongPass123" + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "mfa-user@example.com", "password": password}, + ) + assert register.status_code == 200 + csrf_token = register.json()["csrf_token"] + + setup_start = client.post( + "/settings/mfa", + data={ + "csrf_token": csrf_token, + "mfa_method": "totp", + "current_password": password, + "setup_code": "", + }, + ) + assert setup_start.status_code == 
200 + assert "TOTP Secret" in setup_start.text + assert "TOTP-Einrichtung läuft" in setup_start.text + assert "Aktive Methode: Keine 2FA" not in setup_start.text + assert 'option value="totp" selected' in setup_start.text + assert 'option value="none" selected' not in setup_start.text + + marker = 'name="setup_code"' + assert marker in setup_start.text + secret_prefix = 'TOTP Secret' + assert secret_prefix in setup_start.text + + value_marker = 'readonly' + assert value_marker in setup_start.text + + # Extract setup secret from rendered readonly input field. + snippet_start = setup_start.text.find("TOTP Secret") + secret_value_start = setup_start.text.find("value=\"", snippet_start) + len("value=\"") + secret_value_end = setup_start.text.find("\"", secret_value_start) + setup_secret = setup_start.text[secret_value_start:secret_value_end] + assert setup_secret + + setup_code = pyotp.TOTP(setup_secret).now() + setup_finish = client.post( + "/settings/mfa", + data={ + "csrf_token": csrf_token, + "mfa_method": "totp", + "current_password": password, + "setup_code": setup_code, + }, + follow_redirects=False, + ) + assert setup_finish.status_code == 303 + assert setup_finish.headers["location"].startswith("/settings?msg=mfa_updated") + + settings_after = client.get("/settings") + assert settings_after.status_code == 200 + assert "Status:" in settings_after.text + assert "Authenticator-App (TOTP)" in settings_after.text + assert "TOTP-Einrichtung läuft" not in settings_after.text + +def test_totp_setup_form_keeps_totp_selected_until_confirmation(app): + password = "verystrongPass123" + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "mfa-user-2@example.com", "password": password}, + ) + assert register.status_code == 200 + csrf_token = register.json()["csrf_token"] + + setup_start = client.post( + "/settings/mfa", + data={ + "csrf_token": csrf_token, + "mfa_method": "totp", + "current_password": password, + "setup_code": "", 
+ }, + ) + assert setup_start.status_code == 200 + assert 'option value="totp" selected' in setup_start.text + assert 'option value="none" selected' not in setup_start.text + + +def test_admin_can_delete_user_but_not_self(tmp_path): + db_path = tmp_path / "admin-delete.db" + app = create_app(_build_settings(f"sqlite:///{db_path}", bootstrap_admin_email="admin@example.com")) + + with TestClient(app) as admin_client, TestClient(app) as user_client: + admin_register = admin_client.post( + "/auth/register", + json={"email": "admin@example.com", "password": "verystrongPass123"}, + ) + assert admin_register.status_code == 200 + admin_payload = admin_register.json() + admin_id = admin_payload["id"] + admin_csrf = admin_payload["csrf_token"] + + user_register = user_client.post( + "/auth/register", + json={"email": "delete-me@example.com", "password": "verystrongPass123"}, + ) + assert user_register.status_code == 200 + user_id = user_register.json()["id"] + + delete_user = admin_client.post( + f"/settings/admin/users/{user_id}/delete", + data={"csrf_token": admin_csrf}, + follow_redirects=False, + ) + assert delete_user.status_code == 303 + assert delete_user.headers["location"].startswith("/settings?tab=admin&msg=admin_user_deleted") + + me_deleted_user = user_client.get("/me") + assert me_deleted_user.status_code == 401 + + delete_self = admin_client.post( + f"/settings/admin/users/{admin_id}/delete", + data={"csrf_token": admin_csrf}, + follow_redirects=False, + ) + assert delete_self.status_code == 400 diff --git a/tests/test_auth_isolation.py b/tests/test_auth_isolation.py new file mode 100644 index 0000000..af7d015 --- /dev/null +++ b/tests/test_auth_isolation.py @@ -0,0 +1,53 @@ +from fastapi.testclient import TestClient + + +def test_login_and_tenant_isolation(app): + with TestClient(app) as client_a, TestClient(app) as client_b: + register_a = client_a.post( + "/auth/register", + json={"email": "a@example.com", "password": "strongpasswordA1"}, + ) + assert 
register_a.status_code == 200 + csrf_a = register_a.json()["csrf_token"] + + register_b = client_b.post( + "/auth/register", + json={"email": "b@example.com", "password": "strongpasswordB1"}, + ) + assert register_b.status_code == 200 + csrf_b = register_b.json()["csrf_token"] + + create_a = client_a.post( + "/time-entries", + headers={"x-csrf-token": csrf_a}, + json={ + "date": "2026-02-24", + "start_time": "08:30", + "end_time": "15:00", + "break_minutes": 30, + }, + ) + assert create_a.status_code == 200 + entry_id = create_a.json()["id"] + + list_b = client_b.get("/time-entries") + assert list_b.status_code == 200 + assert list_b.json()["items"] == [] + + patch_b = client_b.patch( + f"/time-entries/{entry_id}", + headers={"x-csrf-token": csrf_b}, + json={"break_minutes": 15}, + ) + assert patch_b.status_code == 404 + + client_a.post("/auth/logout", headers={"x-csrf-token": csrf_a}) + login_a = client_a.post( + "/auth/login", + json={"email": "a@example.com", "password": "strongpasswordA1"}, + ) + assert login_a.status_code == 200 + + me_a = client_a.get("/me") + assert me_a.status_code == 200 + assert me_a.json()["email"] == "a@example.com" diff --git a/tests/test_bulk_entry.py b/tests/test_bulk_entry.py new file mode 100644 index 0000000..a252fb1 --- /dev/null +++ b/tests/test_bulk_entry.py @@ -0,0 +1,68 @@ +from fastapi.testclient import TestClient + + +def test_bulk_entry_create_and_update(app): + with TestClient(app) as client_a, TestClient(app) as client_b: + register_a = client_a.post( + "/auth/register", + json={"email": "bulk-a@example.com", "password": "strongpasswordBulkA1"}, + ) + assert register_a.status_code == 200 + csrf_a = register_a.json()["csrf_token"] + + register_b = client_b.post( + "/auth/register", + json={"email": "bulk-b@example.com", "password": "strongpasswordBulkB1"}, + ) + assert register_b.status_code == 200 + + create_range = client_a.post( + "/bulk-entry", + data={ + "from_date": "2026-03-02", + "to_date": "2026-03-13", + 
"weekdays_values": ["0", "1", "2", "3", "4"], + "start_time": "08:00", + "end_time": "12:00", + "break_minutes": "0", + "mode": "only_missing", + "notes": "nachtrag", + "csrf_token": csrf_a, + }, + follow_redirects=False, + ) + assert create_range.status_code == 303 + + list_a = client_a.get("/time-entries", params={"from": "2026-03-01", "to": "2026-03-31"}) + assert list_a.status_code == 200 + assert len(list_a.json()["items"]) == 10 + + update_range = client_a.post( + "/bulk-entry", + data={ + "from_date": "2026-03-02", + "to_date": "2026-03-13", + "weekdays_values": ["0", "1", "2", "3", "4"], + "start_time": "08:30", + "end_time": "13:30", + "break_minutes": "30", + "mode": "upsert", + "notes": "korrigiert", + "csrf_token": csrf_a, + }, + follow_redirects=False, + ) + assert update_range.status_code == 303 + + list_a_updated = client_a.get("/time-entries", params={"from": "2026-03-01", "to": "2026-03-31"}) + assert list_a_updated.status_code == 200 + items = list_a_updated.json()["items"] + assert len(items) == 10 + assert items[0]["start_time"] == "08:30" + assert items[0]["end_time"] == "13:30" + assert items[0]["break_minutes"] == 30 + assert items[0]["notes"] == "korrigiert" + + list_b = client_b.get("/time-entries", params={"from": "2026-03-01", "to": "2026-03-31"}) + assert list_b.status_code == 200 + assert list_b.json()["items"] == [] diff --git a/tests/test_calculations.py b/tests/test_calculations.py new file mode 100644 index 0000000..c9f9afd --- /dev/null +++ b/tests/test_calculations.py @@ -0,0 +1,85 @@ +from dataclasses import dataclass +from datetime import date, timedelta + +import pytest + +from app.services.calculations import ( + aggregate_week, + automatic_break_minutes, + compute_net_minutes, + cumulative_delta, + iso_week_bounds, + parse_time_to_minutes, + required_break_minutes_for_span, +) + + +@dataclass +class FakeEntry: + date: date + start_minutes: int + end_minutes: int + break_minutes: int + + +def 
test_compute_net_minutes_and_validation() -> None: + assert compute_net_minutes(8 * 60, 15 * 60, 30) == 390 + + with pytest.raises(ValueError): + compute_net_minutes(8 * 60, 8 * 60, 0) + + with pytest.raises(ValueError): + compute_net_minutes(8 * 60, 12 * 60, 300) + + +def test_parse_time_to_minutes() -> None: + assert parse_time_to_minutes("08:30") == 510 + + with pytest.raises(ValueError): + parse_time_to_minutes("8:30") + + +def test_required_break_minutes_follow_german_thresholds() -> None: + assert required_break_minutes_for_span(6 * 60) == 0 + assert required_break_minutes_for_span((6 * 60) + 1) == 30 + assert required_break_minutes_for_span(9 * 60) == 30 + assert required_break_minutes_for_span((9 * 60) + 1) == 45 + + +def test_automatic_break_minutes_uses_work_span() -> None: + assert automatic_break_minutes(8 * 60, 14 * 60) == 0 + assert automatic_break_minutes(8 * 60, (14 * 60) + 1) == 30 + assert automatic_break_minutes(8 * 60, 17 * 60) == 30 + assert automatic_break_minutes(8 * 60, (17 * 60) + 1) == 45 + + with pytest.raises(ValueError): + automatic_break_minutes(8 * 60, 8 * 60) + + +def test_week_aggregation_and_delta() -> None: + week_start, _ = iso_week_bounds(date(2026, 2, 25)) + entries = [ + FakeEntry(date=week_start, start_minutes=8 * 60, end_minutes=13 * 60, break_minutes=15), + FakeEntry(date=week_start + timedelta(days=1), start_minutes=9 * 60, end_minutes=14 * 60, break_minutes=30), + ] + + report = aggregate_week(entries, week_start, weekly_target_minutes=1500) + + assert report["weekly_ist"] == (285 + 270) + assert report["weekly_soll"] == 1500 + assert report["weekly_delta"] == -945 + + +def test_cumulative_delta_multiple_weeks() -> None: + first_week_start, _ = iso_week_bounds(date(2026, 2, 2)) + second_week_start = first_week_start + timedelta(days=7) + + entries = [ + FakeEntry(date=first_week_start, start_minutes=8 * 60, end_minutes=13 * 60, break_minutes=0), + FakeEntry(date=second_week_start, start_minutes=8 * 60, end_minutes=16 * 
60, break_minutes=30), + ] + + result = cumulative_delta(entries, second_week_start, weekly_target_minutes=1500) + + # Woche 1: 300 - 1500, Woche 2: 450 - 1500 + assert result == -2250 diff --git a/tests/test_email_verification.py b/tests/test_email_verification.py new file mode 100644 index 0000000..0aef716 --- /dev/null +++ b/tests/test_email_verification.py @@ -0,0 +1,113 @@ +import re + +from fastapi.testclient import TestClient +from sqlalchemy import select +from sqlalchemy.orm import Session + +from app.config import Settings +from app.database import get_engine +from app.main import create_app +from app.models import EmailServerConfig, User + + +def _extract_csrf(html: str) -> str: + match = re.search(r'name="csrf_token" value="([^"]+)"', html) + assert match is not None + return match.group(1) + + +def _build_settings(db_url: str) -> Settings: + return Settings( + APP_ENV="test", + DB_URL=db_url, + SESSION_SECRET="test-secret", + COOKIE_SECURE=False, + COOKIE_SAMESITE="lax", + LOGIN_RATE_LIMIT_ATTEMPTS=5, + LOGIN_RATE_LIMIT_WINDOW_MINUTES=15, + EMAIL_VERIFICATION_REQUIRED=True, + ) + + +def test_register_requires_email_verification_with_mail_server(tmp_path, monkeypatch): + db_path = tmp_path / "verify.db" + app = create_app(_build_settings(f"sqlite:///{db_path}")) + + sent_mails: list[dict[str, str]] = [] + + def fake_send_email(*, settings, to_email: str, subject: str, text_body: str) -> None: + sent_mails.append({"to": to_email, "subject": subject, "body": text_body}) + + monkeypatch.setattr("app.main.send_email", fake_send_email) + + with Session(get_engine()) as db: + db.add( + EmailServerConfig( + smtp_host="smtp.test.local", + smtp_port=587, + from_email="noreply@test.local", + from_name="Stundentracker", + use_starttls=True, + use_ssl=False, + verify_tls=False, + registration_mails_enabled=True, + password_reset_mails_enabled=True, + ) + ) + db.commit() + + with TestClient(app) as client: + register_page = client.get("/register") + csrf = 
_extract_csrf(register_page.text) + + register_submit = client.post( + "/register", + data={ + "email": "verify-user@example.com", + "password": "strongpasswordVerify1", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert register_submit.status_code == 303 + assert register_submit.headers["location"] == "/login?msg=email_verification_sent" + assert len(sent_mails) == 1 + assert sent_mails[0]["to"] == "verify-user@example.com" + + login_page = client.get("/login") + login_csrf = _extract_csrf(login_page.text) + denied_login = client.post( + "/login", + data={ + "email": "verify-user@example.com", + "password": "strongpasswordVerify1", + "csrf_token": login_csrf, + }, + follow_redirects=False, + ) + assert denied_login.status_code == 403 + assert "Bitte zuerst deine E-Mail-Adresse bestätigen" in denied_login.text + + link_match = re.search(r"https?://[^\s]+/verify-email\?token=[^\s]+", sent_mails[0]["body"]) + assert link_match is not None + verify_response = client.get(link_match.group(0), follow_redirects=False) + assert verify_response.status_code == 303 + assert verify_response.headers["location"] == "/login?msg=email_verified" + + login_page_after_verify = client.get("/login") + login_csrf_after_verify = _extract_csrf(login_page_after_verify.text) + login_after_verify = client.post( + "/login", + data={ + "email": "verify-user@example.com", + "password": "strongpasswordVerify1", + "csrf_token": login_csrf_after_verify, + }, + follow_redirects=False, + ) + assert login_after_verify.status_code == 303 + assert login_after_verify.headers["location"].startswith("/dashboard") + + with Session(get_engine()) as db: + verified_user = db.execute(select(User).where(User.email == "verify-user@example.com")).scalar_one() + assert verified_user.email_verified is True diff --git a/tests/test_export.py b/tests/test_export.py new file mode 100644 index 0000000..2bd2706 --- /dev/null +++ b/tests/test_export.py @@ -0,0 +1,50 @@ +from fastapi.testclient import 
TestClient + + +def test_export_xlsx_and_pdf(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "export@example.com", "password": "strongpasswordExport1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + create = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": "2026-03-03", + "start_time": "08:30", + "end_time": "15:00", + "break_minutes": 30, + }, + ) + assert create.status_code == 200 + + export_xlsx = client.post( + "/export", + data={ + "from_date": "2026-03-01", + "to_date": "2026-03-10", + "format": "xlsx", + "csrf_token": csrf, + }, + ) + assert export_xlsx.status_code == 200 + assert "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" in export_xlsx.headers["content-type"] + assert "attachment;" in export_xlsx.headers["content-disposition"] + assert len(export_xlsx.content) > 200 + + export_pdf = client.post( + "/export", + data={ + "from_date": "2026-03-01", + "to_date": "2026-03-10", + "format": "pdf", + "csrf_token": csrf, + }, + ) + assert export_pdf.status_code == 200 + assert "application/pdf" in export_pdf.headers["content-type"] + assert export_pdf.content.startswith(b"%PDF") diff --git a/tests/test_legal_and_support.py b/tests/test_legal_and_support.py new file mode 100644 index 0000000..9c4671b --- /dev/null +++ b/tests/test_legal_and_support.py @@ -0,0 +1,214 @@ +from datetime import datetime, timedelta, timezone +import re + +from fastapi.testclient import TestClient +from sqlalchemy import select +from sqlalchemy.orm import Session + +from app.config import Settings +from app.database import get_engine +from app.main import create_app +from app.models import SupportTicket + + +def _csrf_from_html(html: str) -> str: + match = re.search(r'name="csrf_token" value="([^"]+)"', html) + assert match is not None + return match.group(1) + + +def _started_at_from_html(html: str) -> str: + match = 
re.search(r'name="started_at" value="([^"]+)"', html) + assert match is not None + return match.group(1) + + +def _build_admin_app(db_url: str) -> object: + return create_app( + Settings( + APP_ENV="test", + DB_URL=db_url, + SESSION_SECRET="test-secret", + COOKIE_SECURE=False, + COOKIE_SAMESITE="lax", + LOGIN_RATE_LIMIT_ATTEMPTS=5, + LOGIN_RATE_LIMIT_WINDOW_MINUTES=15, + BOOTSTRAP_ADMIN_EMAIL="admin@example.com", + ) + ) + + +def test_public_footer_and_legal_pages_render(app): + with TestClient(app) as client: + response = client.get("/login") + assert response.status_code == 200 + assert 'href="/kontakt"' in response.text + assert 'href="/impressum"' in response.text + assert 'href="/datenschutz"' in response.text + + impressum = client.get("/impressum") + assert impressum.status_code == 200 + assert "Impressum" in impressum.text + + privacy = client.get("/datenschutz") + assert privacy.status_code == 200 + assert "Datenschutz" in privacy.text + + +def test_contact_form_creates_ticket(monkeypatch, app): + import app.main as main_module + + base_time = datetime(2026, 3, 22, 12, 0, tzinfo=timezone.utc) + monkeypatch.setattr(main_module, "utc_now", lambda: base_time) + + with TestClient(app) as client: + form = client.get("/kontakt") + assert form.status_code == 200 + csrf = _csrf_from_html(form.text) + started_at = _started_at_from_html(form.text) + + monkeypatch.setattr(main_module, "utc_now", lambda: base_time + timedelta(seconds=5)) + submit = client.post( + "/kontakt", + data={ + "csrf_token": csrf, + "started_at": started_at, + "website": "", + "category": "feature", + "name": "Max Beispiel", + "email": "max@example.com", + "subject": "Bitte Monatsfilter erweitern", + "message": "Ich wünsche mir eine bessere Filterung in der Monatsansicht.", + }, + follow_redirects=False, + ) + assert submit.status_code == 303 + assert submit.headers["location"] == "/kontakt?msg=sent" + + with Session(get_engine()) as db: + tickets = 
db.execute(select(SupportTicket)).scalars().all() + assert len(tickets) == 1 + assert tickets[0].category == "feature" + assert tickets[0].status == "open" + assert tickets[0].subject == "Bitte Monatsfilter erweitern" + + +def test_contact_form_honeypot_blocks_submission(monkeypatch, app): + import app.main as main_module + + base_time = datetime(2026, 3, 22, 12, 0, tzinfo=timezone.utc) + monkeypatch.setattr(main_module, "utc_now", lambda: base_time) + + with TestClient(app) as client: + form = client.get("/kontakt") + csrf = _csrf_from_html(form.text) + started_at = _started_at_from_html(form.text) + + monkeypatch.setattr(main_module, "utc_now", lambda: base_time + timedelta(seconds=5)) + submit = client.post( + "/kontakt", + data={ + "csrf_token": csrf, + "started_at": started_at, + "website": "spam", + "category": "problem", + "name": "", + "email": "spam@example.com", + "subject": "Spamversuch", + "message": "Das sollte blockiert werden.", + }, + ) + assert submit.status_code == 429 + assert "nicht versendet" in submit.text + + with Session(get_engine()) as db: + tickets = db.execute(select(SupportTicket)).scalars().all() + assert tickets == [] + + +def test_admin_can_manage_legal_content_and_tickets(tmp_path, monkeypatch): + import app.main as main_module + + db_path = tmp_path / "legal-support.db" + app = _build_admin_app(f"sqlite:///{db_path}") + base_time = datetime(2026, 3, 22, 12, 0, tzinfo=timezone.utc) + monkeypatch.setattr(main_module, "utc_now", lambda: base_time) + + with TestClient(app) as admin_client: + register = admin_client.post( + "/auth/register", + json={"email": "admin@example.com", "password": "verystrongPass123"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + update_legal = admin_client.post( + "/settings/admin/site-content", + data={ + "csrf_token": csrf, + "impressum_markdown": "# Impressum\n\n**Stage Test**", + "privacy_markdown": "# Datenschutz\n\nBitte Datenschutz beachten.", + }, + 
follow_redirects=False, + ) + assert update_legal.status_code == 303 + assert update_legal.headers["location"] == "/settings?tab=admin&msg=site_content_updated" + + impressum = admin_client.get("/impressum") + assert impressum.status_code == 200 + assert "Stage Test" in impressum.text + + form = admin_client.get("/kontakt") + started_at = _started_at_from_html(form.text) + monkeypatch.setattr(main_module, "utc_now", lambda: base_time + timedelta(seconds=5)) + submit = admin_client.post( + "/kontakt", + data={ + "csrf_token": csrf, + "started_at": started_at, + "website": "", + "category": "problem", + "name": "Admin Test", + "email": "admin@example.com", + "subject": "Ticket bitte schließen", + "message": "Dieses Ticket wird direkt im Adminbereich geschlossen.", + }, + follow_redirects=False, + ) + assert submit.status_code == 303 + + with Session(get_engine()) as db: + ticket = db.execute(select(SupportTicket).where(SupportTicket.subject == "Ticket bitte schließen")).scalar_one() + ticket_id = ticket.id + + with TestClient(app) as admin_client: + login = admin_client.post( + "/login", + data={ + "email": "admin@example.com", + "password": "verystrongPass123", + "csrf_token": _csrf_from_html(admin_client.get("/login").text), + }, + follow_redirects=False, + ) + assert login.status_code == 303 + settings_page = admin_client.get("/settings?tab=admin") + settings_csrf = _csrf_from_html(settings_page.text) + update_ticket = admin_client.post( + f"/settings/admin/tickets/{ticket_id}", + data={ + "csrf_token": settings_csrf, + "status": "closed", + "admin_notes": "Geschlossen im Test", + }, + follow_redirects=False, + ) + assert update_ticket.status_code == 303 + assert update_ticket.headers["location"] == "/settings?tab=admin&msg=ticket_updated" + + with Session(get_engine()) as db: + ticket = db.get(SupportTicket, ticket_id) + assert ticket is not None + assert ticket.status == "closed" + assert ticket.admin_notes == "Geschlossen im Test" + assert ticket.closed_at is 
not None diff --git a/tests/test_security_regressions.py b/tests/test_security_regressions.py new file mode 100644 index 0000000..211966b --- /dev/null +++ b/tests/test_security_regressions.py @@ -0,0 +1,307 @@ +import re + +from fastapi.testclient import TestClient +from sqlalchemy import select +from sqlalchemy.orm import Session + +from app.config import Settings +from app.database import get_engine +from app.main import create_app +from app.models import EmailServerConfig, User + + +def _build_settings(db_url: str, **overrides) -> Settings: + values = { + "APP_ENV": "test", + "DB_URL": db_url, + "SESSION_SECRET": "test-secret", + "COOKIE_SECURE": False, + "COOKIE_SAMESITE": "lax", + "LOGIN_RATE_LIMIT_ATTEMPTS": 5, + "LOGIN_RATE_LIMIT_WINDOW_MINUTES": 15, + } + values.update(overrides) + return Settings(**values) + + +def _extract_csrf(html: str) -> str: + match = re.search(r'name="csrf_token" value="([^"]+)"', html) + assert match is not None + return match.group(1) + + +def _find_latest_link(sent_mails: list[dict[str, str]], path_fragment: str) -> str: + pattern = re.compile(rf"https?://[^\s]+{re.escape(path_fragment)}[^\s]*") + for mail in reversed(sent_mails): + match = pattern.search(mail["body"]) + if match is not None: + return match.group(0) + raise AssertionError(f"no mail with link containing {path_fragment!r}") + + +def _insert_mail_config() -> None: + with Session(get_engine()) as db: + db.add( + EmailServerConfig( + smtp_host="smtp.test.local", + smtp_port=587, + from_email="noreply@test.local", + from_name="Stundenfuchs", + use_starttls=True, + use_ssl=False, + verify_tls=False, + registration_mails_enabled=False, + password_reset_mails_enabled=True, + ) + ) + db.commit() + + +def test_login_rate_limit_ignores_spoofed_x_forwarded_for(app): + email = "rate-limit@example.com" + password = "strongpasswordRate1" + + with TestClient(app) as register_client: + register = register_client.post( + "/auth/register", + json={"email": email, "password": 
password}, + ) + assert register.status_code == 200 + + with TestClient(app) as attacker: + for idx in range(5): + response = attacker.post( + "/auth/login", + headers={"x-forwarded-for": f"198.51.100.{idx}"}, + json={"email": email, "password": "wrong-password"}, + ) + assert response.status_code == 401 + + blocked = attacker.post( + "/auth/login", + headers={"x-forwarded-for": "203.0.113.77"}, + json={"email": email, "password": "wrong-password"}, + ) + assert blocked.status_code == 429 + + +def test_password_reset_new_request_invalidates_previous_token(tmp_path, monkeypatch): + db_path = tmp_path / "reset-rotation.db" + app = create_app(_build_settings(f"sqlite:///{db_path}")) + sent_mails: list[dict[str, str]] = [] + + def fake_send_email(*, settings, to_email: str, subject: str, text_body: str) -> None: + sent_mails.append({"to": to_email, "subject": subject, "body": text_body}) + + monkeypatch.setattr("app.main.send_email", fake_send_email) + _insert_mail_config() + + with TestClient(app) as auth_client: + register = auth_client.post( + "/auth/register", + json={"email": "reset-user@example.com", "password": "strongpasswordReset1"}, + ) + assert register.status_code == 200 + + with TestClient(app) as reset_client: + request_page = reset_client.get("/password-reset/request") + request_csrf = _extract_csrf(request_page.text) + first_request = reset_client.post( + "/password-reset/request", + data={"email": "reset-user@example.com", "csrf_token": request_csrf}, + ) + assert first_request.status_code == 200 + first_link = _find_latest_link(sent_mails, "/password-reset/confirm?token=") + + request_page_again = reset_client.get("/password-reset/request") + request_csrf_again = _extract_csrf(request_page_again.text) + second_request = reset_client.post( + "/password-reset/request", + data={"email": "reset-user@example.com", "csrf_token": request_csrf_again}, + ) + assert second_request.status_code == 200 + second_link = _find_latest_link(sent_mails, 
"/password-reset/confirm?token=") + + assert first_link != second_link + first_token_page = reset_client.get(first_link) + assert first_token_page.status_code == 400 + + second_token_page = reset_client.get(second_link) + assert second_token_page.status_code == 200 + assert 'name="token"' in second_token_page.text + + +def test_password_change_invalidates_existing_reset_tokens(tmp_path, monkeypatch): + db_path = tmp_path / "reset-password-change.db" + app = create_app(_build_settings(f"sqlite:///{db_path}")) + sent_mails: list[dict[str, str]] = [] + + def fake_send_email(*, settings, to_email: str, subject: str, text_body: str) -> None: + sent_mails.append({"to": to_email, "subject": subject, "body": text_body}) + + monkeypatch.setattr("app.main.send_email", fake_send_email) + _insert_mail_config() + + password = "strongpasswordReset2" + with TestClient(app) as user_client, TestClient(app) as reset_client: + register = user_client.post( + "/auth/register", + json={"email": "password-change@example.com", "password": password}, + ) + assert register.status_code == 200 + csrf_token = register.json()["csrf_token"] + + request_page = reset_client.get("/password-reset/request") + request_csrf = _extract_csrf(request_page.text) + reset_request = reset_client.post( + "/password-reset/request", + data={"email": "password-change@example.com", "csrf_token": request_csrf}, + ) + assert reset_request.status_code == 200 + reset_link = _find_latest_link(sent_mails, "/password-reset/confirm?token=") + + change_password = user_client.post( + "/settings/password", + data={ + "current_password": password, + "new_password": "strongpasswordReset3", + "new_password_repeat": "strongpasswordReset3", + "csrf_token": csrf_token, + }, + follow_redirects=False, + ) + assert change_password.status_code == 303 + assert change_password.headers["location"] == "/settings?msg=password_updated" + + expired_reset = reset_client.get(reset_link) + assert expired_reset.status_code == 400 + + +def 
test_email_change_requires_reverification_and_clears_session(tmp_path, monkeypatch): + db_path = tmp_path / "email-change.db" + app = create_app( + _build_settings( + f"sqlite:///{db_path}", + EMAIL_VERIFICATION_REQUIRED=True, + ) + ) + sent_mails: list[dict[str, str]] = [] + + def fake_send_email(*, settings, to_email: str, subject: str, text_body: str) -> None: + sent_mails.append({"to": to_email, "subject": subject, "body": text_body}) + + monkeypatch.setattr("app.main.send_email", fake_send_email) + _insert_mail_config() + + password = "strongpasswordVerify2" + with TestClient(app) as client: + register_page = client.get("/register") + register_csrf = _extract_csrf(register_page.text) + register_submit = client.post( + "/register", + data={ + "email": "verified-before-change@example.com", + "password": password, + "csrf_token": register_csrf, + }, + follow_redirects=False, + ) + assert register_submit.status_code == 303 + assert register_submit.headers["location"] == "/login?msg=email_verification_sent" + + verify_link = _find_latest_link(sent_mails, "/verify-email?token=") + verify_response = client.get(verify_link, follow_redirects=False) + assert verify_response.status_code == 303 + + login_page = client.get("/login") + login_csrf = _extract_csrf(login_page.text) + login_submit = client.post( + "/login", + data={ + "email": "verified-before-change@example.com", + "password": password, + "csrf_token": login_csrf, + }, + follow_redirects=False, + ) + assert login_submit.status_code == 303 + + settings_page = client.get("/settings") + settings_csrf = _extract_csrf(settings_page.text) + profile_update = client.post( + "/settings/profile", + data={ + "email": "changed-address@example.com", + "current_password": password, + "csrf_token": settings_csrf, + }, + follow_redirects=False, + ) + assert profile_update.status_code == 303 + assert profile_update.headers["location"] == "/login?msg=email_verification_sent" + + me_after_change = client.get("/me") + assert 
me_after_change.status_code == 401 + + login_page_after_change = client.get("/login") + login_csrf_after_change = _extract_csrf(login_page_after_change.text) + blocked_login = client.post( + "/login", + data={ + "email": "changed-address@example.com", + "password": password, + "csrf_token": login_csrf_after_change, + }, + follow_redirects=False, + ) + assert blocked_login.status_code == 403 + + assert sent_mails[-1]["to"] == "changed-address@example.com" + + with Session(get_engine()) as db: + updated_user = db.execute(select(User).where(User.email == "changed-address@example.com")).scalar_one() + assert updated_user.email_verified is False + + +def test_api_mfa_resend_respects_cooldown(tmp_path, monkeypatch): + db_path = tmp_path / "mfa-resend.db" + app = create_app(_build_settings(f"sqlite:///{db_path}")) + sent_mails: list[dict[str, str]] = [] + + def fake_send_email(*, settings, to_email: str, subject: str, text_body: str) -> None: + sent_mails.append({"to": to_email, "subject": subject, "body": text_body}) + + monkeypatch.setattr("app.main.send_email", fake_send_email) + _insert_mail_config() + + password = "strongpasswordMfa1" + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "mfa-resend@example.com", "password": password}, + ) + assert register.status_code == 200 + csrf_token = register.json()["csrf_token"] + + enable_email_mfa = client.post( + "/settings/mfa", + data={ + "csrf_token": csrf_token, + "mfa_method": "email", + "current_password": password, + }, + follow_redirects=False, + ) + assert enable_email_mfa.status_code == 303 + + logout = client.post("/auth/logout", headers={"x-csrf-token": csrf_token}) + assert logout.status_code == 200 + + login = client.post("/auth/login", json={"email": "mfa-resend@example.com", "password": password}) + assert login.status_code == 200 + assert login.json()["mfa_required"] is True + pending_csrf = login.json()["csrf_token"] + + resend = client.post("/auth/mfa/resend", 
headers={"x-csrf-token": pending_csrf}) + assert resend.status_code == 429 + assert "Bitte kurz warten" in resend.text diff --git a/tests/test_vacations_and_settings.py b/tests/test_vacations_and_settings.py new file mode 100644 index 0000000..e548160 --- /dev/null +++ b/tests/test_vacations_and_settings.py @@ -0,0 +1,1903 @@ +from fastapi.testclient import TestClient +from datetime import date, timedelta +import holidays +import json +import re +from sqlalchemy import select +from sqlalchemy.orm import Session + +from app.database import get_engine +from app.models import TimeEntry, User + + +def test_vacation_reduces_weekly_target_and_month_report(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "vac@example.com", "password": "strongpasswordVac1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + add_vacation = client.post( + "/settings/vacations/add", + data={ + "start_date": "2026-03-03", + "end_date": "2026-03-04", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert add_vacation.status_code == 303 + + week_report = client.get("/reports/week", params={"date": "2026-03-03"}) + assert week_report.status_code == 200 + data = week_report.json() + assert data["vacation_days"] == 2 + assert data["weekly_soll_minutes"] == 900 + + month_report = client.get("/reports/month", params={"month": "2026-03"}) + assert month_report.status_code == 200 + weeks = month_report.json()["weeks"] + assert any(item.get("vacation_days", 0) >= 2 for item in weeks) + + +def test_month_report_counts_partial_weeks_only_within_month(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "month-partial@example.com", "password": "strongpasswordMonth1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + # Entries in the previous month must not influence March partial week totals. 
+ for day in ["2026-02-23", "2026-02-24", "2026-02-25", "2026-02-26", "2026-02-27"]: + create = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": day, + "start_time": "08:00", + "end_time": "14:00", + "break_minutes": 0, + }, + ) + assert create.status_code == 200 + + month_report = client.get("/reports/month", params={"month": "2026-03"}) + assert month_report.status_code == 200 + data = month_report.json() + + first_week = next(item for item in data["weeks"] if item["week_start"] == "2026-02-23") + assert first_week["ist_minutes"] == 0 + assert first_week["soll_minutes"] == 0 + assert first_week["delta_minutes"] == 0 + + # March 2026 has 22 workdays; default target is 25h/week -> 5h/day (300 min). + assert data["month_soll_minutes"] == 22 * 300 + assert data["month_delta_minutes"] == -(22 * 300) + + +def test_custom_working_days_affect_soll_and_month_partial_weeks(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "workdays@example.com", "password": "strongpasswordWork1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + set_target = client.post( + "/weekly-target", + data={ + "week_start": "2026-03-02", + "weekly_target_hours": "30", + "scope": "all_weeks", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert set_target.status_code == 303 + + update_workdays = client.post( + "/settings/workdays", + data={ + "working_days": ["0", "1", "2", "3"], # Mo-Do + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_workdays.status_code == 303 + + add_vacation = client.post( + "/settings/vacations/add", + data={ + "start_date": "2026-03-03", + "end_date": "2026-03-04", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert add_vacation.status_code == 303 + + week_report = client.get("/reports/week", params={"date": "2026-03-02"}) + assert week_report.status_code == 200 + week_data = week_report.json() + assert 
week_data["weekly_soll_minutes"] == 900 # 2 verbleibende Arbeitstage * 7.5h + assert week_data["vacation_days"] == 2 + + month_report = client.get("/reports/month", params={"month": "2026-03"}) + assert month_report.status_code == 200 + month_data = month_report.json() + first_week = next(item for item in month_data["weeks"] if item["week_start"] == "2026-02-23") + assert first_week["soll_minutes"] == 0 + + +def test_quick_vacation_toggle_for_day_and_week(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "quickvac@example.com", "password": "strongpasswordQuick1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + day_on = client.post( + "/vacation/day/toggle", + data={"date": "2026-03-03", "return_to": "/dashboard?date=2026-03-03", "csrf_token": csrf}, + follow_redirects=False, + ) + assert day_on.status_code == 303 + week_after_day_on = client.get("/reports/week", params={"date": "2026-03-03"}) + assert week_after_day_on.status_code == 200 + assert week_after_day_on.json()["vacation_days"] == 1 + + day_off = client.post( + "/vacation/day/toggle", + data={"date": "2026-03-03", "return_to": "/dashboard?date=2026-03-03", "csrf_token": csrf}, + follow_redirects=False, + ) + assert day_off.status_code == 303 + week_after_day_off = client.get("/reports/week", params={"date": "2026-03-03"}) + assert week_after_day_off.status_code == 200 + assert week_after_day_off.json()["vacation_days"] == 0 + + week_on = client.post( + "/vacation/week/toggle", + data={ + "week_start": "2026-03-02", + "week_end": "2026-03-08", + "return_to": "/month?month=2026-03&view=flat", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert week_on.status_code == 303 + week_after_week_on = client.get("/reports/week", params={"date": "2026-03-03"}) + assert week_after_week_on.status_code == 200 + assert week_after_week_on.json()["vacation_days"] == 5 + + week_off = client.post( + 
"/vacation/week/toggle", + data={ + "week_start": "2026-03-02", + "week_end": "2026-03-08", + "return_to": "/month?month=2026-03&view=flat", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert week_off.status_code == 303 + week_after_week_off = client.get("/reports/week", params={"date": "2026-03-03"}) + assert week_after_week_off.status_code == 200 + assert week_after_week_off.json()["vacation_days"] == 0 + + +def test_week_vacation_toggle_uses_configured_workdays(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "weekworkdays@example.com", "password": "strongpasswordWeekWork1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + set_target = client.post( + "/weekly-target", + data={ + "week_start": "2026-03-02", + "weekly_target_hours": "30", + "scope": "all_weeks", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert set_target.status_code == 303 + + update_workdays = client.post( + "/settings/workdays", + data={ + "working_days": ["0", "1", "2", "3"], # Mo-Do + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_workdays.status_code == 303 + + week_on = client.post( + "/vacation/week/toggle", + data={ + "week_start": "2026-03-02", + "week_end": "2026-03-08", + "return_to": "/dashboard?date=2026-03-02", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert week_on.status_code == 303 + + week_after_week_on = client.get("/reports/week", params={"date": "2026-03-03"}) + assert week_after_week_on.status_code == 200 + payload = week_after_week_on.json() + assert payload["vacation_days"] == 4 + assert payload["weekly_soll_minutes"] == 0 + + +def test_settings_vacation_ranges_follow_configured_workdays(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "vac-ranges@example.com", "password": "strongpasswordVacRanges1"}, + ) + assert register.status_code == 200 + csrf = 
register.json()["csrf_token"] + + update_allowance = client.post( + "/settings/vacation-allowance", + data={"vacation_days_total": "22", "csrf_token": csrf}, + follow_redirects=False, + ) + assert update_allowance.status_code == 303 + + update_workdays = client.post( + "/settings/workdays", + data={ + "working_days": ["0", "1", "2", "3"], # Mo-Do + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_workdays.status_code == 303 + + add_vacation = client.post( + "/settings/vacations/add", + data={ + "start_date": "2026-03-02", + "end_date": "2026-03-15", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert add_vacation.status_code == 303 + + week_1 = client.get("/reports/week", params={"date": "2026-03-03"}) + assert week_1.status_code == 200 + assert week_1.json()["vacation_days"] == 4 + + week_2 = client.get("/reports/week", params={"date": "2026-03-10"}) + assert week_2.status_code == 200 + assert week_2.json()["vacation_days"] == 4 + + dashboard = client.get("/dashboard", params={"date": "2026-03-10"}) + assert dashboard.status_code == 200 + # Resturlaub / Gesamturlaub: 22 - 8 = 14 + assert "14/22" in dashboard.text + + settings_page = client.get("/settings") + assert settings_page.status_code == 200 + assert "02.03.2026 - 05.03.2026" in settings_page.text + assert "09.03.2026 - 12.03.2026" in settings_page.text + assert "07.03.2026 - 12.03.2026" not in settings_page.text + assert "14.03.2026 - 15.03.2026" not in settings_page.text + + delete_second_range = client.post( + "/settings/vacations/delete-range", + data={ + "start_date": "2026-03-09", + "end_date": "2026-03-12", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert delete_second_range.status_code == 303 + + week_2_after_delete = client.get("/reports/week", params={"date": "2026-03-10"}) + assert week_2_after_delete.status_code == 200 + assert week_2_after_delete.json()["vacation_days"] == 0 + + +def 
test_vacation_allowance_is_saved_and_shows_remaining_days_in_header(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "allowance@example.com", "password": "strongpasswordAllow1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + update_allowance = client.post( + "/settings/vacation-allowance", + data={"vacation_days_total": "22", "csrf_token": csrf}, + follow_redirects=False, + ) + assert update_allowance.status_code == 303 + + me = client.get("/me") + assert me.status_code == 200 + assert me.json()["vacation_days_total"] == 22 + + current_year = date.today().year + target_day = date(current_year, 1, 1) + while target_day.weekday() > 4: + target_day += timedelta(days=1) + + add_vacation = client.post( + "/settings/vacations/add", + data={ + "start_date": target_day.isoformat(), + "end_date": target_day.isoformat(), + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert add_vacation.status_code == 303 + + dashboard = client.get("/dashboard") + assert dashboard.status_code == 200 + assert "21/22" in dashboard.text + + +def test_federal_state_auto_holidays_skip_days_with_work_entries(app): + with TestClient(app) as client: + password = "strongpasswordState1" + register = client.post( + "/auth/register", + json={"email": "state-holidays@example.com", "password": password}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + holiday_map = holidays.country_holidays("DE", subdiv="NW", years=[date.today().year, date.today().year + 1]) + weekday_holidays = sorted([day for day in holiday_map.keys() if day.weekday() <= 4 and day >= date.today()]) + assert len(weekday_holidays) >= 2 + worked_holiday = weekday_holidays[0] + untouched_holiday = weekday_holidays[1] + + create_entry = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": worked_holiday.isoformat(), + "start_time": "08:00", + "end_time": "12:00", + 
"break_minutes": 0, + }, + ) + assert create_entry.status_code == 200 + + update_profile = client.post( + "/settings/profile", + data={ + "email": "state-holidays@example.com", + "federal_state": "NW", + "current_password": password, + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_profile.status_code == 303 + + worked_month = client.get("/reports/month", params={"month": worked_holiday.strftime("%Y-%m")}) + assert worked_month.status_code == 200 + worked_days = {item["date"]: item for item in worked_month.json()["days"]} + assert worked_days[worked_holiday.isoformat()]["special_status"] is None + + untouched_month = client.get("/reports/month", params={"month": untouched_holiday.strftime("%Y-%m")}) + assert untouched_month.status_code == 200 + untouched_days = {item["date"]: item for item in untouched_month.json()["days"]} + assert untouched_days[untouched_holiday.isoformat()]["special_status"] == "holiday" + + +def test_federal_state_holidays_also_mark_non_configured_workdays(app): + with TestClient(app) as client: + password = "strongpasswordState2" + register = client.post( + "/auth/register", + json={"email": "state-holidays-2@example.com", "password": password}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + # Restrict workdays to Mo-Do (Friday excluded). + update_workdays = client.post( + "/settings/workdays", + data={ + "working_days": ["0", "1", "2", "3"], + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_workdays.status_code == 303 + + update_profile = client.post( + "/settings/profile", + data={ + "email": "state-holidays-2@example.com", + "federal_state": "HH", + "current_password": password, + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_profile.status_code == 303 + + # 01.05.2026 is Friday and still should be marked as holiday. 
        # Tail of test_federal_state_holidays_also_mark_non_configured_workdays:
        # 2026-05-01 falls on a Friday, which this user excluded from their
        # workdays — the month report must still flag it as a holiday.
        may_report = client.get("/reports/month", params={"month": "2026-05"})
        assert may_report.status_code == 200
        days = {item["date"]: item for item in may_report.json()["days"]}
        assert days["2026-05-01"]["special_status"] == "holiday"


def test_special_status_reduces_soll_without_counting_as_vacation(app):
    """A holiday/sick marker lowers the weekly target (Soll) without using vacation.

    With the default 25h/week target over 5 workdays (300 min/day), flagging one
    day drops the weekly Soll from 1500 to 1200 minutes while ``vacation_days``
    stays 0.  Re-posting the same date with another status switches the marker;
    re-posting with the current status removes it (toggle semantics).
    """
    with TestClient(app) as client:
        register = client.post(
            "/auth/register",
            json={"email": "specialstatus@example.com", "password": "strongpasswordSpecial1"},
        )
        assert register.status_code == 200
        csrf = register.json()["csrf_token"]

        # Mark 2026-03-03 (a Tuesday) as a public holiday via the quick toggle.
        add_holiday = client.post(
            "/special-day/toggle",
            data={
                "date": "2026-03-03",
                "status": "holiday",
                "return_to": "/dashboard?date=2026-03-03",
                "csrf_token": csrf,
            },
            follow_redirects=False,
        )
        assert add_holiday.status_code == 303

        week_data = client.get("/reports/week", params={"date": "2026-03-03"})
        assert week_data.status_code == 200
        payload = week_data.json()
        assert payload["vacation_days"] == 0  # special days must not consume vacation
        assert payload["weekly_soll_minutes"] == 1200  # 1500 - 300 for the flagged day

        # Toggling the same date with status "sick" switches the marker;
        # the Soll reduction is unchanged.
        switch_to_sick = client.post(
            "/special-day/toggle",
            data={
                "date": "2026-03-03",
                "status": "sick",
                "return_to": "/dashboard?date=2026-03-03",
                "csrf_token": csrf,
            },
            follow_redirects=False,
        )
        assert switch_to_sick.status_code == 303

        week_after_switch = client.get("/reports/week", params={"date": "2026-03-03"})
        assert week_after_switch.status_code == 200
        assert week_after_switch.json()["weekly_soll_minutes"] == 1200

        # Posting the current status again removes the marker entirely,
        # restoring the full weekly target.
        remove_sick = client.post(
            "/special-day/toggle",
            data={
                "date": "2026-03-03",
                "status": "sick",
                "return_to": "/dashboard?date=2026-03-03",
                "csrf_token": csrf,
            },
            follow_redirects=False,
        )
        assert remove_sick.status_code == 303
        week_without_special = client.get("/reports/week", params={"date": "2026-03-03"})
        assert week_without_special.status_code == 200
        assert week_without_special.json()["weekly_soll_minutes"] == 1500


def test_workhours_counter_settings_and_value(app):
    """The work-hours counter can be enabled with a date range and offset.

    Seeds a mix of weekday entries, a weekend entry, a vacation day, and a
    holiday marker, then enables the counter for March 2026 and checks that the
    settings page renders the current counter value.
    """
    with TestClient(app) as client:
        register = client.post(
            "/auth/register",
            json={"email": "workcounter@example.com", "password": "strongpasswordCounter1"},
        )
        assert register.status_code == 200
        csrf = register.json()["csrf_token"]

        # Two regular weekday entries (Mon/Tue), 5h each.
        entry_1 = client.post(
            "/time-entries",
            headers={"x-csrf-token": csrf},
            json={
                "date": "2026-03-02",
                "start_time": "08:00",
                "end_time": "13:00",
                "break_minutes": 0,
            },
        )
        assert entry_1.status_code == 200
        entry_2 = client.post(
            "/time-entries",
            headers={"x-csrf-token": csrf},
            json={
                "date": "2026-03-03",
                "start_time": "08:00",
                "end_time": "13:00",
                "break_minutes": 0,
            },
        )
        assert entry_2.status_code == 200
        # 2026-03-07 is a Saturday — exercises counting on a non-workday.
        weekend_entry = client.post(
            "/time-entries",
            headers={"x-csrf-token": csrf},
            json={
                "date": "2026-03-07",
                "start_time": "08:00",
                "end_time": "13:00",
                "break_minutes": 0,
            },
        )
        assert weekend_entry.status_code == 200

        # One vacation day (Wed) and one holiday marker (Thu) inside the range.
        add_vacation = client.post(
            "/settings/vacations/add",
            data={
                "start_date": "2026-03-04",
                "end_date": "2026-03-04",
                "csrf_token": csrf,
            },
            follow_redirects=False,
        )
        assert add_vacation.status_code == 303

        add_holiday = client.post(
            "/special-day/toggle",
            data={
                "date": "2026-03-05",
                "status": "holiday",
                "return_to": "/dashboard?date=2026-03-05",
                "csrf_token": csrf,
            },
            follow_redirects=False,
        )
        assert add_holiday.status_code == 303

        # Enable the counter for March with a manual 2.5h starting offset.
        enable_counter = client.post(
            "/settings/workhours-counter",
            data={
                "workhours_counter_enabled": "on",
                "workhours_counter_start_date": "2026-03-01",
                "workhours_counter_end_date": "2026-03-31",
                "workhours_counter_manual_offset_hours": "2.5",
                "csrf_token": csrf,
            },
            follow_redirects=False,
        )
        assert enable_counter.status_code == 303

        settings_page = client.get("/settings")
        assert settings_page.status_code == 200
        assert "Aktueller Stand im gewählten Zeitraum:" in settings_page.text


def
test_workhours_counter_counts_flagged_non_working_days_as_regular_workdays(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "workcounter-flags@example.com", "password": "strongpasswordCounter2"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + entry_1 = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={"date": "2026-03-02", "start_time": "08:00", "end_time": "13:00", "break_minutes": 0}, + ) + assert entry_1.status_code == 200 + entry_2 = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={"date": "2026-03-03", "start_time": "08:00", "end_time": "13:00", "break_minutes": 0}, + ) + assert entry_2.status_code == 200 + + update_workdays = client.post( + "/settings/workdays", + data={ + "working_days": ["0", "1", "2", "3", "4"], + "count_vacation_as_worktime": "on", + "count_holiday_as_worktime": "on", + "count_sick_as_worktime": "on", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_workdays.status_code == 303 + + add_vacation = client.post( + "/settings/vacations/add", + data={"start_date": "2026-03-04", "end_date": "2026-03-04", "csrf_token": csrf}, + follow_redirects=False, + ) + assert add_vacation.status_code == 303 + + add_holiday = client.post( + "/special-day/toggle", + data={"date": "2026-03-05", "status": "holiday", "return_to": "/dashboard?date=2026-03-05", "csrf_token": csrf}, + follow_redirects=False, + ) + assert add_holiday.status_code == 303 + + add_sick = client.post( + "/special-day/toggle", + data={"date": "2026-03-06", "status": "sick", "return_to": "/dashboard?date=2026-03-06", "csrf_token": csrf}, + follow_redirects=False, + ) + assert add_sick.status_code == 303 + + enable_counter = client.post( + "/settings/workhours-counter", + data={ + "workhours_counter_enabled": "on", + "workhours_counter_start_date": "2026-03-01", + "workhours_counter_end_date": "2026-03-31", + 
"workhours_counter_manual_offset_hours": "2.5", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert enable_counter.status_code == 303 + + settings_page = client.get("/settings") + assert settings_page.status_code == 200 + assert "Aktueller Stand im gewählten Zeitraum:" in settings_page.text + + +def test_automatic_break_rules_can_be_enabled_in_settings(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "auto-break-settings@example.com", "password": "strongpasswordBreak1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + update_break_settings = client.post( + "/settings/weekly-target", + data={ + "weekly_target_hours": "25", + "automatic_break_rules_enabled": "on", + "default_break_minutes": "20", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_break_settings.status_code == 303 + + me = client.get("/me") + assert me.status_code == 200 + assert me.json()["automatic_break_rules_enabled"] is True + assert me.json()["default_break_minutes"] == 20 + + +def test_new_entry_uses_automatic_break_rules_for_new_entries(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "auto-break-new@example.com", "password": "strongpasswordBreak2"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + update_break_settings = client.post( + "/settings/weekly-target", + data={ + "weekly_target_hours": "25", + "automatic_break_rules_enabled": "on", + "default_break_minutes": "20", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_break_settings.status_code == 303 + + create_entry = client.post( + "/entry/new", + data={ + "date": "2026-03-03", + "start_time": "08:00", + "end_time": "14:01", + "break_minutes": "0", + "break_mode": "auto", + "notes": "", + "return_to": "/dashboard?date=2026-03-03", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert 
create_entry.status_code == 303 + + entries = client.get("/time-entries", params={"from": "2026-03-03", "to": "2026-03-03"}) + assert entries.status_code == 200 + payload = entries.json()["items"] + assert len(payload) == 1 + assert payload[0]["break_minutes"] == 30 + assert payload[0]["break_mode"] == "auto" + assert payload[0]["net_minutes"] == 331 + + +def test_edit_entry_can_override_automatic_break_rules_manually(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "auto-break-edit@example.com", "password": "strongpasswordBreak3"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + update_break_settings = client.post( + "/settings/weekly-target", + data={ + "weekly_target_hours": "25", + "automatic_break_rules_enabled": "on", + "default_break_minutes": "20", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_break_settings.status_code == 303 + + create_entry = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": "2026-03-03", + "start_time": "08:00", + "end_time": "14:30", + "break_mode": "auto", + }, + ) + assert create_entry.status_code == 200 + entry_id = create_entry.json()["id"] + assert create_entry.json()["break_minutes"] == 30 + assert create_entry.json()["break_mode"] == "auto" + + edit_entry = client.post( + f"/entry/{entry_id}/edit", + data={ + "date": "2026-03-03", + "start_time": "08:00", + "end_time": "15:30", + "break_minutes": "15", + "break_mode": "manual", + "notes": "", + "return_to": "/dashboard?date=2026-03-03", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert edit_entry.status_code == 303 + + updated = client.get("/time-entries", params={"from": "2026-03-03", "to": "2026-03-03"}) + assert updated.status_code == 200 + payload = updated.json()["items"] + assert len(payload) == 1 + assert payload[0]["break_minutes"] == 15 + assert payload[0]["break_mode"] == "manual" + + +def 
test_edit_entry_recalculates_auto_break_when_times_change(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "auto-break-recalc@example.com", "password": "strongpasswordBreak4"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + update_break_settings = client.post( + "/settings/weekly-target", + data={ + "weekly_target_hours": "25", + "automatic_break_rules_enabled": "on", + "default_break_minutes": "20", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_break_settings.status_code == 303 + + create_entry = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": "2026-03-03", + "start_time": "08:00", + "end_time": "17:00", + "break_mode": "auto", + }, + ) + assert create_entry.status_code == 200 + entry_id = create_entry.json()["id"] + assert create_entry.json()["break_minutes"] == 30 + + update_entry = client.patch( + f"/time-entries/{entry_id}", + headers={"x-csrf-token": csrf}, + json={ + "end_time": "17:30", + "break_mode": "auto", + }, + ) + assert update_entry.status_code == 200 + assert update_entry.json()["break_minutes"] == 45 + assert update_entry.json()["break_mode"] == "auto" + + +def test_new_entry_uses_configured_default_break_when_auto_break_is_disabled(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "manual-break-default@example.com", "password": "strongpasswordBreak5"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + update_break_settings = client.post( + "/settings/weekly-target", + data={ + "weekly_target_hours": "25", + "default_break_minutes": "25", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_break_settings.status_code == 303 + + create_entry = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": "2026-03-03", + "start_time": "08:00", + "end_time": "14:00", + 
}, + ) + assert create_entry.status_code == 200 + assert create_entry.json()["break_minutes"] == 25 + assert create_entry.json()["break_mode"] == "manual" + + +def test_entry_form_renders_full_day_button(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "entry-form-fullday@example.com", "password": "strongpasswordBreak7"}, + ) + assert register.status_code == 200 + + entry_form = client.get("/entry/new?date=2026-03-03") + assert entry_form.status_code == 200 + assert 'name="date"' in entry_form.text + assert 'value="2026-03-03"' in entry_form.text + assert 'data-action="entry-apply-full-day"' in entry_form.text + assert 'data-full-day-net-minutes="' in entry_form.text + + +def test_auto_break_setting_keeps_manual_default_break_value(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "manual-break-preserve@example.com", "password": "strongpasswordBreak6"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + save_manual_break = client.post( + "/settings/weekly-target", + data={ + "weekly_target_hours": "25", + "default_break_minutes": "35", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert save_manual_break.status_code == 303 + + enable_auto_break = client.post( + "/settings/weekly-target", + data={ + "weekly_target_hours": "25", + "automatic_break_rules_enabled": "on", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert enable_auto_break.status_code == 303 + + me = client.get("/me") + assert me.status_code == 200 + assert me.json()["automatic_break_rules_enabled"] is True + assert me.json()["default_break_minutes"] == 35 + + +def test_workhours_counter_target_warning_banner_is_rendered(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "workcounter-warning@example.com", "password": "strongpasswordCounterWarn1"}, + ) + assert register.status_code == 
200 + csrf = register.json()["csrf_token"] + + today = date.today() + start_date = (today - timedelta(days=14)).isoformat() + end_date = (today + timedelta(days=14)).isoformat() + + enable_counter = client.post( + "/settings/workhours-counter", + data={ + "workhours_counter_enabled": "on", + "workhours_counter_start_date": start_date, + "workhours_counter_end_date": end_date, + "workhours_counter_target_hours": "999", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert enable_counter.status_code == 303 + + dashboard = client.get("/dashboard") + assert dashboard.status_code == 200 + assert "Achtung: Arbeitsstundenziel wird ggf. nicht erreicht" in dashboard.text + + month = client.get("/month") + assert month.status_code == 200 + assert "Achtung: Arbeitsstundenziel wird ggf. nicht erreicht" in month.text + + +def test_register_onboarding_applies_optional_settings(app): + with TestClient(app) as client: + register_page = client.get("/register") + assert register_page.status_code == 200 + csrf_match = re.search(r'name="csrf_token" value="([^"]+)"', register_page.text) + assert csrf_match is not None + csrf = csrf_match.group(1) + + register_submit = client.post( + "/register", + data={ + "email": "onboarding@example.com", + "password": "strongpasswordOnboard1", + "federal_state": "HH", + "vacation_days_total": "22", + "vacation_show_in_header": "on", + "preferred_home_view": "month", + "entry_mode": "auto_until_today", + "overtime_start_date": "2026-02-02", + "overtime_expiry_days": "90", + "expire_negative_overtime": "on", + "workhours_counter_enabled": "on", + "workhours_counter_show_in_header": "on", + "workhours_counter_start_date": "2026-03-01", + "workhours_counter_end_date": "2026-03-31", + "workhours_counter_manual_offset_hours": "80", + "workhours_counter_target_hours": "120", + "workhours_counter_target_email_enabled": "on", + "working_days": ["0", "1", "2", "3"], + "mfa_preference": "none", + "csrf_token": csrf, + }, + follow_redirects=False, + 
) + assert register_submit.status_code == 303 + assert register_submit.headers["location"].startswith("/month") + + me = client.get("/me") + assert me.status_code == 200 + payload = me.json() + assert payload["federal_state"] == "HH" + assert payload["vacation_days_total"] == 22 + assert payload["vacation_show_in_header"] is True + assert payload["preferred_home_view"] == "month" + assert payload["entry_mode"] == "auto_until_today" + assert payload["overtime_start_date"] == "2026-02-02" + assert payload["overtime_expiry_days"] == 90 + assert payload["expire_negative_overtime"] is True + assert payload["working_days"] == [0, 1, 2, 3] + assert payload["workhours_counter_enabled"] is True + assert payload["workhours_counter_show_in_header"] is True + assert payload["workhours_counter_start_date"] == "2026-03-01" + assert payload["workhours_counter_end_date"] == "2026-03-31" + assert payload["workhours_counter_manual_offset_minutes"] == 4800 + assert payload["workhours_counter_target_minutes"] == 7200 + + +def test_settings_export_all_supports_backup_and_existing_formats(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "settings-export@example.com", "password": "strongpasswordExportAll1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + create = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": "2026-03-03", + "start_time": "08:30", + "end_time": "15:00", + "break_minutes": 30, + }, + ) + assert create.status_code == 200 + + export_xlsx = client.post( + "/settings/export-all", + data={"format": "xlsx", "csrf_token": csrf}, + ) + assert export_xlsx.status_code == 200 + assert "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" in export_xlsx.headers["content-type"] + + export_pdf = client.post( + "/settings/export-all", + data={"format": "pdf", "csrf_token": csrf}, + ) + assert export_pdf.status_code == 200 + assert 
"application/pdf" in export_pdf.headers["content-type"] + assert export_pdf.content.startswith(b"%PDF") + + export_backup = client.post( + "/settings/export-all", + data={"format": "backup_json", "csrf_token": csrf}, + ) + assert export_backup.status_code == 200 + assert "application/json" in export_backup.headers["content-type"] + payload = export_backup.json() + assert payload["backup_version"] == 2 + assert "user" not in payload + assert payload["settings"]["weekly_target_minutes"] == 1500 + assert len(payload["time_entries"]) == 1 + assert "weekly_target_rules" in payload + assert "vacation_periods" in payload + assert "special_day_statuses" in payload + assert "overtime_adjustments" in payload + + +def test_settings_backup_import_preview_and_execute_merge(app): + with TestClient(app) as source_client: + register = source_client.post( + "/auth/register", + json={"email": "backup-source@example.com", "password": "strongpasswordBackup1"}, + ) + assert register.status_code == 200 + source_csrf = register.json()["csrf_token"] + + source_client.post( + "/settings/workdays", + data={"working_days": ["0", "1", "2", "3"], "csrf_token": source_csrf}, + follow_redirects=False, + ) + create_source_entry = source_client.post( + "/time-entries", + headers={"x-csrf-token": source_csrf}, + json={ + "date": "2026-03-04", + "start_time": "08:30", + "end_time": "14:30", + "break_minutes": 30, + }, + ) + assert create_source_entry.status_code == 200 + export_backup = source_client.post( + "/settings/export-all", + data={"format": "backup_json", "csrf_token": source_csrf}, + ) + assert export_backup.status_code == 200 + backup_content = export_backup.content + + with TestClient(app) as target_client: + register = target_client.post( + "/auth/register", + json={"email": "backup-target@example.com", "password": "strongpasswordBackup2"}, + ) + assert register.status_code == 200 + target_csrf = register.json()["csrf_token"] + + conflicting_entry = target_client.post( + 
"/time-entries", + headers={"x-csrf-token": target_csrf}, + json={ + "date": "2026-03-04", + "start_time": "09:00", + "end_time": "15:00", + "break_minutes": 30, + }, + ) + assert conflicting_entry.status_code == 200 + + preview_response = target_client.post( + "/settings/import/preview", + data={"import_mode": "merge", "csrf_token": target_csrf}, + files={"backup_file": ("stundenfuchs-backup.json", backup_content, "application/json")}, + ) + assert preview_response.status_code == 200 + assert "Importvorschau" in preview_response.text + assert "Konflikte Arbeitszeiteinträge: 1" in preview_response.text + + preview_id_match = re.search(r'name="preview_id" value="([^"]+)"', preview_response.text) + assert preview_id_match is not None + preview_id = preview_id_match.group(1) + + execute_response = target_client.post( + "/settings/import/execute", + data={"preview_id": preview_id, "csrf_token": target_csrf}, + ) + assert execute_response.status_code == 200 + assert "Backup importiert." in execute_response.text + + me = target_client.get("/me") + assert me.status_code == 200 + assert me.json()["working_days"] == [0, 1, 2, 3] + + +def test_register_can_import_backup_during_signup(app): + with TestClient(app) as source_client: + register = source_client.post( + "/auth/register", + json={"email": "register-import-source@example.com", "password": "strongpasswordImport1"}, + ) + assert register.status_code == 200 + source_csrf = register.json()["csrf_token"] + + source_client.post( + "/settings/preferences", + data={ + "preferred_home_view": "month", + "preferred_month_view_mode": "weeks", + "entry_mode": "auto_until_today", + "csrf_token": source_csrf, + }, + follow_redirects=False, + ) + source_client.post( + "/settings/weekly-target", + data={ + "weekly_target_hours": "25", + "automatic_break_rules_enabled": "on", + "default_break_minutes": "20", + "csrf_token": source_csrf, + }, + follow_redirects=False, + ) + source_client.post( + "/settings/workdays", + data={ + 
"working_days": ["0", "1", "2", "3"], + "count_vacation_as_worktime": "on", + "csrf_token": source_csrf, + }, + follow_redirects=False, + ) + create_source_entry = source_client.post( + "/time-entries", + headers={"x-csrf-token": source_csrf}, + json={ + "date": "2026-03-03", + "start_time": "08:30", + "end_time": "15:00", + "break_minutes": 30, + }, + ) + assert create_source_entry.status_code == 200 + export_backup = source_client.post( + "/settings/export-all", + data={"format": "backup_json", "csrf_token": source_csrf}, + ) + assert export_backup.status_code == 200 + backup_content = export_backup.content + + with TestClient(app) as target_client: + register_page = target_client.get("/register") + assert register_page.status_code == 200 + csrf_match = re.search(r'name="csrf_token" value="([^"]+)"', register_page.text) + assert csrf_match is not None + register_csrf = csrf_match.group(1) + + register_submit = target_client.post( + "/register", + data={ + "email": "register-import-target@example.com", + "password": "strongpasswordImport2", + "entry_mode": "manual", + "mfa_preference": "none", + "csrf_token": register_csrf, + }, + files={"backup_file": ("stundenfuchs-backup.json", backup_content, "application/json")}, + follow_redirects=False, + ) + assert register_submit.status_code == 303 + + me = target_client.get("/me") + assert me.status_code == 200 + payload = me.json() + assert payload["preferred_home_view"] == "month" + assert payload["entry_mode"] == "auto_until_today" + assert payload["working_days"] == [0, 1, 2, 3] + assert payload["count_vacation_as_worktime"] is True + assert payload["automatic_break_rules_enabled"] is True + + +def test_settings_import_accepts_legacy_backup_version_one(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "legacy-import@example.com", "password": "strongpasswordLegacy1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + export_backup = 
client.post( + "/settings/export-all", + data={"format": "backup_json", "csrf_token": csrf}, + ) + assert export_backup.status_code == 200 + payload = export_backup.json() + legacy_payload = { + **payload, + "backup_version": 1, + "user": { + "email": "legacy@example.com", + "created_at": "2026-03-01T12:00:00+00:00", + "settings": payload["settings"], + }, + } + del legacy_payload["settings"] + + preview_response = client.post( + "/settings/import/preview", + data={"import_mode": "merge", "csrf_token": csrf}, + files={"backup_file": ("legacy-backup.json", json.dumps(legacy_payload).encode("utf-8"), "application/json")}, + ) + assert preview_response.status_code == 200 + assert "Importvorschau" in preview_response.text + + +def test_user_can_delete_own_account_and_related_data(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "delete-me@example.com", "password": "strongpasswordDelete1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + user_id = register.json()["id"] + + create = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": "2026-03-03", + "start_time": "08:30", + "end_time": "15:00", + "break_minutes": 30, + }, + ) + assert create.status_code == 200 + + delete_account = client.post( + "/settings/account/delete", + data={ + "confirm_email": "delete-me@example.com", + "current_password": "strongpasswordDelete1", + "confirm_delete": "on", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert delete_account.status_code == 303 + assert delete_account.headers["location"] == "/login?msg=account_deleted" + + with Session(get_engine()) as db: + user = db.execute(select(User).where(User.id == user_id)).scalar_one_or_none() + entries = db.execute(select(TimeEntry).where(TimeEntry.user_id == user_id)).scalars().all() + assert user is None + assert entries == [] + + +def test_settings_default_view_redirect(app): + with TestClient(app) as 
client: + register = client.post( + "/auth/register", + json={"email": "prefs@example.com", "password": "strongpasswordPrefs1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + update_prefs = client.post( + "/settings/preferences", + data={ + "preferred_home_view": "month", + "preferred_month_view_mode": "weeks", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_prefs.status_code == 303 + + root_redirect = client.get("/", follow_redirects=False) + assert root_redirect.status_code == 303 + assert root_redirect.headers["location"].startswith("/month?view=weeks") + + dashboard_redirect = client.get("/dashboard", follow_redirects=False) + assert dashboard_redirect.status_code == 303 + assert dashboard_redirect.headers["location"].startswith("/month?view=weeks") + + +def test_main_navigation_uses_explicit_period_links(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "nav-periods@example.com", "password": "strongpasswordNav1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + update_prefs = client.post( + "/settings/preferences", + data={ + "preferred_home_view": "month", + "preferred_month_view_mode": "flat", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_prefs.status_code == 303 + + month_page = client.get("/month", params={"month": "2026-03", "view": "flat"}) + assert month_page.status_code == 200 + assert f'href="/dashboard?date={date.today().isoformat()}"' in month_page.text + assert 'href="/month?month=2026-03&view=flat"' in month_page.text + + +def test_overtime_start_and_expiry_rules(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "overtime@example.com", "password": "strongpasswordOver1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + set_target = client.post( + "/weekly-target", + data={ + 
"week_start": "2026-03-02", + "weekly_target_hours": "10", + "scope": "all_weeks", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert set_target.status_code == 303 + + entry_week1 = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": "2026-03-02", + "start_time": "08:00", + "end_time": "20:00", + "break_minutes": 0, + }, + ) + assert entry_week1.status_code == 200 + + entry_week2 = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": "2026-03-09", + "start_time": "08:00", + "end_time": "16:00", + "break_minutes": 0, + }, + ) + assert entry_week2.status_code == 200 + + baseline = client.get("/reports/week", params={"date": "2026-03-09"}) + assert baseline.status_code == 200 + assert baseline.json()["cumulative_delta_minutes"] == 0 + + set_start_date = client.post( + "/settings/overtime", + data={ + "overtime_start_date": "2026-03-09", + "overtime_expiry_days": "", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert set_start_date.status_code == 303 + + with_start = client.get("/reports/week", params={"date": "2026-03-09"}) + assert with_start.status_code == 200 + assert with_start.json()["cumulative_delta_minutes"] == -120 + + week_before_start = client.get("/reports/week", params={"date": "2026-03-02"}) + assert week_before_start.status_code == 200 + assert week_before_start.json()["weekly_ist_minutes"] == 0 + assert week_before_start.json()["weekly_soll_minutes"] == 0 + assert week_before_start.json()["weekly_delta_minutes"] == 0 + assert week_before_start.json()["cumulative_delta_minutes"] == 0 + + set_expiry_keep_negative = client.post( + "/settings/overtime", + data={ + "overtime_start_date": "", + "overtime_expiry_days": "3", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert set_expiry_keep_negative.status_code == 303 + + expiry_keep_negative = client.get("/reports/week", params={"date": "2026-03-09"}) + assert expiry_keep_negative.status_code == 200 + 
assert expiry_keep_negative.json()["cumulative_delta_minutes"] == -960 + + set_expiry_drop_negative = client.post( + "/settings/overtime", + data={ + "overtime_start_date": "", + "overtime_expiry_days": "3", + "expire_negative_overtime": "on", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert set_expiry_drop_negative.status_code == 303 + + expiry_drop_negative = client.get("/reports/week", params={"date": "2026-03-09"}) + assert expiry_drop_negative.status_code == 200 + assert expiry_drop_negative.json()["cumulative_delta_minutes"] == -240 + + +def test_overtime_adjustment_counts_before_overtime_start_date(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "overtime-adjustment@example.com", "password": "strongpasswordAdjust1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + set_start_date = client.post( + "/settings/overtime", + data={ + "overtime_start_date": "2026-03-09", + "overtime_expiry_days": "", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert set_start_date.status_code == 303 + + adjustment = client.post( + "/overtime-adjustment/set", + data={ + "date": "2026-03-03", + "adjustment_mode": "manual", + "adjustment_value": "-02:00", + "return_to": "/entry/new?date=2026-03-03", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert adjustment.status_code == 303 + + week_before_start = client.get("/reports/week", params={"date": "2026-03-03"}) + assert week_before_start.status_code == 200 + payload = week_before_start.json() + assert payload["weekly_ist_minutes"] == 0 + assert payload["weekly_soll_minutes"] == 0 + assert payload["weekly_delta_minutes"] == -120 + assert payload["cumulative_delta_minutes"] == -120 + assert payload["days"][1]["overtime_adjustment_minutes"] == -120 + + +def test_overtime_adjustment_can_be_combined_with_holiday(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + 
json={"email": "overtime-adjustment-holiday@example.com", "password": "strongpasswordAdjust2"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + set_target = client.post( + "/weekly-target", + data={ + "week_start": "2026-03-02", + "weekly_target_hours": "30", + "scope": "all_weeks", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert set_target.status_code == 303 + + update_workdays = client.post( + "/settings/workdays", + data={ + "working_days": ["0", "1", "2", "3"], + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_workdays.status_code == 303 + + add_holiday = client.post( + "/special-day/toggle", + data={ + "date": "2026-03-03", + "status": "holiday", + "return_to": "/dashboard?date=2026-03-03", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert add_holiday.status_code == 303 + + add_full_day_adjustment = client.post( + "/overtime-adjustment/set", + data={ + "date": "2026-03-03", + "adjustment_mode": "full_day", + "full_day_direction": "negative", + "return_to": "/entry/new?date=2026-03-03", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert add_full_day_adjustment.status_code == 303 + + week_report = client.get("/reports/week", params={"date": "2026-03-03"}) + assert week_report.status_code == 200 + payload = week_report.json() + assert payload["days"][1]["special_status"] == "holiday" + assert payload["days"][1]["overtime_adjustment_minutes"] == -450 + assert payload["weekly_ist_minutes"] == 0 + assert payload["weekly_soll_minutes"] == 1350 + assert payload["weekly_delta_minutes"] == -1800 + + +def test_overtime_adjustment_interval_mode_changes_delta(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "overtime-adjustment-interval@example.com", "password": "strongpasswordAdjust3"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + baseline = client.get("/reports/week", 
params={"date": "2026-03-03"}) + assert baseline.status_code == 200 + + add_interval_adjustment = client.post( + "/overtime-adjustment/set", + data={ + "date": "2026-03-03", + "adjustment_mode": "interval", + "interval_start_time": "08:15", + "interval_end_time": "10:45", + "interval_direction": "positive", + "return_to": "/overtime-adjustment/edit?date=2026-03-03", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert add_interval_adjustment.status_code == 303 + + updated = client.get("/reports/week", params={"date": "2026-03-03"}) + assert updated.status_code == 200 + updated_payload = updated.json() + assert updated_payload["weekly_delta_minutes"] == baseline.json()["weekly_delta_minutes"] + 150 + assert updated_payload["days"][1]["overtime_adjustment_minutes"] == 150 + + +def test_day_forms_are_split_by_function(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "focused-forms@example.com", "password": "strongpasswordForms1"}, + ) + assert register.status_code == 200 + + time_form = client.get("/entry/new", params={"date": "2026-03-03"}) + assert time_form.status_code == 200 + assert "Arbeitsbeginn" in time_form.text + assert "Tagesmodus" not in time_form.text + + status_form = client.get("/day-status/edit", params={"date": "2026-03-03", "status": "holiday"}) + assert status_form.status_code == 200 + assert "Feiertag" in status_form.text + assert "Arbeitsbeginn" not in status_form.text + + overtime_form = client.get("/overtime-adjustment/edit", params={"date": "2026-03-03"}) + assert overtime_form.status_code == 200 + assert "Von-Bis Uhrzeit" in overtime_form.text + assert "Arbeitsbeginn" not in overtime_form.text + + +def test_non_working_days_can_count_as_regular_workdays(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "count-special-days@example.com", "password": "strongpasswordCount1"}, + ) + assert register.status_code == 200 + csrf = 
register.json()["csrf_token"] + + update_workdays = client.post( + "/settings/workdays", + data={ + "working_days": ["0", "1", "2", "3"], + "count_vacation_as_worktime": "on", + "count_holiday_as_worktime": "on", + "count_sick_as_worktime": "on", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_workdays.status_code == 303 + + add_vacation = client.post( + "/vacation/day/toggle", + data={"date": "2026-03-03", "return_to": "/dashboard?date=2026-03-03", "csrf_token": csrf}, + follow_redirects=False, + ) + assert add_vacation.status_code == 303 + + add_holiday = client.post( + "/special-day/toggle", + data={"date": "2026-03-04", "status": "holiday", "return_to": "/dashboard?date=2026-03-03", "csrf_token": csrf}, + follow_redirects=False, + ) + assert add_holiday.status_code == 303 + + add_sick = client.post( + "/special-day/toggle", + data={"date": "2026-03-05", "status": "sick", "return_to": "/dashboard?date=2026-03-03", "csrf_token": csrf}, + follow_redirects=False, + ) + assert add_sick.status_code == 303 + + week_report = client.get("/reports/week", params={"date": "2026-03-03"}) + assert week_report.status_code == 200 + payload = week_report.json() + assert payload["weekly_soll_minutes"] == 1500 + assert payload["weekly_ist_minutes"] == 1125 + assert payload["weekly_delta_minutes"] == -375 + + +def test_auto_entry_mode_prefills_only_until_today(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "auto-mode@example.com", "password": "strongpasswordAuto1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + set_auto_mode = client.post( + "/settings/preferences", + data={ + "preferred_home_view": "week", + "preferred_month_view_mode": "flat", + "entry_mode": "auto_until_today", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert set_auto_mode.status_code == 303 + + today = date.today() + today_items = client.get( + "/time-entries", + 
params={"from": today.isoformat(), "to": today.isoformat()}, + ) + assert today_items.status_code == 200 + today_payload = today_items.json() + if today.weekday() <= 4: + assert len(today_payload["items"]) == 1 + assert today_payload["items"][0]["date"] == today.isoformat() + assert today_payload["items"][0]["start_time"] == "08:30" + else: + assert len(today_payload["items"]) == 0 + + future_workday = today + timedelta(days=1) + while future_workday.weekday() > 4: + future_workday += timedelta(days=1) + + future_items = client.get( + "/time-entries", + params={"from": future_workday.isoformat(), "to": future_workday.isoformat()}, + ) + assert future_items.status_code == 200 + assert len(future_items.json()["items"]) == 0 + + +def test_deleting_auto_entry_keeps_day_empty_in_auto_mode(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "auto-delete@example.com", "password": "strongpasswordAutoDelete1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + update_workdays = client.post( + "/settings/workdays", + data={ + "working_days": ["0", "1", "2", "3", "4", "5", "6"], + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert update_workdays.status_code == 303 + + set_auto_mode = client.post( + "/settings/preferences", + data={ + "preferred_home_view": "week", + "preferred_month_view_mode": "flat", + "entry_mode": "auto_until_today", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert set_auto_mode.status_code == 303 + + today = date.today().isoformat() + initial_items = client.get("/time-entries", params={"from": today, "to": today}) + assert initial_items.status_code == 200 + assert len(initial_items.json()["items"]) == 1 + entry_id = initial_items.json()["items"][0]["id"] + + delete_entry = client.delete( + f"/time-entries/{entry_id}", + headers={"x-csrf-token": csrf}, + ) + assert delete_entry.status_code == 200 + + after_delete = 
client.get("/time-entries", params={"from": today, "to": today}) + assert after_delete.status_code == 200 + assert after_delete.json()["items"] == [] + + +def test_switching_modes_remove_future_auto_entries(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "auto-manual-switch@example.com", "password": "strongpasswordAuto2"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + today = date.today() + future_workday = today + timedelta(days=1) + while future_workday.weekday() > 4: + future_workday += timedelta(days=1) + + create_future_auto_entry = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": future_workday.isoformat(), + "start_time": "08:30", + "end_time": "14:00", + "break_minutes": 0, + "notes": "Automatisch vorausgefuellt", + }, + ) + assert create_future_auto_entry.status_code == 200 + + before_switch = client.get( + "/time-entries", + params={"from": future_workday.isoformat(), "to": future_workday.isoformat()}, + ) + assert before_switch.status_code == 200 + assert len(before_switch.json()["items"]) == 1 + + enable_auto_until_today = client.post( + "/settings/preferences", + data={ + "preferred_home_view": "week", + "preferred_month_view_mode": "flat", + "entry_mode": "auto_until_today", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert enable_auto_until_today.status_code == 303 + + after_auto_until_today = client.get( + "/time-entries", + params={"from": future_workday.isoformat(), "to": future_workday.isoformat()}, + ) + assert after_auto_until_today.status_code == 200 + assert len(after_auto_until_today.json()["items"]) == 0 + + recreate_future_auto_entry = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": future_workday.isoformat(), + "start_time": "08:30", + "end_time": "14:00", + "break_minutes": 0, + "notes": "Automatisch vorausgefuellt", + }, + ) + assert 
recreate_future_auto_entry.status_code == 200 + + disable_auto = client.post( + "/settings/preferences", + data={ + "preferred_home_view": "week", + "preferred_month_view_mode": "flat", + "entry_mode": "manual", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert disable_auto.status_code == 303 + + after_disable = client.get( + "/time-entries", + params={"from": future_workday.isoformat(), "to": future_workday.isoformat()}, + ) + assert after_disable.status_code == 200 + assert len(after_disable.json()["items"]) == 0 + + +def test_help_page_is_available_for_authenticated_users(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "help-page@example.com", "password": "strongpasswordHelp1"}, + ) + assert register.status_code == 200 + + help_page = client.get("/hilfe") + assert help_page.status_code == 200 + assert "Stundenausgleich (S)" in help_page.text + assert "Arbeitsstunden-Counter" in help_page.text + assert "Schritt-für-Schritt-Anleitungen" in help_page.text + assert "gesetzliche Mindestpause" in help_page.text + + +def test_root_renders_guest_landing(app): + with TestClient(app) as guest_client: + landing = guest_client.get("/") + assert landing.status_code == 200 + assert "Arbeitszeit, Urlaub und Überstunden an einem Ort" in landing.text + assert "Jetzt registrieren" in landing.text + assert 'href="/register"' in landing.text + assert 'href="/login"' in landing.text diff --git a/tests/test_weekly_targets.py b/tests/test_weekly_targets.py new file mode 100644 index 0000000..e65a19d --- /dev/null +++ b/tests/test_weekly_targets.py @@ -0,0 +1,88 @@ +from fastapi.testclient import TestClient + + +def test_weekly_target_scopes(app): + with TestClient(app) as client: + register = client.post( + "/auth/register", + json={"email": "scope@example.com", "password": "strongpasswordScope1"}, + ) + assert register.status_code == 200 + csrf = register.json()["csrf_token"] + + create_w1 = client.post( + 
"/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": "2026-03-02", # Montag + "start_time": "08:00", + "end_time": "13:00", + "break_minutes": 0, + }, + ) + assert create_w1.status_code == 200 + + create_w2 = client.post( + "/time-entries", + headers={"x-csrf-token": csrf}, + json={ + "date": "2026-03-09", # Folgewoche + "start_time": "08:00", + "end_time": "13:00", + "break_minutes": 0, + }, + ) + assert create_w2.status_code == 200 + + week1_default = client.get("/reports/week", params={"date": "2026-03-02"}) + assert week1_default.status_code == 200 + assert week1_default.json()["weekly_soll_minutes"] == 1500 + + change_current = client.post( + "/weekly-target", + data={ + "week_start": "2026-03-02", + "weekly_target_hours": "20", + "scope": "current_week", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert change_current.status_code == 303 + + week1_changed = client.get("/reports/week", params={"date": "2026-03-02"}) + week2_after_current = client.get("/reports/week", params={"date": "2026-03-09"}) + assert week1_changed.json()["weekly_soll_minutes"] == 1200 + assert week2_after_current.json()["weekly_soll_minutes"] == 1500 + + change_future = client.post( + "/weekly-target", + data={ + "week_start": "2026-03-09", + "weekly_target_hours": "30", + "scope": "from_current_week", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert change_future.status_code == 303 + + week2_changed = client.get("/reports/week", params={"date": "2026-03-09"}) + assert week2_changed.json()["weekly_soll_minutes"] == 1800 + + change_all = client.post( + "/weekly-target", + data={ + "week_start": "2026-03-09", + "weekly_target_hours": "22", + "scope": "all_weeks", + "csrf_token": csrf, + }, + follow_redirects=False, + ) + assert change_all.status_code == 303 + + week1_all = client.get("/reports/week", params={"date": "2026-03-02"}) + week2_all = client.get("/reports/week", params={"date": "2026-03-09"}) + assert 
week1_all.json()["weekly_soll_minutes"] == 1320 + assert week2_all.json()["weekly_soll_minutes"] == 1320 diff --git a/tools/policy_checks.py b/tools/policy_checks.py new file mode 100755 index 0000000..c815cd4 --- /dev/null +++ b/tools/policy_checks.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import re +import sys +from pathlib import Path + +ROOT = Path(__file__).resolve().parents[1] +TEMPLATES_DIR = ROOT / "app" / "templates" +PAGES_DIR = TEMPLATES_DIR / "pages" +BASE_FILE = TEMPLATES_DIR / "base.html" +CSS_DIR = ROOT / "app" / "static" / "css" + +RULE_EXTENDS = "POL001" +RULE_INLINE_STYLE = "POL002" +RULE_EXTRA_ASSETS = "POL003" +RULE_HEX_OUTSIDE_TOKENS = "POL004" +RULE_PX_SPACING = "POL005" +RULE_BASE_ASSETS = "POL006" + +EXTENDS_BASE_RE = re.compile(r"\{%-?\s*extends\s+\"base\.html\"\s*-?%\}") +INLINE_STYLE_RE = re.compile(r"]+rel=\"stylesheet\"|]+src=\"", re.IGNORECASE) +HEX_RE = re.compile(r"#[0-9a-fA-F]{3,8}") +PX_SPACING_RE = re.compile( + r"(?:margin|padding|gap|row-gap|column-gap)\s*:\s*[^;]*\d+px", + flags=re.IGNORECASE, +) + + +def err(errors: list[str], path: Path, line_no: int, rule: str, message: str) -> None: + rel = path.relative_to(ROOT) + errors.append(f"{rel}:{line_no}: {rule} {message}") + + +def check_base_assets(errors: list[str]) -> None: + if not BASE_FILE.exists(): + errors.append(f"{BASE_FILE.relative_to(ROOT)}:1: {RULE_BASE_ASSETS} Missing base.html") + return + + base_content = BASE_FILE.read_text(encoding="utf-8") + css_hits = re.findall(r'/static/css/[^\"]+', base_content) + js_hits = re.findall(r'/static/js/[^\"]+', base_content) + + expected_css = ["/static/css/app.css?v={{ asset_version }}"] + expected_js = ["/static/js/app.js?v={{ asset_version }}"] + + if css_hits != expected_css: + errors.append( + f"{BASE_FILE.relative_to(ROOT)}:1: {RULE_BASE_ASSETS} expected CSS include {expected_css}, found {css_hits}" + ) + + if js_hits != expected_js: + errors.append( + 
f"{BASE_FILE.relative_to(ROOT)}:1: {RULE_BASE_ASSETS} expected JS include {expected_js}, found {js_hits}" + ) + + +def check_pages_extend_base(errors: list[str]) -> None: + page_files = sorted(PAGES_DIR.glob("*.html")) + if not page_files: + errors.append(f"{PAGES_DIR.relative_to(ROOT)}:1: {RULE_EXTENDS} No page templates found") + return + + for path in page_files: + content = path.read_text(encoding="utf-8") + if not EXTENDS_BASE_RE.search(content): + err( + errors, + path, + 1, + RULE_EXTENDS, + "page template must contain {% extends \"base.html\" %}", + ) + + +def check_templates_inline_and_assets(errors: list[str]) -> None: + for path in sorted(TEMPLATES_DIR.rglob("*.html")): + for idx, line in enumerate(path.read_text(encoding="utf-8").splitlines(), start=1): + if INLINE_STYLE_RE.search(line): + err(errors, path, idx, RULE_INLINE_STYLE, "inline style or